diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..3779792
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,63 @@
+# Python
+__pycache__/
+*.pyc
+*.pyo
+*.pyd
+.Python
+*.so
+*.egg
+*.egg-info/
+dist/
+build/
+
+# Virtual Environment
+.venv/
+venv/
+ENV/
+env/
+
+# Testing
+.pytest_cache/
+.coverage
+htmlcov/
+.tox/
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# Git
+.git/
+.gitignore
+.github/
+
+# Environment
+# .env enthält lokale Konfiguration (Secrets) und darf NICHT committed werden
+.env.local
+.env.*.local
+
+# Backup files
+*.backup*
+*.bak
+*.bak[0-9]
+*.backup_*
+
+# Logs (werden als Volume gemountet)
+logs/
+
+# Data (wird als Volume gemountet)
+data/filamenthub.db
+data/backups/
+
+# Documentation
+docs/
+*.md
+!README.md
+
+# Temporary files
+temp_*
+tmp_*
+nul
diff --git a/.env b/.env
new file mode 100644
index 0000000..a9cb532
--- /dev/null
+++ b/.env
@@ -0,0 +1,10 @@
+# FilamentHub Environment Configuration
+#
+# WICHTIG: Dieser Hash ist ein FAKE-PLATZHALTER und funktioniert NICHT zum Einloggen!
+#
+# Für normale Nutzer: Der Admin-Bereich ist NICHT erforderlich - die App funktioniert vollständig ohne!
+# Für Entwickler: Verwenden Sie Ihre separate .env Datei mit dem echten Admin-Hash.
+# Admin-Zugang wird nur auf Anfrage beim Entwickler vergeben.
+
+ADMIN_PASSWORD_HASH=$2b$12$L5hUkHdH.NH6CeC6FiH0o.lpnNpRA3zaAho6QyerwP3ZQF19xqmmq
+ADMIN_COOKIE_SECURE=false
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..5a62c61
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,18 @@
+# Ensure shell scripts always use LF line endings (Unix style)
+*.sh text eol=lf
+
+# Python files
+*.py text eol=lf
+
+# Config files
+*.yaml text eol=lf
+*.yml text eol=lf
+*.json text eol=lf
+*.ini text eol=lf
+
+# Markdown
+*.md text eol=lf
+
+# Docker files
+Dockerfile text eol=lf
+docker-compose.yml text eol=lf
diff --git a/.github/workflows/cleanup-beta-releases.yml b/.github/workflows/cleanup-beta-releases.yml
new file mode 100644
index 0000000..76255f8
--- /dev/null
+++ b/.github/workflows/cleanup-beta-releases.yml
@@ -0,0 +1,53 @@
+name: Cleanup old beta releases
+
+on:
+ workflow_dispatch:
+ push:
+ branches:
+ - beta
+
+jobs:
+ cleanup:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Delete old beta releases (keep last 5)
+ uses: actions/github-script@v7
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const owner = context.repo.owner;
+ const repo = context.repo.repo;
+ const keep = 5;
+
+ const releases = await github.paginate(
+ github.rest.repos.listReleases,
+ { owner, repo }
+ );
+
+ const betaReleases = releases
+ .filter(r => r.prerelease)
+ .sort((a, b) => new Date(b.created_at) - new Date(a.created_at));
+
+ const toDelete = betaReleases.slice(keep);
+
+ console.log(`Found ${betaReleases.length} beta releases`);
+ console.log(`Keeping ${keep}, deleting ${toDelete.length}`);
+
+ for (const release of toDelete) {
+ console.log(`Deleting beta release: ${release.tag_name}`);
+
+ await github.rest.repos.deleteRelease({
+ owner,
+ repo,
+ release_id: release.id,
+ });
+
+ await github.rest.git.deleteRef({
+ owner,
+ repo,
+ ref: `tags/${release.tag_name}`,
+ }).catch(() => {
+ console.log(`Tag ${release.tag_name} already removed`);
+ });
+ }
diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index f2e6fe0..5ce81b1 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -4,8 +4,8 @@ on:
push:
branches:
- main
+ - beta
paths:
- # Build-relevanter Code
- "app/**"
- "services/**"
- "backend/**"
@@ -16,7 +16,6 @@ on:
- "Dockerfile"
- "entrypoint.sh"
- "run.py"
- # Version steuert Release
- "config.yaml"
workflow_dispatch:
@@ -27,7 +26,7 @@ jobs:
steps:
# ------------------------------------------------------------
- # 1. Checkout (volle Historie für Tags & Changelog)
+ # 1. Checkout
# ------------------------------------------------------------
- name: Checkout repository
uses: actions/checkout@v4
@@ -35,7 +34,7 @@ jobs:
fetch-depth: 0
# ------------------------------------------------------------
- # 2. Aktuelle Version aus config.yaml lesen (Python/YAML)
+ # 2. Version aus config.yaml lesen
# ------------------------------------------------------------
- name: Read current version
id: version
@@ -43,30 +42,47 @@ jobs:
ver=$(python - << 'EOF'
import yaml
with open("config.yaml") as f:
- cfg = yaml.safe_load(f)
- print(cfg["app"]["version"])
+ print(yaml.safe_load(f)["app"]["version"])
EOF
)
echo "version=$ver" >> $GITHUB_OUTPUT
# ------------------------------------------------------------
- # 3. Alte Version aus main vergleichen
+ # 3. Channel bestimmen (main = stable, beta = prerelease)
+ # ------------------------------------------------------------
+ - name: Detect channel
+ id: channel
+ run: |
+ if [ "${GITHUB_REF##*/}" = "beta" ]; then
+ echo "channel=beta" >> $GITHUB_OUTPUT
+ echo "docker_main_tag=beta" >> $GITHUB_OUTPUT
+ echo "docker_version_tag=${{ steps.version.outputs.version }}-beta" >> $GITHUB_OUTPUT
+ echo "suffix=-beta" >> $GITHUB_OUTPUT
+ echo "prerelease=true" >> $GITHUB_OUTPUT
+ else
+ echo "channel=stable" >> $GITHUB_OUTPUT
+ echo "docker_main_tag=latest" >> $GITHUB_OUTPUT
+ echo "docker_version_tag=${{ steps.version.outputs.version }}" >> $GITHUB_OUTPUT
+ echo "suffix=" >> $GITHUB_OUTPUT
+ echo "prerelease=false" >> $GITHUB_OUTPUT
+ fi
+
+ # ------------------------------------------------------------
+ # 4. Versionsvergleich (NUR stable)
# ------------------------------------------------------------
- name: Read previous version from main
+ if: ${{ steps.channel.outputs.channel == 'stable' }}
id: oldversion
run: |
old_ver=$(git show origin/main:config.yaml | python - << 'EOF'
import sys, yaml
- cfg = yaml.safe_load(sys.stdin)
- print(cfg["app"]["version"])
+ print(yaml.safe_load(sys.stdin)["app"]["version"])
EOF
)
echo "old=$old_ver" >> $GITHUB_OUTPUT
- # ------------------------------------------------------------
- # 4. Versionsänderung prüfen
- # ------------------------------------------------------------
- - name: Check version change
+ - name: Check version change (stable only)
+ if: ${{ steps.channel.outputs.channel == 'stable' }}
id: version_changed
run: |
if [ "${{ steps.version.outputs.version }}" = "${{ steps.oldversion.outputs.old }}" ]; then
@@ -75,19 +91,17 @@ jobs:
echo "changed=true" >> $GITHUB_OUTPUT
fi
- # ------------------------------------------------------------
- # 5. Abbruch wenn kein Versionssprung
- # ------------------------------------------------------------
- - name: Skip if no version bump
- if: ${{ steps.version_changed.outputs.changed == 'false' }}
+ - name: Skip if no version bump (stable only)
+ if: ${{ steps.channel.outputs.channel == 'stable' && steps.version_changed.outputs.changed == 'false' }}
run: |
- echo "Kein Versionssprung – kein Build, kein Release."
+ echo "Kein Versionssprung – Stable Build übersprungen."
exit 0
# ------------------------------------------------------------
- # 6. Git-Tag setzen (fehlertolerant)
+ # 5. Git Tag (NUR stable)
# ------------------------------------------------------------
- - name: Create Git tag
+ - name: Create Git tag (stable only)
+ if: ${{ steps.channel.outputs.channel == 'stable' }}
run: |
git tag v${{ steps.version.outputs.version }} || echo "Tag existiert bereits"
git push origin v${{ steps.version.outputs.version }} || true
@@ -96,9 +110,10 @@ jobs:
run: sleep 3
# ------------------------------------------------------------
- # 7. Changelog DE
+ # 6. Changelog DE (NUR stable)
# ------------------------------------------------------------
- name: Generate Changelog (DE)
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
id: changelog_de
uses: mikepenz/release-changelog-builder-action@v4
with:
@@ -107,9 +122,11 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Write CHANGELOG.md
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
run: echo "${{ steps.changelog_de.outputs.changelog }}" > CHANGELOG.md
- name: Commit CHANGELOG.md
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
run: |
git config user.name "github-actions"
git config user.email "github-actions@github.com"
@@ -117,14 +134,16 @@ jobs:
git commit -m "Update CHANGELOG for v${{ steps.version.outputs.version }}" || echo "No changes"
- name: Push CHANGELOG.md
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
uses: ad-m/github-push-action@v0.8.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
# ------------------------------------------------------------
- # 8. Changelog EN
+ # 7. Changelog EN (NUR stable)
# ------------------------------------------------------------
- name: Generate Changelog (EN)
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
id: changelog_en
uses: mikepenz/release-changelog-builder-action@v4
with:
@@ -133,20 +152,23 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Write CHANGELOG_EN.md
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
run: echo "${{ steps.changelog_en.outputs.changelog }}" > CHANGELOG_EN.md
- name: Commit CHANGELOG_EN.md
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
run: |
git add CHANGELOG_EN.md
git commit -m "Update CHANGELOG_EN for v${{ steps.version.outputs.version }}" || echo "No changes"
- name: Push CHANGELOG_EN.md
+ if: ${{ steps.channel.outputs.prerelease == 'false' }}
uses: ad-m/github-push-action@v0.8.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
# ------------------------------------------------------------
- # 9. Docker Build Vorbereitung
+ # 8. Docker Setup
# ------------------------------------------------------------
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -161,7 +183,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
# ------------------------------------------------------------
- # 10. Docker Image bauen & pushen (Multi-Arch)
+ # 9. Docker Build & Push
# ------------------------------------------------------------
- name: Build & Push Docker Image
uses: docker/build-push-action@v5
@@ -170,25 +192,26 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
- d3nn3s/filamenthub:latest
- d3nn3s/filamenthub:${{ steps.version.outputs.version }}
+ d3nn3s/filamenthub:${{ steps.channel.outputs.docker_main_tag }}
+ d3nn3s/filamenthub:${{ steps.channel.outputs.docker_version_tag }}
d3nn3s/filamenthub:sha-${{ github.sha }}
# ------------------------------------------------------------
- # 11. GitHub Release (DE + EN)
+ # 10. GitHub Release (stable + beta)
# ------------------------------------------------------------
- name: Create GitHub Release
uses: softprops/action-gh-release@v1
with:
- tag_name: v${{ steps.version.outputs.version }}
- name: FilamentHub v${{ steps.version.outputs.version }}
+ tag_name: v${{ steps.version.outputs.version }}${{ steps.channel.outputs.suffix }}
+ name: FilamentHub v${{ steps.version.outputs.version }}${{ steps.channel.outputs.suffix }}
+ prerelease: ${{ steps.channel.outputs.prerelease }}
body: |
## 🇩🇪 Deutsch
- ${{ steps.changelog_de.outputs.changelog }}
+ ${{ steps.changelog_de.outputs.changelog || 'Beta Release – kein finaler Changelog.' }}
---
## 🇬🇧 English
- ${{ steps.changelog_en.outputs.changelog }}
+ ${{ steps.changelog_en.outputs.changelog || 'Beta release – no final changelog.' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1d0371f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,73 @@
+# =========================
+# Python / Bytecode
+# =========================
+__pycache__/
+*.py[cod]
+*.pyd
+*.so
+.Python
+
+# =========================
+# Test / Coverage
+# =========================
+.pytest_cache/
+.mypy_cache/
+.ruff_cache/
+.coverage
+coverage.xml
+htmlcov/
+
+# =========================
+# Virtual Environments
+# =========================
+.venv/
+venv/
+ENV/
+
+# =========================
+# OS / Editor
+# =========================
+.DS_Store
+Thumbs.db
+.vscode/
+
+# =========================
+# Runtime Output
+# =========================
+logs/
+
+# =========================
+# Database (lokal & Test)
+# =========================
+data/*.db
+data/filamenthub.db
+data/filamenthub.db-*
+data/test*.db
+data/backups/
+
+# =========================
+# Snapshots (Runtime-State)
+# =========================
+data/job_snapshots.json
+
+# =========================
+# Temporary / Debug Scripts
+# =========================
+scripts/_tmp_*
+scripts/tmp_*
+
+# =========================
+# Backup / Scratch Files
+# =========================
+*.bak
+*.bak*
+*.backup
+*.backup*
+**/*.bak*
+**/*.backup*
+Backup/
+# =========================
+# Environment / Secrets
+# =========================
+.env.local
+.env.*.local
diff --git a/.scripts/env_check.py b/.scripts/env_check.py
new file mode 100644
index 0000000..bca3c2d
--- /dev/null
+++ b/.scripts/env_check.py
@@ -0,0 +1,16 @@
+import os
+
+e = os.environ.get('ADMIN_PASSWORD_HASH')
+if not e:
+ print('ADMIN_PASSWORD_HASH: UNSET')
+else:
+ if e.startswith('$2y$'):
+ p = '$2y$'
+ elif e.startswith('$2b$'):
+ p = '$2b$'
+ else:
+ p = e[:10]
+ print('ADMIN_PASSWORD_HASH: SET, prefix=' + p + ', len=' + str(len(e)))
+
+print('FILAMENTHUB_DB_PATH:', os.environ.get('FILAMENTHUB_DB_PATH'))
+print('ADMIN_COOKIE_SECURE:', os.environ.get('ADMIN_COOKIE_SECURE'))
diff --git a/ANLEITUNG/AMS-Live-Tracking.md b/ANLEITUNG/AMS-Live-Tracking.md
new file mode 100644
index 0000000..83fd572
--- /dev/null
+++ b/ANLEITUNG/AMS-Live-Tracking.md
@@ -0,0 +1,7 @@
+# AMS-Live-Tracking & Verbrauch
+
+- Verbrauch und Restbestand werden live aus AMS-Reports berechnet: Start-Stand wird gemerkt, aktuelle `remain_percent` fließt in Verbrauch (mm/g) und `weight_current` ein.
+- Genauigkeit nur, wenn der Job von Beginn an gesehen wird. Steigen wir später ein, fehlt der Verbrauch vor unserem Einstieg.
+- Abweichungen zur Slicer-Schätzung sind während des Drucks normal; zum Ende sollten sich die Werte annähern.
+- `total_len` aus dem AMS wird verwendet, um Verbrauch/Rest in Metern zu zeigen (siehe AMS-Helper-Seite). Fehlt `total_len`, können keine m-Werte berechnet werden.
+
diff --git a/ANLEITUNG/API.md b/ANLEITUNG/API.md
index 7f9865d..f3904d8 100644
--- a/ANLEITUNG/API.md
+++ b/ANLEITUNG/API.md
@@ -1,4 +1,4 @@
-# API-Dokumentation
+# API-Dokumentation
## Übersicht
FilamentHub bietet eine REST-API für Material, Spool, Drucker und Jobs.
@@ -6,32 +6,32 @@ FilamentHub bietet eine REST-API für Material, Spool, Drucker und Jobs.
## Endpunkte (Beispiele)
### Material
-- `GET /api/materials` – Liste aller Materialien
-- `POST /api/materials` – Neues Material anlegen
-- `GET /api/materials/{id}` – Material abrufen
-- `PUT /api/materials/{id}` – Material aktualisieren
-- `DELETE /api/materials/{id}` – Material löschen
+- `GET /api/materials` - Liste aller Materialien
+- `POST /api/materials` - Neues Material anlegen
+- `GET /api/materials/{id}` - Material abrufen
+- `PUT /api/materials/{id}` - Material aktualisieren
+- `DELETE /api/materials/{id}` - Material löschen
### Spool
-- `GET /api/spools` – Liste aller Spulen
-- `POST /api/spools` – Neue Spule anlegen
-- `GET /api/spools/{id}` – Spule abrufen
-- `PUT /api/spools/{id}` – Spule aktualisieren
-- `DELETE /api/spools/{id}` – Spule löschen
+- `GET /api/spools` - Liste aller Spulen
+- `POST /api/spools` - Neue Spule anlegen
+- `GET /api/spools/{id}` - Spule abrufen
+- `PUT /api/spools/{id}` - Spule aktualisieren
+- `DELETE /api/spools/{id}` - Spule löschen
### Drucker
-- `GET /api/printers` – Liste aller Drucker
-- `POST /api/printers` – Drucker anlegen
-- `GET /api/printers/{id}` – Drucker abrufen
-- `PUT /api/printers/{id}` – Drucker aktualisieren
-- `DELETE /api/printers/{id}` – Drucker löschen
+- `GET /api/printers` - Liste aller Drucker
+- `POST /api/printers` - Drucker anlegen
+- `GET /api/printers/{id}` - Drucker abrufen
+- `PUT /api/printers/{id}` - Drucker aktualisieren
+- `DELETE /api/printers/{id}` - Drucker löschen
### Jobs
-- `GET /api/jobs` – Liste aller Druckjobs
-- `POST /api/jobs` – Job anlegen
-- `GET /api/jobs/{id}` – Job abrufen
-- `PUT /api/jobs/{id}` – Job aktualisieren
-- `DELETE /api/jobs/{id}` – Job löschen
+- `GET /api/jobs` - Liste aller Druckjobs
+- `POST /api/jobs` - Job anlegen
+- `GET /api/jobs/{id}` - Job abrufen
+- `PUT /api/jobs/{id}` - Job aktualisieren
+- `DELETE /api/jobs/{id}` - Job löschen
## Beispiel-Request
```http
@@ -53,3 +53,4 @@ Content-Type: application/json
## Weitere Infos
- Swagger/OpenAPI-Doku unter `/docs` im Webinterface
+
diff --git a/ANLEITUNG/Benutzerverwaltung.md b/ANLEITUNG/Benutzerverwaltung.md
index 07b4557..9034352 100644
--- a/ANLEITUNG/Benutzerverwaltung.md
+++ b/ANLEITUNG/Benutzerverwaltung.md
@@ -1,4 +1,4 @@
-# Benutzerverwaltung
+# Benutzerverwaltung
## Nutzerverwaltung
- Nutzer können im Webinterface angelegt werden (optional, falls Feature aktiviert).
@@ -6,7 +6,7 @@
## Rechte und Rollen
- Standard: Admin, Benutzer, Gast (optional)
-- Rechte können pro Nutzer vergeben werden (z. B. Material anlegen, löschen, nur lesen)
+- Rechte können pro Nutzer vergeben werden (z. B. Material anlegen, löschen, nur lesen)
## Login/Logout
- Login über das Webinterface
@@ -16,3 +16,4 @@
## Hinweise
- Benutzerverwaltung ist optional und kann später aktiviert werden.
- Für produktive Nutzung empfiehlt sich Authentifizierung.
+
diff --git a/ANLEITUNG/Entwicklung.md b/ANLEITUNG/Entwicklung.md
index c7bbc10..0a6246b 100644
--- a/ANLEITUNG/Entwicklung.md
+++ b/ANLEITUNG/Entwicklung.md
@@ -1,11 +1,11 @@
-# Entwickleranleitung
+# Entwickleranleitung
## Projektstruktur
-- `app/` – Backend, Modelle, Routen
-- `frontend/` – UI, Templates, JS/CSS
-- `services/` – externe Schnittstellen
-- `data/` – Datenbank
-- `docs/` – Dokumentation
+- `app/` - Backend, Modelle, Routen
+- `frontend/` - UI, Templates, JS/CSS
+- `services/` - externe Schnittstellen
+- `data/` - Datenbank
+- `docs/` - Dokumentation
## PR-Workflow
1. Forke das Repo
@@ -27,3 +27,4 @@
## Hinweise
- Feedback und Beiträge sind willkommen!
+
diff --git a/ANLEITUNG/Features.md b/ANLEITUNG/Features.md
index bcfa04d..1a816ae 100644
--- a/ANLEITUNG/Features.md
+++ b/ANLEITUNG/Features.md
@@ -1,4 +1,4 @@
-# Erweiterte Features
+# Erweiterte Features
## AMS-Integration
- AMS-Status und Slot-Infos abrufen
@@ -17,7 +17,7 @@
- Charts im Dashboard
## Automatische Zuordnung
-- Beim Druckstart: Material → passende Spule finden
+- Beim Druckstart: Material -> passende Spule finden
- Vorschlag bei mehreren passenden Spulen
- AMS-Slot automatisch verlinken
@@ -28,3 +28,4 @@
## Hinweise
- Erweiterte Features können schrittweise aktiviert werden.
+
diff --git a/ANLEITUNG/Fehlermeldung.md b/ANLEITUNG/Fehlermeldung.md
index ccd9163..7822338 100644
--- a/ANLEITUNG/Fehlermeldung.md
+++ b/ANLEITUNG/Fehlermeldung.md
@@ -1,8 +1,8 @@
-# FilamentHub – Troubleshooting & Lösungen
+# FilamentHub - Troubleshooting & Lösungen
## 1. Datenbank kann nicht beschrieben werden
**Problem:**
-Fehlermeldung: „Keine Schreibrechte auf data/filamenthub.db“
+Fehlermeldung: "Keine Schreibrechte auf data/filamenthub.db"
**Lösung:**
- Prüfe, ob der Benutzer/Container Schreibrechte auf den Ordner `data/` hat.
- Unter Docker: `-v $(pwd)/data:/app/data` und `--user` korrekt setzen.
@@ -22,7 +22,7 @@ Tabellen fehlen, App startet ohne Fehler, aber keine Datenbankstruktur.
## 3. Alembic nicht installiert / nicht gefunden
**Problem:**
-Fehlermeldung: „alembic: command not found“
+Fehlermeldung: "alembic: command not found"
**Lösung:**
- Im venv: `pip install alembic`
- Prüfe, ob das venv aktiviert ist (`.venv\Scripts\Activate.ps1` unter Windows).
@@ -32,11 +32,16 @@ Fehlermeldung: „alembic: command not found“
## 4. Webinterface nicht erreichbar
**Problem:**
-Browser zeigt „Seite nicht gefunden“ oder „Connection refused“.
+Browser zeigt "Seite nicht gefunden" oder "Connection refused".
**Lösung:**
- Prüfe, ob die App läuft (`python run.py` oder `uvicorn app.main:app`).
+<<<<<<< Updated upstream
- Prüfe die Portfreigabe (Standard: 8080).
- Unter Docker: Port mit `-p 8000:8000` freigeben.
+=======
+- Prüfe die Portfreigabe (Standard: 8085).
+- Unter Docker: Port mit `-p 8085:8085` freigeben.
+>>>>>>> Stashed changes
---
@@ -54,7 +59,7 @@ Keine Einträge im Logfile, Debugcenter zeigt nichts an.
**Problem:**
Nach dem ersten Start sind keine Drucker, Materialien oder Spulen vorhanden.
**Lösung:**
-- Führe das zentrale Setup-Skript aus (z. B. `python setup.py`), das Beispiel-Daten einträgt.
+- Führe das zentrale Setup-Skript aus (z. B. `python setup.py`), das Beispiel-Daten einträgt.
- Alternativ: Manuell im Webinterface anlegen.
---
@@ -66,3 +71,4 @@ Fehlermeldung beim Start, Container beendet sich sofort.
- Prüfe die Logs mit `docker logs `.
- Prüfe, ob alle Umgebungsvariablen und Volumes korrekt gesetzt sind.
- Stelle sicher, dass alle Abhängigkeiten installiert sind (`requirements.txt`).
+
diff --git a/ANLEITUNG/Handbuch.md b/ANLEITUNG/Handbuch.md
index 01584ca..afc30d5 100644
--- a/ANLEITUNG/Handbuch.md
+++ b/ANLEITUNG/Handbuch.md
@@ -1,4 +1,4 @@
-# FilamentHub – Handbuch
+# FilamentHub - Handbuch
Willkommen zum FilamentHub-Handbuch! Hier findest du alle wichtigen Anleitungen und Dokumentationen für Installation, Nutzung, Entwicklung und Fehlerbehebung.
@@ -15,4 +15,14 @@ Willkommen zum FilamentHub-Handbuch! Hier findest du alle wichtigen Anleitungen
- [Fehlerbehebung](Troubleshooting.md)
- [Release/Deployment](Release.md)
-Jeder Punkt ist als eigene Datei im Ordner `ANLEITUNG` abgelegt und kann direkt auf GitHub angezeigt werden.
\ No newline at end of file
+<<<<<<< Updated upstream
+Jeder Punkt ist als eigene Datei im Ordner `ANLEITUNG` abgelegt und kann direkt auf GitHub angezeigt werden.
+=======
+## Weitere Anleitungen
+
+- [Installation.md](Installation.md): Installationsanleitung für Windows, Linux, Docker, Unraid
+- [Unraid_Image_bauen.md](Unraid_Image_bauen.md): Schritt-für-Schritt-Anleitung zum Bau und Start des Docker-Images auf Unraid
+
+Jeder Punkt ist als eigene Datei im Ordner `ANLEITUNG` abgelegt und kann direkt auf GitHub angezeigt werden.
+
+>>>>>>> Stashed changes
diff --git a/ANLEITUNG/Installation.md b/ANLEITUNG/Installation.md
index a67c1c4..176d485 100644
--- a/ANLEITUNG/Installation.md
+++ b/ANLEITUNG/Installation.md
@@ -1,3 +1,4 @@
+<<<<<<< Updated upstream
# Installationsanleitung
## Voraussetzungen
@@ -60,15 +61,380 @@
2. Image bauen oder aus Registry laden
3. Ports und Volumes konfigurieren (`/data` und `/logs` als persistente Volumes)
4. Container starten
+=======
+# FilamentHub - Installationsanleitung
+
+## Voraussetzungen
+- **Für lokale Installation:** Python 3.10 oder neuer
+- **Für Docker/Unraid:** Docker oder Docker Compose
+- Git (optional, zum Klonen des Repositories)
+
+---
+
+## Installation mit Docker (Empfohlen)
+
+### Docker Compose (Unraid / Linux / NAS)
+
+**1. Dateien vorbereiten**
+```bash
+# Projektverzeichnis erstellen
+mkdir -p /mnt/user/appdata/filamenthub
+cd /mnt/user/appdata/filamenthub
+
+# Projekt-Dateien hochladen (siehe unten)
+```
+
+**2. Erforderliche Dateien**
+
+Alle benötigten Dateien sind bereits im Repository enthalten:
+- `docker-compose.yml`
+- `Dockerfile`
+- `.env` (enthält Fake-Hash, muss für Admin-Zugang überschrieben werden)
+- `entrypoint.sh`
+- `alembic.ini`
+- `config.yaml`
+- `requirements.txt`
+- Ordner: `app/`, `alembic/`, `frontend/`, `services/`, `utils/`
+
+> **Hinweis für Nutzer:** Die App funktioniert sofort ohne Konfiguration!
+> Der Admin-Bereich ist optional und nur für Entwickler/Administratoren.
+
+**3. Admin-Bereich (Nur für Entwickler - überspringen für normale Nutzer)**
+
+> **Hinweis für Nutzer:** Der Admin-Bereich ist **NICHT** für normale Benutzer!
+> **Die App funktioniert vollständig ohne Admin-Zugang.** Überspringen Sie diesen Schritt.
+
+> **Hinweis für Entwickler:** Die `.env` Datei im Repository enthält einen Fake-Hash.
+> Verwenden Sie Ihre separate `.env` mit dem echten Admin-Hash auf Ihrem Server.
+> **Admin-Zugang wird nur auf Anfrage beim Entwickler vergeben.**
+
+**4. Container bauen und starten**
+
+```bash
+# Image bauen (ohne Cache für sauberen Build)
+docker build --no-cache -t filamenthub .
+
+# Mit Docker Compose starten
+docker-compose up -d
+
+# Logs anschauen
+docker-compose logs -f
+```
+
+**5. Container verwalten**
+
+```bash
+# Status prüfen
+docker-compose ps
+
+# Logs anzeigen
+docker-compose logs -f
+
+# Container neu starten
+docker-compose restart
+
+# Container stoppen
+docker-compose down
+
+# Container neu bauen nach Code-Änderungen
+docker-compose down
+docker build --no-cache -t filamenthub .
+docker-compose up -d
+```
+
+**6. Health Check**
+```bash
+curl http://localhost:8085/health
+# Erwartete Antwort: {"status":"healthy","service":"filamenthub"}
+```
+
+**7. Zugriff**
+- **Web-Interface:** `http://:8085`
+- **Admin-Panel (Developer only):** `http://:8085/admin`
+- **API-Docs:** `http://:8085/docs`
+
+---
+
+## Docker-Konfiguration (docker-compose.yml)
+
+Aktuelle empfohlene Konfiguration:
+
+```yaml
+services:
+ filamenthub:
+ container_name: filamenthub
+ image: filamenthub:latest
+ restart: unless-stopped
+ network_mode: host
+ env_file:
+ - .env
+ environment:
+ FILAMENTHUB_DB_PATH: /app/data/filamenthub.db
+ PYTHONPATH: /app
+ volumes:
+ - /mnt/user/appdata/filamenthub/data:/app/data
+ - /mnt/user/appdata/filamenthub/logs:/app/logs
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8085/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 40s
+ entrypoint: ["./entrypoint.sh"]
+```
+
+**Wichtige Punkte:**
+- `network_mode: host` - Verwendet Host-Netzwerk (keine Port-Mappings nötig)
+- `env_file: - .env` - Lädt Umgebungsvariablen aus .env Datei
+- Volumes für persistente Daten (`data/`) und Logs (`logs/`)
+- Health Check prüft alle 30s ob die App läuft
+
+---
+
+## Installation lokal (Entwicklung)
+
+### Windows
+
+**1. Repository klonen**
+```powershell
+git clone https://github.com/your-repo/FilamentHub.git
+cd FilamentHub
+```
+
+**2. Virtuelle Umgebung erstellen**
+```powershell
+python -m venv .venv
+.venv\Scripts\Activate.ps1
+```
+
+**3. Abhängigkeiten installieren**
+```powershell
+pip install -r requirements.txt
+```
+
+**4. .env Datei erstellen**
+```powershell
+# In PowerShell
+@"
+ADMIN_PASSWORD_HASH=`$2b`$12`$L5hUkHdH.NH6CeC6FiH0o.lpnNpRA3zaAho6QyerwP3ZQF19xqmmq
+ADMIN_COOKIE_SECURE=false
+"@ | Out-File -Encoding UTF8 .env
+```
+
+> **Hinweis:** Platzhalter-Hash! Generiere deinen eigenen Hash für den Admin-Zugang.
+
+**5. Datenbank initialisieren**
+```powershell
+# Ordner erstellen
+mkdir -p data, logs
+
+# Migrationen ausführen
+alembic upgrade head
+```
+
+**6. App starten**
+```powershell
+python run.py
+```
+
+**7. Webinterface öffnen**
+- http://localhost:8085
+
+### Linux / Raspberry Pi
+
+**1. Repository klonen**
+```bash
+git clone https://github.com/your-repo/FilamentHub.git
+cd FilamentHub
+```
+
+**2. Virtuelle Umgebung erstellen**
+```bash
+python3 -m venv .venv
+source .venv/bin/activate
+```
+
+**3. Abhängigkeiten installieren**
+```bash
+pip install -r requirements.txt
+```
+
+**4. .env Datei erstellen**
+```bash
+cat > .env << 'EOF'
+ADMIN_PASSWORD_HASH=$2b$12$L5hUkHdH.NH6CeC6FiH0o.lpnNpRA3zaAho6QyerwP3ZQF19xqmmq
+ADMIN_COOKIE_SECURE=false
+EOF
+```
+
+> **Hinweis:** Platzhalter-Hash! Generiere deinen eigenen Hash für den Admin-Zugang.
+
+**5. Datenbank initialisieren**
+```bash
+# Ordner erstellen
+mkdir -p data logs
+
+# Migrationen ausführen
+alembic upgrade head
+```
+
+**6. App starten**
+```bash
+python run.py
+```
+
+**7. Webinterface öffnen**
+- http://localhost:8085
+
+---
+
+## Datenbank-Migrationen (Alembic)
+
+FilamentHub nutzt Alembic für Datenbank-Schema-Updates.
+
+### Lokale Installation
+
+**Windows:**
+```powershell
+.venv\Scripts\Activate.ps1
+alembic upgrade head
+```
+
+**Linux/Pi:**
+```bash
+source .venv/bin/activate
+alembic upgrade head
+```
+
+### Docker
+
+Migrationen werden automatisch beim Container-Start ausgeführt durch `entrypoint.sh`.
+
+**Manuelle Migration im laufenden Container:**
+```bash
+docker exec -it filamenthub alembic upgrade head
+```
+
+**Migration mit temporärem Container:**
+```bash
+docker run --rm \
+ -v /mnt/user/appdata/filamenthub/data:/app/data \
+ filamenthub \
+ alembic upgrade head
+```
+
+### Bestehende Datenbank stampen
+
+Falls du eine existierende Datenbank hast (ohne Alembic-Versionierung):
+
+```bash
+# Im Container
+docker exec -it filamenthub alembic stamp head
+
+# Oder lokal
+alembic stamp head
+```
+
+---
+>>>>>>> Stashed changes
## Ports und Konfiguration
- Standard-Port: 8080
- Konfigurierbar über Umgebungsvariablen oder `config.yaml`
## Troubleshooting
+<<<<<<< Updated upstream
- Schreibrechte auf `data/` und `logs/` sicherstellen
- Bei Problemen siehe [Fehlermeldung.md](Fehlermeldung.md)
+=======
+
+### Container startet nicht
+
+**1. Logs prüfen:**
+```bash
+docker logs filamenthub
+# oder
+docker-compose logs
+```
+
+**2. Häufige Probleme:**
+
+**Problem:** `ADMIN_PASSWORD_HASH must be set in environment`
+- **Lösung:** `.env` Datei fehlt oder ist nicht korrekt
+- Erstelle `.env` mit `ADMIN_PASSWORD_HASH=...`
+
+**Problem:** `table material already exists`
+- **Lösung:** Bestehende Datenbank ohne Alembic-Versionierung
+- Lösung 1: Datenbank löschen `rm data/filamenthub.db`
+- Lösung 2: Datenbank stampen `docker exec -it filamenthub alembic stamp head`
+
+**Problem:** `slice bounds out of range` (Docker Compose Panic)
+- **Lösung:** Docker Compose Bug - verwende `docker build` + `docker run` statt `docker-compose build`
+
+**Problem:** Port 8085 already in use
+- **Lösung:** Anderen Container/Prozess stoppen oder Port in `config.yaml` ändern
+
+### Schreibrechte
+
+**Linux/Unraid:**
+```bash
+# Verzeichnisse erstellen und Rechte setzen
+mkdir -p /mnt/user/appdata/filamenthub/data
+mkdir -p /mnt/user/appdata/filamenthub/logs
+chmod 777 /mnt/user/appdata/filamenthub/data
+chmod 777 /mnt/user/appdata/filamenthub/logs
+```
+
+### Datenbank zurücksetzen
+
+**WARNUNG: Alle Daten gehen verloren!**
+
+```bash
+# Container stoppen
+docker-compose down
+
+# Backup erstellen
+cp data/filamenthub.db data/filamenthub.db.backup
+
+# Datenbank löschen
+rm data/filamenthub.db
+
+# Neu starten
+docker-compose up -d
+```
+
+### Admin-Passwort vergessen (Developer/Advanced)
+
+> **Hinweis:** Nur für Entwickler/Administratoren relevant.
+
+**Neues Passwort generieren:**
+
+```bash
+# Python BCrypt installieren (falls nicht vorhanden)
+pip install bcrypt
+
+# Neuen Hash generieren
+python3 -c "import bcrypt; pw = input('Neues Passwort: ').encode(); print('ADMIN_PASSWORD_HASH=' + bcrypt.hashpw(pw, bcrypt.gensalt()).decode())"
+
+# Hash in .env Datei eintragen
+# Container neu starten
+docker-compose restart
+```
+
+---
+>>>>>>> Stashed changes
## Weiterführende Links
- [Handbuch](Handbuch.md)
- [API-Dokumentation](API.md)
+<<<<<<< Updated upstream
+=======
+- [Fehlerbehebung](Fehlermeldung.md)
+- [GitHub Issues](https://github.com/your-repo/FilamentHub/issues)
+
+---
+
+**Letzte Aktualisierung:** 2025-12-25
+**Version:** 0.1.0
+
+>>>>>>> Stashed changes
diff --git a/ANLEITUNG/Integration.md b/ANLEITUNG/Integration.md
index ee1060c..dedd8e0 100644
--- a/ANLEITUNG/Integration.md
+++ b/ANLEITUNG/Integration.md
@@ -1,4 +1,4 @@
-# Integration externer Systeme
+# Integration externer Systeme
## Bambu Cloud Integration
- API-Key und Cloud-Seriennummer im Drucker hinterlegen
@@ -32,3 +32,4 @@ mqtt:
## Hinweise
- Integration ist optional und kann schrittweise aktiviert werden.
- Für Tests können Dummy-Daten genutzt werden.
+
diff --git a/ANLEITUNG/Kurz anleitung.md b/ANLEITUNG/Kurz anleitung.md
index da8ba3a..dd0ae62 100644
--- a/ANLEITUNG/Kurz anleitung.md
+++ b/ANLEITUNG/Kurz anleitung.md
@@ -1,10 +1,16 @@
-# FilamentHub – Kurzanleitung
+# FilamentHub - Kurzanleitung
## 1. Drucker hinzufügen
+<<<<<<< Updated upstream
1. Öffne das Webinterface (z. B. http://localhost:8000).
2. Navigiere zur Seite „Drucker“.
3. Klicke auf „Neuen Drucker hinzufügen“.
+=======
+1. Öffne das Webinterface (z. B. http://localhost:8085).
+2. Navigiere zur Seite "Drucker".
+3. Klicke auf "Neuen Drucker hinzufügen".
+>>>>>>> Stashed changes
4. Trage die Drucker-Daten ein:
- Name
- Typ (Bambu, Klipper, Manual)
@@ -14,16 +20,16 @@
## 2. Filament/Material verwalten
-1. Gehe zur Seite „Materialien“.
-2. Klicke auf „Neues Material hinzufügen“.
+1. Gehe zur Seite "Materialien".
+2. Klicke auf "Neues Material hinzufügen".
3. Gib die Materialdaten ein:
- Name, Typ, Farbe, Hersteller, Dichte, Durchmesser
4. Speichere das Material. Es erscheint in der Materialliste.
## 3. Spulen verwalten
-1. Gehe zur Seite „Spulen“.
-2. Klicke auf „Neue Spule hinzufügen“.
+1. Gehe zur Seite "Spulen".
+2. Klicke auf "Neue Spule hinzufügen".
3. Wähle das Material aus und gib die Spulendaten ein:
- Gewicht, Farbe, Hersteller, AMS-Slot (optional)
4. Speichere die Spule. Sie erscheint in der Spulenliste.
@@ -35,10 +41,11 @@
## 5. Status und Verbrauch überwachen
-- Im Dashboard siehst du den aktuellen Verbrauch, Restgewicht und Warnungen (z. B. „Spule fast leer“).
+- Im Dashboard siehst du den aktuellen Verbrauch, Restgewicht und Warnungen (z. B. "Spule fast leer").
- Die Historie zeigt, wann und auf welchem Drucker eine Spule zuletzt genutzt wurde.
## 6. Debugcenter nutzen
-- Über die Seite „Debug“ kannst du Logs und Systemstatus einsehen.
+- Über die Seite "Debug" kannst du Logs und Systemstatus einsehen.
- Live-Logs werden per Websocket gestreamt.
+
diff --git a/ANLEITUNG/README.en.md b/ANLEITUNG/README.en.md
new file mode 100644
index 0000000..5a571e2
--- /dev/null
+++ b/ANLEITUNG/README.en.md
@@ -0,0 +1,88 @@
+
+
+
+
+FilamentHub
+Modern, local filament management for Bambu, Klipper & standalone printers.
+
+
+
+
+
+
+
+
+
+
+
+> **German version:** [README.md](README.md)
+
+---
+
+## Features
+
+### Printer Management
+- Overview of all registered printers
+- Live status, temperatures, current job
+- Print history, usage data, MQTT for Bambu (LAN)
+
+### Filament Management
+- Stock with vendor, color, material, remaining weight
+- Last usage per printer, consumption data
+- Optional low-stock warnings
+
+### Analytics & Statistics
+- Print time per printer
+- Filament consumption & cost estimation
+- Daily/monthly overviews
+
+### Web UI
+- Structured navigation (Dashboard / Printer / Filament / System)
+- Cards, tables, status badges
+- Dark, calm UI (Unraid-inspired)
+
+### Database & Backups
+- SQLite as integrated local database
+- Debug/Service tab: backup button (ZIP with DB + logs) -> `data/backups/filamenthub_backup_.zip`
+- DB maintenance: VACUUM, table explorer, ad-hoc SELECT
+
+### Debug & Maintenance
+- Debug Center with System, Service, MQTT, Performance, Database tabs
+- Test runner (Smoke/DB/Coverage) against test DB
+- Log management (rotation, list, clear)
+- Backup (DB + logs) with one click
+
+---
+
+## Status & Roadmap
+- Target: stable release **May 2026**
+- Roadmap: [ANLEITUNG/Roadmap.md](ANLEITUNG/Roadmap.md)
+- Handbook: [ANLEITUNG/Handbuch.md](ANLEITUNG/Handbuch.md)
+
+---
+
+## Quickstart (Dev)
+```bash
+python -m venv .venv
+.venv\Scripts\python -m pip install -r requirements.txt # Windows
+# or: source .venv/bin/activate && pip install -r requirements.txt # Linux/Mac
+python run.py # starts FastAPI/uvicorn, default port 8085
+```
+Open the Debug/Service tab (browser on port 8085), test the backup button: ZIP will be in `data/backups/`.
+
+## Quickstart (Docker)
+```bash
+docker build -t filamenthub .
+docker run -d -p 8085:8085 -v $(pwd)/data:/app/data filamenthub
+```
+
+---
+
+## License
+MIT License
+
+---
+
+## Contact
+Built by **d3nn3s08**
+
diff --git a/ANLEITUNG/Release.md b/ANLEITUNG/Release.md
index b55c19b..0cda76a 100644
--- a/ANLEITUNG/Release.md
+++ b/ANLEITUNG/Release.md
@@ -1,10 +1,14 @@
-# Release/Deployment
+# Release/Deployment
## Docker-Image
- Im Projektverzeichnis:
```bash
docker build -t filamenthub .
+<<<<<<< Updated upstream
docker run -d -p 8000:8000 -v $(pwd)/data:/app/data filamenthub
+=======
+ docker run -d -p 8085:8085 -v $(pwd)/data:/app/data filamenthub
+>>>>>>> Stashed changes
```
- Volumes für Datenbank und Logs konfigurieren
@@ -27,3 +31,4 @@
## Hinweise
- Nach jedem Release: Backup und Testlauf durchführen
- Windows-Startskripte (`Start_FilamentHub.bat`/`menu_pro_v3.ps1`) sind nur für lokale Entwicklung; in Docker/Pi nicht erforderlich
+
diff --git a/ANLEITUNG/Roadmap.md b/ANLEITUNG/Roadmap.md
index 514dca0..3256a60 100644
--- a/ANLEITUNG/Roadmap.md
+++ b/ANLEITUNG/Roadmap.md
@@ -1,4 +1,4 @@
-# FilamentHub – Roadmap
+# FilamentHub - Roadmap
## Übersicht
Die Roadmap zeigt die geplanten Phasen und Meilensteine für die Entwicklung von FilamentHub. Sie ist an die persönliche Zeitplanung des Entwicklers angepasst und wird regelmäßig aktualisiert.
@@ -7,72 +7,73 @@ Die Roadmap zeigt die geplanten Phasen und Meilensteine für die Entwicklung von
## Phasen
-### 🧱 PHASE 1 – Grundgerüst
-🟩 Repo anlegen
-🟩 Ordnerstruktur erstellen
-🟩 FastAPI Grundgerüst
-🟩 Dockerfile Stub
-🟩 config.yaml Vorlage
-🟩 Leeres UI im Browser anzeigen
-
-### 🎨 PHASE 2 – UI/Design Dark Mode
-🟩 Dark Theme
-🟩 Karten, Layout
-🟩 Navigation
-🟩 Dashboard Skeleton
-🟩 Tabellen, Icons
-
-### 📦 PHASE 3 – Material & Spulen
-🟩 Materials DB
-🟩 Spulenverwaltung
- UUIDs
-🟩 Restgewicht (manuell)
- Materialsignatur
-
-### 🔌 PHASE 4 – Bambu LAN Integration
-MQTT Parser
-AMS Slots
-Live mm Verbrauch
-„Neue Spule“ Erkennung
-„Alte Spule wieder eingesetzt“ Logik
-Jobs speichern
-
-### ☁️ PHASE 5 – Bambu Cloud Modus
-Spulenwahl manuell
-Job-Ende Erkennung
-
-### 🤖 PHASE 6 – Klipper Support
-Moonraker API
-aktive Spule setzen
-Jobs speichern
-Verbrauch mm → g
-
-### 🧠 PHASE 7 – Verbrauchsengine
-mm → g
-Dichte-Bibliothek
-Toleranzsystem
-Verbrauchshistorie
-
-### 📊 PHASE 8 – Dashboard & Statistiken
-Charts
-Tages-/Monatsverbrauch
-angebrochene Spulen
-Lagerauswertung
-
-### 🐳 PHASE 9 – Deployment
-ZIP Installer
-Docker Multi-Arch
-Unraid Template
-Pi Guide
+### PHASE 1 - Grundgerüst
+- [x] Repo anlegen
+- [x] Ordnerstruktur erstellen
+- [x] FastAPI Grundgerüst
+- [x] Dockerfile Stub
+- [x] config.yaml Vorlage
+- [x] Leeres UI im Browser anzeigen
+
+### PHASE 2 - UI/Design Dark Mode
+- [x] Dark Theme
+- [x] Karten, Layout
+- [x] Navigation
+- [x] Dashboard Skeleton
+- [x] Tabellen, Icons
+
+### PHASE 3 - Material & Spulen
+- [x] Materials DB
+- [x] Spulenverwaltung
+- [ ] UUIDs
+- [x] Restgewicht (manuell)
+- [ ] Materialsignatur
+
+### PHASE 4 - Bambu LAN Integration
+- [ ] MQTT Parser
+- [ ] AMS Slots
+- [ ] Live mm Verbrauch
+- [ ] "Neue Spule" Erkennung
+- [ ] "Alte Spule wieder eingesetzt" Logik
+- [ ] Jobs speichern
+
+### PHASE 5 - Bambu Cloud Modus
+- [ ] Spulenwahl manuell
+- [ ] Job-Ende Erkennung
+
+### PHASE 6 - Klipper Support
+- [ ] Moonraker API
+- [ ] aktive Spule setzen
+- [ ] Jobs speichern
+- [ ] Verbrauch mm -> g
+
+### PHASE 7 - Verbrauchsengine
+- [ ] mm -> g
+- [ ] Dichte-Bibliothek
+- [ ] Toleranzsystem
+- [ ] Verbrauchshistorie
+
+### PHASE 8 - Dashboard & Statistiken
+- [ ] Charts
+- [ ] Tages-/Monatsverbrauch
+- [ ] angebrochene Spulen
+- [ ] Lagerauswertung
+
+### PHASE 9 - Deployment
+- [ ] ZIP Installer
+- [ ] Docker Multi-Arch
+- [ ] Unraid Template
+- [ ] Pi Guide
---
## Zeitplanung
-- bis 22.12 → 4-Schicht, wenig Zeit
-- 22.12–01.01 → Urlaub, Hauptentwicklungszeit
-- ab Januar → 3-Schicht, Wochenenden frei
+- bis 22.12 -> 4-Schicht, wenig Zeit
+- 22.12-01.01 -> Urlaub, Hauptentwicklungszeit
+- ab Januar -> 3-Schicht, Wochenenden frei
---
## Finale Fertigstellung
-**Mai 2026** – Ziel: Ein modernes, automatisches, plattformübergreifendes Filament-Verwaltungssystem.
+**Mai 2026** - Ziel: Ein modernes, automatisches, plattformübergreifendes Filament-Verwaltungssystem.
+
diff --git a/ANLEITUNG/Troubleshooting.md b/ANLEITUNG/Troubleshooting.md
index 8d569c7..dba8dde 100644
--- a/ANLEITUNG/Troubleshooting.md
+++ b/ANLEITUNG/Troubleshooting.md
@@ -1,4 +1,4 @@
-# Fehlerbehebung (Troubleshooting)
+# Fehlerbehebung (Troubleshooting)
## Typische Fehler
- Datenbank kann nicht beschrieben werden
@@ -15,7 +15,7 @@
- Debugcenter im Webinterface nutzen
## Debugcenter
-- Seite „Debug“ für Systemstatus und Live-Logs
+- Seite "Debug" für Systemstatus und Live-Logs
- Websocket-Streaming für Echtzeit-Logs
## Lösungen
@@ -23,3 +23,4 @@
## Hinweise
- Bei Problemen immer zuerst die Logs prüfen!
+
diff --git a/ANLEITUNG/UI.md b/ANLEITUNG/UI.md
index 0e8c95b..77741a2 100644
--- a/ANLEITUNG/UI.md
+++ b/ANLEITUNG/UI.md
@@ -1,4 +1,4 @@
-# UI/Frontend-Anleitung
+# UI/Frontend-Anleitung
## Dashboard
- Übersicht über alle Drucker, Materialien, Spulen und Jobs
@@ -18,3 +18,4 @@
## Hinweise
- UI wird laufend erweitert und kann individuell angepasst werden.
+
diff --git a/ANLEITUNG/UPLOAD_ANLEITUNG.md b/ANLEITUNG/UPLOAD_ANLEITUNG.md
new file mode 100644
index 0000000..c6ef481
--- /dev/null
+++ b/ANLEITUNG/UPLOAD_ANLEITUNG.md
@@ -0,0 +1,176 @@
+# FilamentHub Server Upload Anleitung
+
+## Methode 1: WinSCP Script (Empfohlen für Automatisierung)
+
+### Schritt 1: Server-Daten eintragen
+Öffne `upload_to_server.txt` und passe die Zeile an:
+
+```
+open sftp://DEIN_USERNAME:DEIN_PASSWORD@DEIN_SERVER:22
+```
+
+Beispiel:
+```
+open sftp://denis:MeinPasswort123@filamenthub.example.com:22
+```
+
+Passe auch den Zielpfad an:
+```
+cd /pfad/zu/filamenthub
+```
+
+Beispiel:
+```
+cd /home/denis/filamenthub
+```
+
+### Schritt 2: Upload starten
+Doppelklick auf `upload.bat` oder führe in CMD aus:
+```cmd
+"C:\Program Files (x86)\WinSCP\WinSCP.com" /script=upload_to_server.txt
+```
+
+---
+
+## Methode 2: WinSCP GUI mit Synchronisation
+
+### Schritt 1: Verbindung einrichten
+1. Öffne WinSCP
+2. Erstelle eine neue Site:
+ - File Protocol: SFTP
+ - Host name: dein-server.de
+ - Port: 22
+ - Username: dein_username
+ - Password: dein_password
+3. Verbinden
+
+### Schritt 2: Synchronisieren
+1. Im Menü: Commands -> Synchronize
+2. Local directory: `C:\Users\Denis\Desktop\FilamentHub_Projekt\FilamentHub`
+3. Remote directory: `/home/filamenthub` (oder dein Pfad)
+4. Direction: Local -> Remote
+5. Klick auf "Options"
+6. Bei "Exclude mask" einfügen:
+
+```
+__pycache__/; *.pyc; .venv/; venv/; .pytest_cache/; .git/; logs/; data/*.db; Backup/; *.bak*; tests/; htmlcov/; .coverage
+```
+
+Oder nutze den Inhalt von `winscp_exclude.txt`
+
+7. OK -> Synchronize
+
+---
+
+## Methode 3: PowerShell Script (Für Profis)
+
+### Voraussetzung
+WinSCP .NET Assembly muss installiert sein.
+
+### Nutzung
+```powershell
+.\upload_with_winscp.ps1 -ServerHost "dein-server.de" -Username "denis" -Password "deinpasswort" -RemotePath "/home/filamenthub"
+```
+
+**WICHTIG:** Passe den SSH Fingerprint im Script an! Den Fingerprint erhältst du beim ersten Connect mit WinSCP.
+
+---
+
+## Was wird hochgeladen?
+
+### Haupt-Dateien
+- `run.py` - Haupt-Anwendung
+- `requirements.txt` - Python Dependencies
+- `config.json`, `config.yaml` - Konfiguration
+- `.env` - Umgebungsvariablen (ACHTUNG: Secrets!)
+- `Dockerfile`, `docker-compose.yml` - Container Config
+
+### Verzeichnisse
+- `app/` - Flask/FastAPI Anwendung
+- `services/` - Backend Services
+- `utils/` - Utilities
+- `frontend/` - Frontend Dateien
+- `alembic/` - Datenbank Migrations
+- `scripts/` - Deployment Scripts
+
+### Scripts
+- `*.sh` - Shell Scripts (entrypoint.sh, rebuild.sh, etc.)
+
+---
+
+## Was wird NICHT hochgeladen?
+
+- `.venv/` - Virtual Environment
+- `__pycache__/` - Python Cache
+- `tests/` - Test Dateien
+- `.git/` - Git Repository
+- `logs/` - Log Dateien
+- `data/*.db` - Lokale Datenbank
+- `Backup/` - Backup Ordner
+- `*.bak*` - Backup Dateien
+
+---
+
+## Nach dem Upload
+
+### Auf dem Server ausführen:
+
+```bash
+# In das Verzeichnis wechseln
+cd /home/filamenthub
+
+# Shell Scripts ausführbar machen
+chmod +x *.sh
+
+# Virtual Environment erstellen
+python3 -m venv .venv
+source .venv/bin/activate
+
+# Dependencies installieren
+pip install -r requirements.txt
+
+# Datenbank initialisieren (falls nötig)
+python create_db.py
+
+# Anwendung starten
+python run.py
+
+# ODER mit Docker:
+docker-compose up -d --build
+```
+
+---
+
+## Tipps
+
+### Sicherheit
+- **NIEMALS** `.env` Dateien mit echten Secrets in öffentliche Repos!
+- Nutze `.env.local` für lokale Secrets
+- Überschreibe auf dem Server die `.env` mit echten Produktionsdaten
+
+### Automatisierung
+Du kannst `upload.bat` in einen Scheduled Task einbinden für automatische Uploads.
+
+### Schneller Upload
+Nutze WinSCP's "Keep remote directory up to date" Feature für Live-Sync während der Entwicklung.
+
+---
+
+## Troubleshooting
+
+### "Host key wasn't verified"
+Bei PowerShell Script: SSH Fingerprint im Script anpassen
+
+### "Permission denied"
+- Prüfe Username/Password
+- Prüfe ob SSH Key benötigt wird
+- Prüfe Zielverzeichnis Berechtigungen
+
+### Dateien werden nicht gefunden
+- Prüfe ob du im richtigen Verzeichnis bist
+- Nutze absolute Pfade im Script
+
+### Upload dauert zu lange
+- Nutze Exclude-Liste um unnötige Dateien auszuschließen
+- Komprimiere große Dateien vorher
+
diff --git a/ANLEITUNG/Unraid_Image_bauen.md b/ANLEITUNG/Unraid_Image_bauen.md
new file mode 100644
index 0000000..5603050
--- /dev/null
+++ b/ANLEITUNG/Unraid_Image_bauen.md
@@ -0,0 +1,58 @@
+# Schritte: Unraid Image bauen
+
+Diese Anleitung beschreibt, wie du das Docker-Image für FilamentHub unter Unraid selbst baust und startest.
+
+## 1. Voraussetzungen
+- Unraid mit Docker-Unterstützung
+- Projektordner mit Dockerfile und entrypoint.sh
+
+## 2. Image lokal bauen
+Wechsle in das Projektverzeichnis und führe aus:
+
+```bash
+docker build -t filamenthub .
+```
+
+## 3. Container starten
+Starte den Container mit den gewünschten Volumes und Ports:
+
+```bash
+docker run -d \
+ --name filamenthub \
+ -p 8085:8085 \
+ -v /mnt/user/appdata/filamenthub/data:/app/data \
+ -v /mnt/user/appdata/filamenthub/logs:/app/logs \
+ filamenthub
+```
+
+## 4. Webinterface öffnen
+Rufe im Browser auf: [http://UNRAID-IP:8085](http://UNRAID-IP:8085)
+
+## 5. Hinweise
+- Die Volumes `/app/data` und `/app/logs` werden auf Unraid als persistente Ordner gemountet.
+- Die Datei `entrypoint.sh` muss im Image vorhanden sein.
+- Änderungen am Code erfordern ein erneutes Bauen des Images.
+
+## 6. Optional: Docker Compose
+Siehe Hauptanleitung für ein Compose-Beispiel.
+
+## 7. Vorgehen für ein sauberes Rebuild (Compose)
+Wenn du den Container immer neu und ohne Cache bauen willst:
+
+```bash
+# 1. Container stoppen
+docker-compose down
+
+# 2. Altes Image loeschen
+docker rmi filamenthub:latest
+
+# 3. Neues Image bauen (ohne Cache)
+docker build --no-cache -t filamenthub .
+
+# 4. Container starten
+docker-compose up -d
+
+# 5. Logs checken
+docker-compose logs -f
+```
+
diff --git a/ANLEITUNG/Update_Backup.md b/ANLEITUNG/Update_Backup.md
index 6976787..cd07601 100644
--- a/ANLEITUNG/Update_Backup.md
+++ b/ANLEITUNG/Update_Backup.md
@@ -1,4 +1,4 @@
-# Update & Backup
+# Update & Backup
## Update-Prozess
1. Prüfe, ob neue Versionen verfügbar sind (GitHub Releases oder Pull).
@@ -32,3 +32,4 @@
## Hinweise
- Vor jedem Update Backup anlegen!
- Bei Problemen siehe [Fehlermeldung.md](Fehlermeldung.md)
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index e7b42df..0000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1 +0,0 @@
-Keine Änderungen seit dem letzten Release.
diff --git a/CHANGELOG_EN.md b/CHANGELOG_EN.md
deleted file mode 100644
index 2f2aa4e..0000000
--- a/CHANGELOG_EN.md
+++ /dev/null
@@ -1 +0,0 @@
-No changes since the last release.
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
deleted file mode 100644
index ec00078..0000000
--- a/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,95 +0,0 @@
-# Verhaltenskodex für die FilamentHub-Community
-
-Dieser Verhaltenskodex soll sicherstellen, dass FilamentHub eine freundliche,
-respektvolle und produktive Gemeinschaft bleibt.
-Alle Mitwirkenden – Entwickler, Nutzer, Tester und Reviewer – verpflichten sich,
-diesen Kodex einzuhalten.
-
----
-
-## 🎯 Unser Ziel
-
-Wir möchten eine offene, einladende Community schaffen, in der jeder ohne Angst
-vor Diskriminierung oder Ausgrenzung mitwirken kann.
-FilamentHub soll ein Ort sein, an dem Ideen willkommen sind und Respekt
-grundsätzlich erwartet wird.
-
----
-
-## 🤝 Unser gemeinsames Verhalten
-
-### Wir erwarten von allen:
-- Respektvollen Umgang miteinander
-- Offenheit für Feedback
-- Sinnvolle Diskussionen ohne persönliche Angriffe
-- Hilfsbereitschaft gegenüber neuen Mitgliedern
-- Geduld und konstruktives Verhalten
-
-### Wir tolerieren NICHT:
-- Beleidigungen, Belästigung oder Diskriminierung
-- Aggressives oder provokantes Verhalten
-- Hate-Speech jeglicher Art
-- Veröffentlichung sensibler oder privater Informationen
-- Spam, Werbung oder destruktive Aktionen
-
----
-
-## 🧭 Beispiele für gutes Verhalten
-
-- „Können wir das noch einmal prüfen?“ statt „Das ist falsch.“
-- „Ich verstehe den Ansatz, aber…“ statt „Das macht keinen Sinn.“
-- Neue Nutzer freundlich einführen und unterstützen
-- Probleme klar beschreiben – nicht Personen kritisieren
-
----
-
-## 🚨 Melden von Verstößen
-
-Falls du Zeuge eines Verstoßes wirst oder selbst betroffen bist,
-kontaktiere bitte:
-
-**Projektbetreiber:**
-Denis (GitHub: `d3nn3s08`)
-
-Alle Meldungen werden vertraulich behandelt.
-Es wird keine Form von Vergeltung toleriert.
-
----
-
-## 🔧 Konsequenzen bei Verstößen
-
-Bei Verstößen kann es abhängig von der Schwere zu folgenden Schritten kommen:
-
-1. Freundliche Erinnerung
-2. Offizielle Warnung
-3. Sperrung für bestimmte Bereiche
-4. Ausschluss aus der Projektcommunity
-5. Rücknahme von PR-Rechten oder Issue-Rechten
-
-Das Ziel ist nicht Bestrafung – sondern eine sichere, freundliche Umgebung.
-
----
-
-## 📜 Geltungsbereich
-
-Dieser Verhaltenskodex gilt für:
-
-- GitHub Issues
-- Pull Requests
-- Diskussionen
-- Dokumentation
-- Direkte Kommunikation (z. B. per Kommentar, Nachricht)
-
----
-
-## ❤️ Unser Versprechen
-
-FilamentHub ist ein offenes Projekt.
-Jeder ist willkommen: Anfänger, Experten, Hobby-Drucker, Automations-Freaks,
-Entwickler und alle dazwischen.
-
-Respekt ist die Grundlage, auf der wir gemeinsam etwas Großes bauen.
-
----
-
-Danke, dass du Teil der Community bist!
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index c0acc52..0000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,152 +0,0 @@
-# Beitrag zu FilamentHub
-
-Vielen Dank, dass du überlegst, zu FilamentHub beizutragen!
-Dieses Projekt lebt davon, dass Nutzer Fehler melden, Ideen teilen und Code beitragen.
-Hier findest du alle Infos, um direkt loslegen zu können.
-
----
-
-## 🚀 Wie du beitragen kannst
-
-### 1. Fehler melden (Bug Reports)
-Wenn etwas nicht funktioniert:
-
-1. Öffne ein neues Issue: **Issues → New Issue**
-2. Wähle “Bug Report”
-3. Beschreibe:
- - Was ist passiert?
- - Erwartetes Verhalten?
- - Schritte zur Reproduktion
- - Version / OS / Docker / Druckertyp
-4. Logs oder Screenshots helfen immer.
-
-Bitte zunächst prüfen, ob der Fehler schon gemeldet wurde.
-
----
-
-### 2. Feature Requests
-Wenn du eine Idee für eine Funktion hast:
-
-1. Öffne ein neues Issue → “Feature Request”
-2. Beschreibe klar:
- - Was soll passieren?
- - Welches Problem löst es?
- - Optional: Wie stellst du dir die UI oder API vor?
-
-Große Features werden vorab im Issue diskutiert.
-
----
-
-### 3. Code beitragen (Pull Requests)
-
-Wenn du selbst programmieren möchtest:
-
-1. Forke das Repo
-2. Eigenen Branch erstellen:
- ```bash
- git checkout -b feature/mein-feature
-
-3. Code schreiben
-
-4. Lokal testen:
-python run.py
-
-5. Committen:
-git add .
-git commit -m "Add: Mein Feature"
-
-6. Pushen:
-git push origin feature/mein-feature
-
-7. Pull Request erstellen
-- Einen PR pro Feature
-- Kein gigantischer „Alles-auf-einmal“-PR
-- Struktur des Projektes einhalten
-
-Projektstruktur (Kurzüberblick)
-
-FilamentHub/
-├── app/
-│ ├── main.py
-│ ├── database.py
-│ ├── models/
-│ └── routes/
-├── services/
-├── frontend/
-├── data/
-├── docs/
-├── config.yaml
-├── Dockerfile
-└── run.py
-
-🧹 Code Richtlinien
-- Python 3.10+
-- Einheitliche Struktur beachten
-- Keine toten Dateien, kein Debug-Müll
-- Backend folgt FastAPI + SQLModel Best Practices
-- Externe Systeme (Bambu, Klipper) immer mocken
-- Kommentare bei komplexer Logik
-- Neue Modelle → PR muss DB-Änderungen erwähnen
-
-🔍 Tests
-
-Tests sind in diesem Stadium noch minimal.
-Wenn du Tests hinzufügst:
-
-pytest verwenden
-
-API-Funktionen isoliert testen
-
-Keine echten Drucker ansprechen
-
-Keine realen MQTT/Cloud-Aufrufe
-
-Bambu und Klipper über Mocks simulieren
-
-📝 Dokumentation
-
-Wenn du neue Funktionen hinzufügst:
-
-API-Endpunkte im PR erwähnen
-
-Kurz beschreiben, wie es benutzt wird
-
-Bei UI-Änderungen → Screenshot einfügen
-
-Bei Strukturänderungen → README anpassen
-
-❤️ Community & Support
-
-Wenn du Fragen hast:
-
-Issue öffnen
-
-Oder im PR kommentieren
-
-Feedback geben ist immer willkommen
-
-Jeder ist willkommen – Anfänger, Fortgeschrittene und Profis.
-
-📜 Lizenz
-
-Durch das Einsenden eines Pull Requests erklärst du dich einverstanden,
-dass dein Code unter der MIT-Lizenz veröffentlicht wird.
-
-
-
-Danke, dass du FilamentHub unterstützt! 🚀
-
----
-
-# ✔️ Datei ist fertig!
-Wenn du möchtest, packe ich sie dir direkt:
-
-👉 in eine ZIP
-👉 in deine Repo-Struktur
-👉 als GitHub-kompatibel formatierte Datei mit Badge
-👉 möchte ich sie direkt in *deiner* README verlinken?
-
-Sag einfach:
-
-**„Bitte in mein Projekt integrieren“** oder
-**„Mach mir das ZIP fertig“**
diff --git a/Dockerfile b/Dockerfile
index 3d27864..a2ef954 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,7 +2,7 @@ FROM python:3.12-slim-bookworm
WORKDIR /app
-# Build-Tools f�r ARM installieren
+# Build-Tools für ARM installieren + curl für healthcheck
RUN apt-get update && apt-get install -y \
gcc \
g++ \
@@ -10,20 +10,36 @@ RUN apt-get update && apt-get install -y \
python3-dev \
libffi-dev \
libssl-dev \
+ curl \
&& rm -rf /var/lib/apt/lists/*
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
-COPY . .
+# Copy application files explicitly
+COPY app/ /app/app/
+COPY utils/ /app/utils/
+COPY services/ /app/services/
+COPY alembic/ /app/alembic/
+COPY alembic.ini /app/
+COPY entrypoint.sh /app/
+COPY run.py /app/
+COPY config.yaml /app/
+COPY frontend/ /app/frontend/
+
EXPOSE 8085
ENV FILAMENTHUB_DB_PATH=/app/data/filamenthub.db
ENV PYTHONPATH=/app
-RUN mkdir -p /app/data /app/logs && \
+# Standard-Versionsbezeichnungen (überschreibbar via docker-compose.yml)
+ENV APP_VERSION="Beta v1.6 · FilamentHub"
+ENV DESIGN_VERSION="Design Beta-1.0"
+
+RUN mkdir -p /app/data /app/logs /app/app/logging && \
sed -i 's/\r$//' /app/entrypoint.sh && \
- chmod +x /app/entrypoint.sh
+ chmod +x /app/entrypoint.sh && \
+ python -m compileall /app/app -q || true
ENTRYPOINT ["./entrypoint.sh"]
diff --git a/LICENSE.md b/LICENSE.md
deleted file mode 100644
index e88fe9b..0000000
--- a/LICENSE.md
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2025 Denis
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/README.en.md b/README.en.md
index ba70262..ca0b468 100644
--- a/README.en.md
+++ b/README.en.md
@@ -3,25 +3,27 @@
FilamentHub
-Open-source dashboard for filament, printer and system management. Local, independent, and inspired by the modern Unraid UI.
-
+
+Open-source dashboard for filament, printer, and system management – local, independent, and inspired by the Unraid UI.
+
-
-
+
+
+
-
+
-
-
+
+
-
-
+
+
@@ -33,28 +35,66 @@
-
+
-
+
+
+----------------------------------------------------
+
+
-
+
+
+
+
+
+
+
+> 🇩🇪 **German version:** [README.md](README.md)
+
+---
+
+## 🚧 Project Status – Public Beta
+
+⚠️ **FilamentHub is currently in a public beta phase (v1.6.0-beta).**
+
+- Core features are stable and ready for active use
+- Database migrations run automatically on startup
+- Job and filament tracking is production-ready
+- APIs, UI, and data models may still change
+- **Regular database backups are strongly recommended**
+
+This beta is intended for technically experienced users and early adopters.
+Feedback, bug reports, and suggestions are highly welcome.
+
---
# 🧩 Project Overview
-FilamentHub originally started as a small, lightweight filament management tool.
-Just local inventory, colors, material and spool usage.
+FilamentHub originally started as a **small, local filament management tool**
+for spools, colors, remaining material, and usage tracking.
-Over time it grew into something much larger:
-a complete **3D printing management dashboard** for printers, filament, analytics and system diagnostics — all running locally and without cloud dependencies.
+Over time, the project grew significantly.
+What began as a simple tool evolved step by step into a full
+**3D printing management dashboard**, covering much more than filament alone:
-The name remained.
-The project evolved.
+- Filament management
+- Printer monitoring
+- System diagnostics
+- MQTT integration
+- Debug and maintenance tools
+- Unraid-inspired web interface
+- Docker-based deployment
+
+The name stayed the same – the scope expanded.
+
+Today, FilamentHub is in a **public beta phase**, focused on
+**stability, data integrity, and a clean technical foundation**.
---
@@ -62,76 +102,115 @@ The project evolved.
## **Printer Management**
- Overview of all registered printers
-- Live status, temperatures, active job
-- Bambu LAN MQTT integration
-- Usage statistics & print history
-- WebSocket-based live monitoring with ping/pong debug tools
+- Live status, temperatures, and current job
+- Bambu LAN MQTT support
+- Print history and usage statistics
+- Stable WebSocket status with ping/pong analysis (debug view)
## **Filament Management**
-- Spool inventory with brand, color, material, remaining length
+- Spool management with manufacturer, color, material, and remaining amount
- Last usage per printer
-- Consumption by job / day / month
-- Cost estimation
-- Low-inventory warnings
+- Consumption per job / day / month
+- Cost estimations
+- Low-stock warnings
-## **Analytics**
+## **Analytics & Statistics**
- Print time per printer
-- Filament usage trends
-- Cost dashboards
-- Material and color distribution
+- Filament consumption over time
+- Cost analysis
+- Distribution by material, color, and machine
-## **Web UI (Unraid-style)**
-- Clean navigation: **Dashboard / Printer / Filament / System / Debug**
-- Cards, tables, status badges
-- Minimalistic and dark UI design
-- Fully local, no cloud dependency
+## **Web Interface (Unraid-inspired)**
+- Clear navigation: **Dashboard / Printer / Filament / System / Debug**
+- Cards, tables, icons, and status badges
+- Dark, calm UI
+- Responsive design for desktop and server environments
## **Database & Backups**
-- Local SQLite database (autogenerated)
-- One-click backup: ZIP (DB + logs) → `data/backups/filamenthub_backup_.zip`
-- DB tools: VACUUM, table browser, quick SQL testing
-
-## **Debug Center**
-- System monitor
-- Service overview
-- Log viewer
-- MQTT activity visualization with sparkline
-- Smoke tests & DB tests
-- Log rotation & cleanup
+- Local SQLite database (created automatically)
+- Integrated backup button
+ → creates a ZIP containing **database + logs**
+ → stored at:
+ `data/backups/filamenthub_backup_.zip`
+- Database tools: VACUUM, table viewer, test queries
+
+## **Debug & Maintenance**
+- Debug center including:
+ - System overview
+ - Service status
+ - Log viewer
+ - MQTT monitor with lock times, ping, last message, sparklines
+ - Test runner (DB tests, smoke tests)
+ - Log rotation and cleanup
+
+---
+
+# 🖼️ Screenshots
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Dashboard · Material · Spools
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Statistics · Details · Jobs
+
+
+
+
+
+
+
+
+
+ UI details · User menu
+
---
# 📅 Status & Roadmap
-- Target for stable release: **May 2026**
-- Roadmap: `ANLEITUNG/Roadmap.md`
-- Documentation: `ANLEITUNG/Handbuch.md`
+- Target for stable version 1.0: **May 2026**
+- Current state: **Public Beta – stable for use, actively developed**
+- Roadmap: [ANLEITUNG/Roadmap.md](ANLEITUNG/Roadmap.md)
+- Manual: [ANLEITUNG/Handbuch.md](ANLEITUNG/Handbuch.md)
---
# 🛠️ Quickstart (Development)
+> ⚠️ Note: This setup is intended for developers and beta testers.
+
```bash
python -m venv .venv
.venv/Scripts/pip install -r requirements.txt # Windows
# or
source .venv/bin/activate && pip install -r requirements.txt # Linux/Mac
-python run.py # Starts API + UI (Port 8080)
-```
-
-
-# Quickstart (Docker)
-```
-docker build -t filamenthub .
-docker run -d -p 8080:8080 -v $(pwd)/data:/app/data filamenthub
-```
-Unter Unraid nutzbar über Docker-Compose oder direkt im Docker-Template.
-
-📄 Lizenz
-
-MIT License
-
-👤 Kontakt
-
-Entwickelt von d3nn3s08
+python run.py # Starts API + UI (Port 8085)
diff --git a/README.md b/README.md
deleted file mode 100644
index 854b625..0000000
--- a/README.md
+++ /dev/null
@@ -1,175 +0,0 @@
-
-
-
-
-FilamentHub
-Open-Source Dashboard für Filament-, Drucker- und Systemverwaltung – lokal, unabhängig und im modernen Unraid-Stil.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-----------------------------------------------------
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-> 🇺🇸 **English version:** [README.en.md](README.en.md)
-
----
-
-# 🧩 Projektübersicht
-
-FilamentHub hat ursprünglich als **kleines, reines Filament-Verwaltungssystem** angefangen.
-Ein einfacher lokaler Manager für Spulen, Farben, Restmengen und Nutzungsdaten.
-
-Mit der Zeit ist das Projekt aber deutlich gewachsen.
-Aus dem ursprünglichen Tool wurde Stück für Stück ein komplettes **3D-Printing-Management-Dashboard**, das heute deutlich mehr abdeckt:
-
-- Filamentverwaltung
-- Druckerüberwachung
-- Systemdiagnose
-- MQTT-Integration
-- Debug-Tools
-- Weboberfläche im Unraid-Stil
-- Docker-Bereitstellung
-
-Der Name ist geblieben – das Projekt ist weitergewachsen.
-
----
-
-# 🚀 Funktionen
-
-## **Druckerverwaltung**
-- Übersicht über alle registrierten Drucker
-- Live-Status, Temperaturen, aktueller Job
-- LAN-MQTT für Bambu
-- Druckhistorie & Nutzungsdaten
-- Stabiler WebSocket-Status mit Ping/Pong-Analyse (Debug-Ansicht)
-
-## **Filamentverwaltung**
-- Spulenverwaltung mit Hersteller, Farbe, Material, Restmenge
-- Letzte Nutzung je Drucker
-- Verbrauch nach Job / Tag / Monat
-- Kostenabschätzungen
-- Warnungen bei niedrigem Bestand
-
-## **Analyse & Statistiken**
-- Druckzeit pro Drucker
-- Filamentverbrauch pro Zeitraum
-- Kostenübersichten
-- Verteilung nach Material, Farbe, Maschine
-
-## **Weboberfläche (Unraid-inspiriert)**
-- Klare Navigation: **Dashboard / Printer / Filament / System / Debug**
-- Karten, Tabellen, Icons, Statusbadges
-- Dunkles, ruhiges UI
-- Responsive Design für Desktop & Server-Umgebungen
-
-## **Datenbank & Backups**
-- Lokale SQLite-Datenbank (automatisch angelegt)
-- Integrierter Backup-Button
- → erstellt ZIP mit **DB + Logs**
- → Ablage unter: `data/backups/filamenthub_backup_.zip`
-- Datenbank-Tools: VACUUM, Tabellenviewer, Test-Selekte
-
-## **Debug & Wartung**
-- Debug-Center mit:
- - Systemübersicht
- - Service-Status
- - Log-Viewer
- - MQTT-Monitor mit Sperrzeiten, Ping, Last-Message, Sparkline
- - Test-Runner (DB-Tests, Smoke-Tests)
- - Logrotation & Säuberung
-# 🖼️ Screenshots
-
-## 📊 Dashboard / Alpa
-
-
-
-
----
-
-## 🖨️ Druckerübersicht
-
-
-
-
----
-
-## 🧵 Filamentverwaltung
-
-
-
-
----
-
-## 🧰 System / Debug-Center
-
-
-
- ---
-
-# 🧱 AMS Übersicht
-
-
-
-
-
-
----
-
-# 📅 Status & Roadmap
-
-- Ziel für stabile 1.0: **Mai 2026**
-- Aktueller Entwicklungsstand: funktionsfähig, viele Module im Aufbau
-- Roadmap: [ANLEITUNG/Roadmap.md](ANLEITUNG/Roadmap.md)
-- Handbuch: [ANLEITUNG/Handbuch.md](ANLEITUNG/Handbuch.md)
-
----
-
-# 🛠️ Quickstart (Development)
-
-```bash
-python -m venv .venv
-.venv/Scripts/pip install -r requirements.txt # Windows
-# oder
-source .venv/bin/activate && pip install -r requirements.txt # Linux/Mac
-
-python run.py # Startet API + UI (Port 8080)
diff --git a/UTF8 b/UTF8
new file mode 100644
index 0000000..56d27be
--- /dev/null
+++ b/UTF8
@@ -0,0 +1 @@
+-Encoding
diff --git a/alembic/README b/alembic/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/alembic/env.py b/alembic/env.py
new file mode 100644
index 0000000..fba6e12
--- /dev/null
+++ b/alembic/env.py
@@ -0,0 +1,79 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+
+from sqlmodel import SQLModel
+from app.models.material import Material
+from app.models.spool import Spool
+from app.models.printer import Printer
+from app.models.job import Job
+target_metadata = SQLModel.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online() -> None:
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connectable = engine_from_config(
+ config.get_section(config.config_ini_section, {}),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ )
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection, target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/alembic/script.py.mako b/alembic/script.py.mako
new file mode 100644
index 0000000..1101630
--- /dev/null
+++ b/alembic/script.py.mako
@@ -0,0 +1,28 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ """Upgrade schema."""
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ """Downgrade schema."""
+ ${downgrades if downgrades else "pass"}
diff --git a/alembic/versions/11f74386f230_initial_tables.py b/alembic/versions/11f74386f230_initial_tables.py
new file mode 100644
index 0000000..c958645
--- /dev/null
+++ b/alembic/versions/11f74386f230_initial_tables.py
@@ -0,0 +1,87 @@
+"""Initial tables
+
+Revision ID: 11f74386f230
+Revises:
+Create Date: 2025-11-27 16:31:33.556174
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision: str = '11f74386f230'
+down_revision: Union[str, Sequence[str], None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ """Upgrade schema."""
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('material',
+ sa.Column('name', sa.String(), nullable=False),
+ sa.Column('brand', sa.String(), nullable=True),
+ sa.Column('color', sa.String(), nullable=True),
+ sa.Column('density', sa.Float(), nullable=False),
+ sa.Column('diameter', sa.Float(), nullable=False),
+ sa.Column('notes', sa.String(), nullable=True),
+ sa.Column('external_id', sa.String(), nullable=True),
+ sa.Column('printer_slot', sa.Integer(), nullable=True),
+ sa.Column('created_at', sa.String(), nullable=True),
+ sa.Column('updated_at', sa.String(), nullable=True),
+ sa.Column('id', sa.String(), nullable=False),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('printer',
+ sa.Column('name', sa.String(), nullable=False),
+ sa.Column('printer_type', sa.String(), nullable=False),
+ sa.Column('ip_address', sa.String(), nullable=True),
+ sa.Column('port', sa.Integer(), nullable=True),
+ sa.Column('cloud_serial', sa.String(), nullable=True),
+ sa.Column('api_key', sa.String(), nullable=True),
+ sa.Column('id', sa.String(), nullable=False),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('spool',
+ sa.Column('material_id', sa.String(), nullable=False),
+ sa.Column('vendor_id', sa.String(), nullable=True),
+ sa.Column('weight_full', sa.Float(), nullable=False),
+ sa.Column('weight_empty', sa.Float(), nullable=False),
+ sa.Column('weight_current', sa.Float(), nullable=True),
+ sa.Column('status', sa.String(), nullable=True),
+ sa.Column('location', sa.String(), nullable=True),
+ sa.Column('label', sa.String(), nullable=True),
+ sa.Column('external_id', sa.String(), nullable=True),
+ sa.Column('printer_slot', sa.Integer(), nullable=True),
+ sa.Column('created_at', sa.String(), nullable=True),
+ sa.Column('updated_at', sa.String(), nullable=True),
+ sa.Column('id', sa.String(), nullable=False),
+ sa.ForeignKeyConstraint(['material_id'], ['material.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('job',
+ sa.Column('printer_id', sa.String(), nullable=False),
+ sa.Column('spool_id', sa.String(), nullable=True),
+ sa.Column('name', sa.String(), nullable=False),
+ sa.Column('filament_used_mm', sa.Float(), nullable=False),
+ sa.Column('filament_used_g', sa.Float(), nullable=False),
+ sa.Column('started_at', sa.DateTime(), nullable=False),
+ sa.Column('finished_at', sa.DateTime(), nullable=True),
+ sa.Column('id', sa.String(), nullable=False),
+ sa.ForeignKeyConstraint(['printer_id'], ['printer.id'], ),
+ sa.ForeignKeyConstraint(['spool_id'], ['spool.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ """Downgrade schema."""
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('job')
+ op.drop_table('spool')
+ op.drop_table('printer')
+ op.drop_table('material')
+ # ### end Alembic commands ###
diff --git a/alembic/versions/20231202_add_spool_usage_fields.py b/alembic/versions/20231202_add_spool_usage_fields.py
new file mode 100644
index 0000000..cec187e
--- /dev/null
+++ b/alembic/versions/20231202_add_spool_usage_fields.py
@@ -0,0 +1,31 @@
+"""Add spool usage tracking fields
+
+Revision ID: 20231202_add_spool_usage_fields
+Revises: 20231202_ams_fields
+Create Date: 2025-12-02 22:00:00
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "20231202_add_spool_usage_fields"
+down_revision = "20231202_ams_fields"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.add_column("spool", sa.Column("used_count", sa.Integer(), nullable=False, server_default="0"))
+ op.add_column("spool", sa.Column("first_seen", sa.Text(), nullable=True))
+ op.add_column("spool", sa.Column("last_slot", sa.Integer(), nullable=True))
+ # remove server_default after backfill
+ with op.batch_alter_table("spool") as batch_op:
+ batch_op.alter_column("used_count", server_default=None)
+
+
+def downgrade():
+ with op.batch_alter_table("spool") as batch_op:
+ batch_op.drop_column("first_seen")
+ batch_op.drop_column("used_count")
+ batch_op.drop_column("last_slot")
diff --git a/alembic/versions/20231202_ams_fields_spool.py b/alembic/versions/20231202_ams_fields_spool.py
new file mode 100644
index 0000000..629ebd2
--- /dev/null
+++ b/alembic/versions/20231202_ams_fields_spool.py
@@ -0,0 +1,47 @@
+"""add ams fields to spool
+
+Revision ID: 20231202_ams_fields
+Revises: 4e2c1c9d8b3e
+Create Date: 2023-12-02
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = "20231202_ams_fields"
+down_revision = "4e2c1c9d8b3e"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ with op.batch_alter_table("spool") as batch_op:
+ batch_op.add_column(sa.Column("printer_id", sa.String(), nullable=True))
+ batch_op.add_column(sa.Column("ams_slot", sa.Integer(), nullable=True))
+ batch_op.add_column(sa.Column("tag_uid", sa.String(), nullable=True))
+ batch_op.add_column(sa.Column("tray_uuid", sa.String(), nullable=True))
+ batch_op.add_column(sa.Column("tray_color", sa.String(), nullable=True))
+ batch_op.add_column(sa.Column("tray_type", sa.String(), nullable=True))
+ batch_op.add_column(sa.Column("remain_percent", sa.Float(), nullable=True))
+ batch_op.add_column(sa.Column("last_seen", sa.String(), nullable=True))
+ batch_op.create_foreign_key(
+ "fk_spool_printer_id_printer",
+ "printer",
+ ["printer_id"],
+ ["id"],
+ ondelete=None,
+ )
+
+
+def downgrade():
+ with op.batch_alter_table("spool") as batch_op:
+ batch_op.drop_constraint("fk_spool_printer_id_printer", type_="foreignkey")
+ batch_op.drop_column("last_seen")
+ batch_op.drop_column("remain_percent")
+ batch_op.drop_column("tray_type")
+ batch_op.drop_column("tray_color")
+ batch_op.drop_column("tray_uuid")
+ batch_op.drop_column("tag_uid")
+ batch_op.drop_column("ams_slot")
+ batch_op.drop_column("printer_id")
diff --git a/alembic/versions/20231205_add_job_spool_usage.py b/alembic/versions/20231205_add_job_spool_usage.py
new file mode 100644
index 0000000..e2a491f
--- /dev/null
+++ b/alembic/versions/20231205_add_job_spool_usage.py
@@ -0,0 +1,35 @@
+"""Add job_spool_usage table for multi-spool tracking
+
+Revision ID: 20231205_add_job_spool_usage
+Revises: a662aa086a07
+Create Date: 2025-12-05
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = "20231205_add_job_spool_usage"
+down_revision = "a662aa086a07"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.create_table(
+ "job_spool_usage",
+ sa.Column("id", sa.String(), primary_key=True),
+ sa.Column("job_id", sa.String(), sa.ForeignKey("job.id"), nullable=False),
+ sa.Column("spool_id", sa.String(), sa.ForeignKey("spool.id"), nullable=True),
+ sa.Column("slot", sa.Integer(), nullable=True),
+ sa.Column("used_mm", sa.Float(), nullable=False, server_default="0"),
+ sa.Column("used_g", sa.Float(), nullable=False, server_default="0"),
+ sa.Column("order_index", sa.Integer(), nullable=True),
+ )
+ # remove defaults after creation
+ with op.batch_alter_table("job_spool_usage") as batch_op:
+ batch_op.alter_column("used_mm", server_default=None)
+ batch_op.alter_column("used_g", server_default=None)
+
+
+def downgrade():
+ op.drop_table("job_spool_usage")
diff --git a/alembic/versions/20251204_01_add_settings_and_userflag.py b/alembic/versions/20251204_01_add_settings_and_userflag.py
new file mode 100644
index 0000000..b7e4720
--- /dev/null
+++ b/alembic/versions/20251204_01_add_settings_and_userflag.py
@@ -0,0 +1,29 @@
+"""
+Alembic migration: Add settings and userflag tables
+"""
+from typing import Sequence, Union
+# revision identifiers, used by Alembic.
+revision: str = '20251204_01_add_settings_and_userflag'
+down_revision: Union[str, Sequence[str], None] = '20231205_add_job_spool_usage'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+from alembic import op
+import sqlalchemy as sa
+
+def upgrade():
+ op.create_table(
+ 'setting',
+ sa.Column('key', sa.String(), primary_key=True),
+ sa.Column('value', sa.String(), nullable=True),
+ )
+ op.create_table(
+ 'userflag',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('user_id', sa.String(), nullable=False),
+ sa.Column('flag', sa.String(), nullable=False),
+ sa.Column('value', sa.Boolean(), nullable=False, default=False),
+ )
+
+def downgrade():
+ op.drop_table('userflag')
+ op.drop_table('setting')
diff --git a/alembic/versions/20251206_182015_add_printer_model_and_mqtt_version.py b/alembic/versions/20251206_182015_add_printer_model_and_mqtt_version.py
new file mode 100644
index 0000000..cb03ae6
--- /dev/null
+++ b/alembic/versions/20251206_182015_add_printer_model_and_mqtt_version.py
@@ -0,0 +1,24 @@
+"""Add model and mqtt_version to printer"""
+from typing import Sequence, Union
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision: str = "20251206_182015_add_printer_model_and_mqtt_version"
+down_revision: Union[str, Sequence[str], None] = "20251204_01_add_settings_and_userflag"
+branch_labels = None
+depends_on = None
+
+def upgrade():
+ # add with server_default for backfill
+ op.add_column("printer", sa.Column("model", sa.String(length=32), nullable=False, server_default="X1C"))
+ op.add_column("printer", sa.Column("mqtt_version", sa.String(length=8), nullable=False, server_default="311"))
+ # drop defaults after data is populated
+ with op.batch_alter_table("printer") as batch_op:
+ batch_op.alter_column("model", server_default=None)
+ batch_op.alter_column("mqtt_version", server_default=None)
+
+
+def downgrade():
+ op.drop_column("printer", "mqtt_version")
+ op.drop_column("printer", "model")
diff --git a/alembic/versions/20251219_000001_add_power_and_maintenance_to_printer.py b/alembic/versions/20251219_000001_add_power_and_maintenance_to_printer.py
new file mode 100644
index 0000000..12eb8fc
--- /dev/null
+++ b/alembic/versions/20251219_000001_add_power_and_maintenance_to_printer.py
@@ -0,0 +1,25 @@
+"""Add power and maintenance to printer
+
+Revision ID: 20251219_000001_add_power_and_maintenance_to_printer
+Revises: 20251206_182015_add_printer_model_and_mqtt_version
+Create Date: 2025-12-19
+"""
+from typing import Sequence, Union
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision: str = "20251219_000001_add_power_and_maintenance_to_printer"
+down_revision: Union[str, Sequence[str], None] = "20251206_182015_add_printer_model_and_mqtt_version"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.add_column("printer", sa.Column("power_consumption_kw", sa.Float(), nullable=True))
+ op.add_column("printer", sa.Column("maintenance_cost_yearly", sa.Float(), nullable=True))
+
+
+def downgrade():
+ op.drop_column("printer", "maintenance_cost_yearly")
+ op.drop_column("printer", "power_consumption_kw")
diff --git a/alembic/versions/20251220_remove_material_color.py b/alembic/versions/20251220_remove_material_color.py
new file mode 100644
index 0000000..d9d2093
--- /dev/null
+++ b/alembic/versions/20251220_remove_material_color.py
@@ -0,0 +1,24 @@
+"""Remove material color column
+
+Revision ID: 20251220_remove_material_color
+Revises: 20251219_000001_add_power_and_maintenance_to_printer
+Create Date: 2025-12-20 00:00:00.000000
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = "20251220_remove_material_color"
+down_revision = "20251219_000001_add_power_and_maintenance_to_printer"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ op.drop_column("material", "color")
+
+
+def downgrade() -> None:
+ op.add_column(
+ "material",
+ sa.Column("color", sa.String(), nullable=True),
+ )
diff --git a/alembic/versions/20251222_add_job_status.py b/alembic/versions/20251222_add_job_status.py
new file mode 100644
index 0000000..18f2f99
--- /dev/null
+++ b/alembic/versions/20251222_add_job_status.py
@@ -0,0 +1,38 @@
+"""add job status field
+
+Revision ID: 20251222_add_job_status
+Revises: 20251220_remove_material_color
+Create Date: 2025-12-22
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '20251222_add_job_status'
+down_revision = '20251220_remove_material_color'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # Add status column to job table with default value 'running'
+ with op.batch_alter_table('job', schema=None) as batch_op:
+ batch_op.add_column(sa.Column('status', sa.String(), nullable=False, server_default='running'))
+
+ # Set status for existing jobs based on finished_at
+ # If finished_at is set, assume completed; otherwise running
+ op.execute("""
+ UPDATE job
+ SET status = CASE
+ WHEN finished_at IS NOT NULL THEN 'completed'
+ ELSE 'running'
+ END
+ """)
+
+
+def downgrade() -> None:
+ # Remove status column
+ with op.batch_alter_table('job', schema=None) as batch_op:
+ batch_op.drop_column('status')
diff --git a/alembic/versions/20251226_add_is_empty_manufacturer_spool_id.py b/alembic/versions/20251226_add_is_empty_manufacturer_spool_id.py
new file mode 100644
index 0000000..30f071e
--- /dev/null
+++ b/alembic/versions/20251226_add_is_empty_manufacturer_spool_id.py
@@ -0,0 +1,32 @@
+"""Add is_empty and manufacturer_spool_id to spool
+
+Revision ID: 20251226_add_is_empty_manufacturer_spool_id
+Revises: 20251222_add_job_status
+Create Date: 2025-12-26 00:00:00.000000
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = "20251226_add_is_empty_manufacturer_spool_id"
+down_revision = "20251222_add_job_status"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # Add is_empty column with default False
+ op.add_column(
+ "spool",
+ sa.Column("is_empty", sa.Boolean(), nullable=False, server_default="0"),
+ )
+
+ # Add manufacturer_spool_id column
+ op.add_column(
+ "spool",
+ sa.Column("manufacturer_spool_id", sa.String(), nullable=True),
+ )
+
+
+def downgrade() -> None:
+ op.drop_column("spool", "manufacturer_spool_id")
+ op.drop_column("spool", "is_empty")
diff --git a/alembic/versions/20251227_add_spool_number_system.py b/alembic/versions/20251227_add_spool_number_system.py
new file mode 100644
index 0000000..956b7f1
--- /dev/null
+++ b/alembic/versions/20251227_add_spool_number_system.py
@@ -0,0 +1,191 @@
+"""Add spool number system and job snapshots
+
+Revision ID: 20251227_add_spool_number_system
+Revises: 20251226_add_is_empty_manufacturer_spool_id
+Create Date: 2025-12-27 12:00:00.000000
+
+Dieses System implementiert:
+1. Spulen-Nummern (#1, #2, #3...) mit Recycling
+2. Denormalisierte Felder für schnelle Suche (name, vendor, color)
+3. Job-Snapshots für korrekte Historie
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import text
+
+revision = "20251227_add_spool_number_system"
+down_revision = "20251226_add_is_empty_manufacturer_spool_id"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ """
+ Upgrade-Migration:
+ - Fügt Spulen-Nummern-System hinzu
+ - Fügt denormalisierte Felder für Suche hinzu
+ - Fügt Job-Snapshot-Felder hinzu
+ - Migriert bestehende Daten
+ """
+
+ # ========================================
+ # TEIL 1: SPOOL TABLE
+ # ========================================
+
+ print(">> Fuege Spulen-Nummern-System hinzu...")
+
+ # 1. Add spool_number (unique, nullable initially for migration)
+ op.add_column(
+ "spool",
+ sa.Column("spool_number", sa.Integer(), nullable=True),
+ )
+
+ # 2. Add denormalized fields for fast search (ohne JOINs)
+ op.add_column(
+ "spool",
+ sa.Column("name", sa.String(100), nullable=True),
+ )
+ op.add_column(
+ "spool",
+ sa.Column("vendor", sa.String(100), nullable=True),
+ )
+ op.add_column(
+ "spool",
+ sa.Column("color", sa.String(50), nullable=True),
+ )
+
+ print(">> Erstelle Indizes fuer Performance...")
+
+ # 3. Create indexes for performance
+ op.create_index("idx_spool_number", "spool", ["spool_number"], unique=False)
+ op.create_index("idx_spool_name", "spool", ["name"], unique=False)
+ op.create_index("idx_spool_search", "spool", ["name", "vendor", "color"], unique=False)
+ op.create_index("idx_spool_printer_slot", "spool", ["printer_id", "ams_slot"], unique=False)
+
+ # ========================================
+ # TEIL 2: JOB TABLE (Snapshots)
+ # ========================================
+
+ print(">> Fuege Job-Snapshot-Felder hinzu...")
+
+ # 4. Add snapshot fields to job table
+ op.add_column(
+ "job",
+ sa.Column("spool_number", sa.Integer(), nullable=True),
+ )
+ op.add_column(
+ "job",
+ sa.Column("spool_name", sa.String(100), nullable=True),
+ )
+ op.add_column(
+ "job",
+ sa.Column("spool_vendor", sa.String(100), nullable=True),
+ )
+ op.add_column(
+ "job",
+ sa.Column("spool_color", sa.String(50), nullable=True),
+ )
+ op.add_column(
+ "job",
+ sa.Column("spool_created_at", sa.String(), nullable=True),
+ )
+
+ # 5. Create index for job spool_number
+ op.create_index("idx_job_spool_number", "job", ["spool_number"], unique=False)
+
+ # ========================================
+ # TEIL 3: DATA MIGRATION
+ # ========================================
+
+ print(">> Migriere bestehende Daten...")
+
+ connection = op.get_bind()
+
+ # 6. Populate spool_number for existing spools (sequential, by created_at)
+ print(" >> Vergebe Spulen-Nummern...")
+ connection.execute(text("""
+ WITH numbered AS (
+ SELECT id, ROW_NUMBER() OVER (ORDER BY created_at NULLS LAST, id) as num
+ FROM spool
+ )
+ UPDATE spool
+ SET spool_number = (SELECT num FROM numbered WHERE numbered.id = spool.id)
+ """))
+
+ # 7. Populate name, vendor from material table
+ print(" >> Kopiere Material-Daten (name, vendor)...")
+ connection.execute(text("""
+ UPDATE spool
+ SET
+ name = (SELECT name FROM material WHERE material.id = spool.material_id),
+ vendor = (SELECT brand FROM material WHERE material.id = spool.material_id)
+ WHERE spool.material_id IS NOT NULL
+ """))
+
+ # 8. Populate color from tray_color (simplified: use first 6 chars as hex)
+ print(" >> Extrahiere Farben aus tray_color...")
+ connection.execute(text("""
+ UPDATE spool
+ SET color = CASE
+ WHEN tray_color IS NOT NULL AND length(tray_color) >= 6 THEN substr(tray_color, 1, 6)
+ ELSE 'unknown'
+ END
+ WHERE color IS NULL
+ """))
+
+ # 9. Populate job snapshots for existing jobs
+ print(" >> Erstelle Job-Snapshots fuer bestehende Jobs...")
+ connection.execute(text("""
+ UPDATE job
+ SET
+ spool_number = (SELECT spool_number FROM spool WHERE spool.id = job.spool_id),
+ spool_name = (SELECT name FROM spool WHERE spool.id = job.spool_id),
+ spool_vendor = (SELECT vendor FROM spool WHERE spool.id = job.spool_id),
+ spool_color = (SELECT color FROM spool WHERE spool.id = job.spool_id),
+ spool_created_at = (SELECT created_at FROM spool WHERE spool.id = job.spool_id)
+ WHERE job.spool_id IS NOT NULL
+ AND job.spool_number IS NULL
+ """))
+
+ # 10. Add UNIQUE index on spool_number (SQLite doesn't support ADD CONSTRAINT)
+ print(">> Setze UNIQUE index auf spool_number...")
+ op.create_index("uq_spool_number", "spool", ["spool_number"], unique=True)
+
+ print("[OK] Migration erfolgreich abgeschlossen!")
+ print(" >> Spulen haben jetzt Nummern (#1, #2, #3...)")
+ print(" >> Jobs haben Snapshots fuer korrekte Historie")
+ print(" >> Schnelle Suche ohne JOINs aktiviert")
+
+
+def downgrade() -> None:
+ """
+ Downgrade-Migration:
+ - Entfernt alle Änderungen dieser Migration
+ """
+
+ print(">> Entferne Spulen-Nummern-System...")
+
+ # Drop unique index (not constraint, SQLite limitation)
+ op.drop_index("uq_spool_number", "spool")
+
+ # Drop indexes
+ op.drop_index("idx_job_spool_number", "job")
+ op.drop_index("idx_spool_printer_slot", "spool")
+ op.drop_index("idx_spool_search", "spool")
+ op.drop_index("idx_spool_name", "spool")
+ op.drop_index("idx_spool_number", "spool")
+
+ # Drop job columns
+ op.drop_column("job", "spool_created_at")
+ op.drop_column("job", "spool_color")
+ op.drop_column("job", "spool_vendor")
+ op.drop_column("job", "spool_name")
+ op.drop_column("job", "spool_number")
+
+ # Drop spool columns
+ op.drop_column("spool", "color")
+ op.drop_column("spool", "vendor")
+ op.drop_column("spool", "name")
+ op.drop_column("spool", "spool_number")
+
+ print("[OK] Downgrade erfolgreich abgeschlossen!")
diff --git a/alembic/versions/20251228_add_eta_seconds_to_job.py b/alembic/versions/20251228_add_eta_seconds_to_job.py
new file mode 100644
index 0000000..1702d4c
--- /dev/null
+++ b/alembic/versions/20251228_add_eta_seconds_to_job.py
@@ -0,0 +1,26 @@
+"""Add eta_seconds to job table
+
+Revision ID: 20251228_add_eta_seconds_to_job
+Revises: b6901f165641
+Create Date: 2025-12-28 23:10:00.000000
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = "20251228_add_eta_seconds_to_job"
+down_revision = "b6901f165641"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ """Add eta_seconds column to job table."""
+ op.add_column(
+ "job",
+ sa.Column("eta_seconds", sa.Integer(), nullable=True),
+ )
+
+
+def downgrade() -> None:
+ """Remove eta_seconds column from job table."""
+ op.drop_column("job", "eta_seconds")
diff --git a/alembic/versions/20251228_add_filament_start_mm.py b/alembic/versions/20251228_add_filament_start_mm.py
new file mode 100644
index 0000000..b506554
--- /dev/null
+++ b/alembic/versions/20251228_add_filament_start_mm.py
@@ -0,0 +1,44 @@
+"""Add filament_start_mm to job table
+
+Revision ID: 20251228_add_filament_start_mm
+Revises: 20251227_add_spool_number_system
+Create Date: 2025-12-28 12:00:00.000000
+
+Fügt filament_start_mm Feld hinzu für sauberes Filament-Tracking ab layer_num >= 1
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = "20251228_add_filament_start_mm"
+down_revision = "20251227_add_spool_number_system"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ """
+ Upgrade-Migration:
+ - Fügt filament_start_mm Feld zur job Tabelle hinzu
+ """
+ print(">> Fuege filament_start_mm Feld hinzu...")
+
+ op.add_column(
+ "job",
+ sa.Column("filament_start_mm", sa.Float(), nullable=True),
+ )
+
+ print("[OK] Migration erfolgreich abgeschlossen!")
+ print(" >> Job-Tabelle hat jetzt filament_start_mm Feld")
+
+
+def downgrade() -> None:
+ """
+ Downgrade-Migration:
+ - Entfernt filament_start_mm Feld
+ """
+ print(">> Entferne filament_start_mm Feld...")
+
+ op.drop_column("job", "filament_start_mm")
+
+ print("[OK] Downgrade erfolgreich abgeschlossen!")
+
diff --git a/alembic/versions/4e2c1c9d8b3e_add_active_to_printer.py b/alembic/versions/4e2c1c9d8b3e_add_active_to_printer.py
new file mode 100644
index 0000000..27ecbf7
--- /dev/null
+++ b/alembic/versions/4e2c1c9d8b3e_add_active_to_printer.py
@@ -0,0 +1,28 @@
+"""add active flag to printer
+
+Revision ID: 4e2c1c9d8b3e
+Revises: 11f74386f230
+Create Date: 2025-11-27 19:58:00
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision: str = "4e2c1c9d8b3e"
+down_revision: Union[str, Sequence[str], None] = "11f74386f230"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ op.add_column(
+ "printer",
+ sa.Column("active", sa.Boolean(), nullable=False, server_default=sa.text("1")),
+ )
+
+
+def downgrade() -> None:
+ op.drop_column("printer", "active")
diff --git a/alembic/versions/a662aa086a07_add_auto_connect_to_printer.py b/alembic/versions/a662aa086a07_add_auto_connect_to_printer.py
new file mode 100644
index 0000000..d5c3eb0
--- /dev/null
+++ b/alembic/versions/a662aa086a07_add_auto_connect_to_printer.py
@@ -0,0 +1,45 @@
+"""Add auto_connect to printer
+
+Revision ID: a662aa086a07
+Revises: 20231202_add_spool_usage_fields
+Create Date: 2025-12-02 22:18:47.051671
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import text
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'a662aa086a07'
+down_revision: Union[str, Sequence[str], None] = '20231202_add_spool_usage_fields'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ """Upgrade schema."""
+ # ### commands auto generated by Alembic - please adjust! ###
+ # Prüfe, ob die Spalte schon existiert
+ conn = op.get_bind()
+ result = conn.execute(text("PRAGMA table_info(printer);"))
+ columns = [row[1] for row in result]
+ if 'auto_connect' not in columns:
+ op.add_column('printer', sa.Column('auto_connect', sa.Boolean(), nullable=False, server_default=sa.text('0')))
+ print("Spalte 'auto_connect' wurde hinzugefügt.")
+ else:
+ print("Spalte 'auto_connect' ist bereits vorhanden – Migration übersprungen.")
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ """Downgrade schema."""
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.alter_column('spool', 'first_seen',
+ existing_type=sqlmodel.sql.sqltypes.AutoString(),
+ type_=sa.TEXT(),
+ existing_nullable=True)
+ op.drop_column('printer', 'auto_connect')
+ # ### end Alembic commands ###
diff --git a/alembic/versions/b6901f165641_merge_heads.py b/alembic/versions/b6901f165641_merge_heads.py
new file mode 100644
index 0000000..236defc
--- /dev/null
+++ b/alembic/versions/b6901f165641_merge_heads.py
@@ -0,0 +1,28 @@
+"""merge heads
+
+Revision ID: b6901f165641
+Revises: ba95fb93b934, 20251228_add_filament_start_mm
+Create Date: 2025-12-28 21:50:09.045225
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'b6901f165641'
+down_revision: Union[str, Sequence[str], None] = ('ba95fb93b934', '20251228_add_filament_start_mm')
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ """Upgrade schema."""
+ pass
+
+
+def downgrade() -> None:
+ """Downgrade schema."""
+ pass
diff --git a/alembic/versions/ba95fb93b934_add_is_open_to_spool.py b/alembic/versions/ba95fb93b934_add_is_open_to_spool.py
new file mode 100644
index 0000000..aff8d86
--- /dev/null
+++ b/alembic/versions/ba95fb93b934_add_is_open_to_spool.py
@@ -0,0 +1,28 @@
+"""add_is_open_to_spool
+
+Revision ID: ba95fb93b934
+Revises: 20251227_add_spool_number_system
+Create Date: 2025-12-27 18:21:32.452987
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'ba95fb93b934'
+down_revision: Union[str, Sequence[str], None] = '20251227_add_spool_number_system'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ """Add is_open column to spool table."""
+ op.add_column('spool', sa.Column('is_open', sa.Boolean(), nullable=False, server_default='1'))
+
+
+def downgrade() -> None:
+ """Remove is_open column from spool table."""
+ op.drop_column('spool', 'is_open')
diff --git a/app/__pycache__/__init__.cpython-313.pyc b/app/__pycache__/__init__.cpython-313.pyc
deleted file mode 100644
index 3c2f30b..0000000
Binary files a/app/__pycache__/__init__.cpython-313.pyc and /dev/null differ
diff --git a/app/__pycache__/database.cpython-313.pyc b/app/__pycache__/database.cpython-313.pyc
deleted file mode 100644
index 19c2775..0000000
Binary files a/app/__pycache__/database.cpython-313.pyc and /dev/null differ
diff --git a/app/__pycache__/main.cpython-313.pyc b/app/__pycache__/main.cpython-313.pyc
deleted file mode 100644
index 60972b8..0000000
Binary files a/app/__pycache__/main.cpython-313.pyc and /dev/null differ
diff --git a/app/admin.py b/app/admin.py
new file mode 100644
index 0000000..df7e7e0
--- /dev/null
+++ b/app/admin.py
@@ -0,0 +1,27 @@
+import logging
+from typing import Optional
+
+logger = logging.getLogger("app")
+
+_enabled: bool = False
+_admin_hash: Optional[str] = None
+
+
+def enable_admin(hash_value: str) -> None:
+ """Aktiviere optionalen Admin-Modus (nur beim Startup).
+
+ Diese Funktion speichert nur den gehashten Password-String und
+ setzt ein internes Flag. Keine weiteren Side-Effects.
+ """
+ global _enabled, _admin_hash
+ _admin_hash = hash_value
+ _enabled = True
+ logger.info("Admin enabled via environment variable")
+
+
+def is_admin_enabled() -> bool:
+ return _enabled
+
+
+def get_admin_hash() -> Optional[str]:
+ return _admin_hash
diff --git a/app/database.py b/app/database.py
index f13241d..eca44d0 100644
--- a/app/database.py
+++ b/app/database.py
@@ -1,22 +1,181 @@
-from sqlmodel import SQLModel, Session, create_engine
+import logging
import os
+import sys
+from typing import Dict, Iterable
+from sqlalchemy import inspect, text
+from sqlmodel import SQLModel, Session, create_engine
DB_PATH = os.environ.get("FILAMENTHUB_DB_PATH", "data/filamenthub.db")
-
engine = create_engine(f"sqlite:///{DB_PATH}", echo=False)
+logger = logging.getLogger("app")
+
+
+def verify_schema_or_exit(engine, required_schema: dict | None = None) -> None:
+ """
+ Prüft, ob die erwarteten Tabellen und Spalten vorhanden sind.
+ Bei fehlenden Einträgen wird ein Fehler geloggt und der Prozess beendet.
+
+ required_schema: Dict[str, List[str]]
+ z.B. {"job": ["id", "eta_seconds", "filament_start_mm"]}
+ """
+ logger = logging.getLogger("filamenthub.database")
+ # Minimal required schema: only fields that runtime code strictly depends on
+ DEFAULT_REQUIRED_SCHEMA: Dict[str, Iterable[str]] = {
+ "job": {
+ "id",
+ "started_at",
+ "finished_at",
+ "filament_used_mm",
+ "filament_start_mm",
+ "eta_seconds",
+ }
+ }
+
+ if required_schema is None:
+ required_schema = DEFAULT_REQUIRED_SCHEMA
+
+ try:
+ inspector = inspect(engine)
+ existing_tables = inspector.get_table_names()
+ except Exception as exc:
+ logger.error("[DB] Fehler beim Initialisieren des DB-Inspectors: %s", exc, exc_info=True)
+ logger.warning("[DB] Inspector nicht verfügbar — Schema-Check wird übersprungen. Falls dies in Produktion auftritt, prüfe die DB-Verbindung.")
+ logger.debug("[DB] Database file: %s", DB_PATH)
+ # Fallback: falls Inspector nicht nutzbar ist (z.B. in Tests mit monkeypatch),
+ # überspringen wir die schema-verification an dieser Stelle und lassen
+ # init_db() normal weiterlaufen. Ein späterer Fehler beim Zugriff auf
+ # spezifische Tabellen wird dann sichtbar.
+ return
+
+ missing = []
+ for table, cols in required_schema.items():
+ if table not in existing_tables:
+ missing.append(f"Missing table: {table}")
+ continue
+ try:
+ existing_cols = {c["name"] for c in inspector.get_columns(table)}
+ except Exception as exc:
+ logger.error("Fehler beim Lesen der Spalten fuer Tabelle %s: %s", table, exc, exc_info=True)
+ missing.append(f"Cannot inspect columns for table: {table}")
+ continue
+ for col in cols:
+ if col not in existing_cols:
+ missing.append(f"Missing column: {table}.{col}")
+
+ if missing:
+ logger.error("[DB] Schema validation failed")
+ for item in missing:
+ # item is either 'Missing table: X' or 'Missing column: X.Y' or inspect error
+ logger.error("[DB] %s", item)
+ logger.error("[DB] Database file: %s", DB_PATH)
+ logger.error("[DB] Fix: run `alembic upgrade head` or follow migrations in the project README. Server will exit.")
+ sys.exit(1)
+
+
+def run_migrations() -> None:
+ """Führt Alembic-Migrationen bis head aus (Baseline + Updates)."""
+ logging.info("Starte Alembic-Migrationen...")
+ try:
+ from alembic import command # type: ignore
+ from alembic.config import Config # type: ignore
+ except ImportError:
+ logging.warning("Alembic nicht installiert, Migrationen werden übersprungen.")
+ return
+
+ try:
+ base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+ alembic_ini = os.path.join(base_dir, "alembic.ini")
+ if not os.path.exists(alembic_ini):
+ logging.warning("alembic.ini nicht gefunden, Migrationen werden übersprungen.")
+ return
+ cfg = Config(alembic_ini)
+ cfg.set_main_option("sqlalchemy.url", f"sqlite:///{DB_PATH}")
+ cfg.set_main_option("script_location", os.path.join(base_dir, "alembic"))
+
+ with engine.begin() as conn:
+ has_version = bool(
+ conn.exec_driver_sql(
+ "SELECT name FROM sqlite_master WHERE type='table' AND name='alembic_version'"
+ ).fetchone()
+ )
+ has_material = bool(
+ conn.exec_driver_sql(
+ "SELECT name FROM sqlite_master WHERE type='table' AND name='material'"
+ ).fetchone()
+ )
+ if not has_version and has_material:
+ logging.info("Bestehende Tabellen ohne alembic_version gefunden, setze Revision auf head.")
+ command.stamp(cfg, "head")
+ return
+
+ # Prüfe aktuelle Revision bevor upgrade ausgeführt wird
+ if has_version:
+ with engine.connect() as conn:
+ current = conn.exec_driver_sql(
+ "SELECT version_num FROM alembic_version"
+ ).fetchone()
+ if current:
+ logging.info(f"Aktuelle DB-Revision: {current[0]}")
+ # Prüfe ob bereits auf head
+ from alembic.script import ScriptDirectory
+ script = ScriptDirectory.from_config(cfg)
+ head_rev = script.get_current_head()
+ if current[0] == head_rev:
+ logging.info("Datenbank ist bereits auf der neuesten Version (head). Keine Migrationen nötig.")
+ return
+
+ logging.info("Führe Alembic upgrade head aus...")
+ command.upgrade(cfg, "head")
+ logging.info("Alembic-Migrationen erfolgreich abgeschlossen.")
+ except Exception as exc:
+ logging.error("Alembic-Migration fehlgeschlagen: %s", exc)
+ raise
def init_db() -> None:
"""
- Erstellt alle Tabellen, falls sie noch nicht existieren.
- Wichtig: Modelle müssen vor dem Aufruf importiert sein.
+ Setzt SQLite-Constraints und führt Migrationen aus.
+ Tabellen werden ausschließlich über Alembic verwaltet.
"""
- from app.models.material import Material # noqa: F401
- from app.models.spool import Spool # noqa: F401
- from app.models.printer import Printer # noqa: F401
- from app.models.job import Job # noqa: F401
+ logging.info("Initialisiere Datenbank...")
+ from sqlalchemy import text
+
+ try:
+ with engine.connect() as conn:
+ conn.execute(text("PRAGMA foreign_keys=ON"))
+ logging.info("Foreign Keys aktiviert.")
+ except Exception as exc:
+ logging.error("Fehler beim Aktivieren der Foreign Keys: %s", exc)
+ # Ensure DB directory exists and create an empty DB file if missing
+ db_dir = os.path.dirname(DB_PATH)
+ if db_dir and not os.path.exists(db_dir):
+ os.makedirs(db_dir, exist_ok=True)
+
+ if not os.path.exists(DB_PATH):
+ logging.info("[DB] Datenbank existiert nicht – erstelle leere SQLite-Datei.")
+ open(DB_PATH, "a").close()
+
+ try:
+ run_migrations()
+ except Exception as exc:
+ logging.error("Fehler bei Migrationen: %s", exc, exc_info=True)
+ logging.error("Server wird beendet, da Migrationen fehlgeschlagen sind.")
+ sys.exit(1)
+
+ # Nach Migrationen das Schema verifizieren (kritische Tabellen/Spalten)
+ try:
+ verify_schema_or_exit(engine)
+ except SystemExit:
+ # bereits geloggt in verify_schema_or_exit
+ raise
+ except Exception as exc:
+ logging.error("Unbekannter Fehler bei Schema-Pruefung: %s", exc, exc_info=True)
+ logging.error("Server wird beendet.")
+ sys.exit(1)
- SQLModel.metadata.create_all(engine)
+ logging.info("Datenbank-Initialisierung abgeschlossen.")
+ # Sichtbare Abschlussmeldung für Betreiber (Migrationen + Schema-Validierung sind durchlaufen)
+ logger.info("[DB] Migrationen abgeschlossen, Schema validiert – Datenbank bereit")
def get_session():
diff --git a/app/db/session.py b/app/db/session.py
new file mode 100644
index 0000000..8fd74cb
--- /dev/null
+++ b/app/db/session.py
@@ -0,0 +1,18 @@
+from contextlib import contextmanager
+from typing import Iterator
+
+from sqlmodel import Session
+
+from app.database import engine
+
+
+@contextmanager
+def session_scope() -> Iterator[Session]:
+ """Context manager to provide a SQLModel `Session` bound to the global engine.
+
+ Usage:
+ with session_scope() as session:
+ # use session
+ """
+ with Session(engine) as session:
+ yield session
diff --git a/app/logging/__init__.py b/app/logging/__init__.py
new file mode 100644
index 0000000..5f58988
--- /dev/null
+++ b/app/logging/__init__.py
@@ -0,0 +1 @@
+from .runtime import reconfigure_logging
diff --git a/app/logging/runtime.py b/app/logging/runtime.py
new file mode 100644
index 0000000..5fbe78a
--- /dev/null
+++ b/app/logging/runtime.py
@@ -0,0 +1,105 @@
+import logging
+import time
+from logging.handlers import RotatingFileHandler
+from pathlib import Path
+from typing import Dict, Tuple
+
+LOG_DIR = Path("logs/app")
+LOG_FILE = LOG_DIR / "app.log"
+MANAGED_LOGGERS = ["", "uvicorn.error", "uvicorn"]
+MODULES = ["app", "bambu", "errors", "klipper", "mqtt"]
+_CURRENT_HANDLERS: Dict[str, RotatingFileHandler] = {}
+LOG_FORMATTER = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s - %(message)s")
+
+
+def _get_level(level_str: str) -> int:
+ lvl = (level_str or "").upper()
+ if lvl in {"DEBUG", "INFO", "WARNING", "ERROR"}:
+ return getattr(logging, lvl)
+ return logging.INFO
+
+
+def _ensure_log_dir() -> None:
+ LOG_DIR.mkdir(parents=True, exist_ok=True)
+
+
+def _cleanup_old_logs(keep_days: int) -> None:
+ if keep_days <= 0:
+ return
+ cutoff = time.time() - keep_days * 86400
+ pattern = LOG_DIR.glob("app.log.*")
+ for file_path in pattern:
+ try:
+ if file_path.is_file() and file_path.stat().st_mtime < cutoff:
+ file_path.unlink()
+ except Exception:
+ pass
+
+
+def _clear_handlers() -> None:
+ for logger_name, handler in list(_CURRENT_HANDLERS.items()):
+ logger_obj = logging.getLogger(logger_name)
+ if handler in logger_obj.handlers:
+ logger_obj.removeHandler(handler)
+ try:
+ handler.close()
+ except Exception:
+ pass
+ _CURRENT_HANDLERS.clear()
+
+
+def _build_handler(level: int, max_size_mb: int, backup_count: int) -> RotatingFileHandler:
+ handler = RotatingFileHandler(
+ LOG_FILE,
+ maxBytes=max_size_mb * 1024 * 1024,
+ backupCount=backup_count,
+ encoding="utf-8",
+ )
+ handler.setLevel(level)
+ handler.setFormatter(LOG_FORMATTER)
+ return handler
+
+
+def _install_handlers(level: int, max_size_mb: int, backup_count: int) -> None:
+ _ensure_log_dir()
+ for logger_name in MANAGED_LOGGERS:
+ handler = _build_handler(level, max_size_mb, backup_count)
+ logger_obj = logging.getLogger(logger_name)
+ logger_obj.addHandler(handler)
+ _CURRENT_HANDLERS[logger_name] = handler
+
+
+def _configure_modules(level: int, logging_enabled: bool, modules_cfg: dict) -> Dict[str, bool]:
+ statuses: Dict[str, bool] = {}
+ for module_name in MODULES:
+ module_entry = modules_cfg.get(module_name, {})
+ module_enabled = bool(module_entry.get("enabled", False))
+ final_enabled = logging_enabled and module_enabled
+ module_logger = logging.getLogger(module_name)
+ module_logger.disabled = not final_enabled
+ module_logger.setLevel(level if final_enabled else logging.CRITICAL + 10)
+ statuses[module_name] = final_enabled
+ return statuses
+
+
+def reconfigure_logging(logging_config: dict) -> Dict[str, bool]:
+ enabled = bool(logging_config.get("enabled", True))
+ level = _get_level(logging_config.get("level", "INFO"))
+ max_size_mb = max(1, int(logging_config.get("max_size_mb", 10)))
+ backup_count = max(1, int(logging_config.get("backup_count", 3)))
+ keep_days = int(logging_config.get("keep_days", 0))
+ modules_cfg = logging_config.get("modules", {})
+
+ _clear_handlers()
+ if enabled:
+ _install_handlers(level, max_size_mb, backup_count)
+ _cleanup_old_logs(keep_days)
+
+ root_logger = logging.getLogger()
+ root_logger.setLevel(level if enabled else logging.CRITICAL + 10)
+ for logger_name in ("uvicorn", "uvicorn.error"):
+ logging.getLogger(logger_name).setLevel(level if enabled else logging.CRITICAL + 10)
+
+ statuses = _configure_modules(level, enabled, modules_cfg)
+ statuses["app"] = enabled
+ return statuses
diff --git a/app/main.py b/app/main.py
index cdc32a0..ba6808d 100644
--- a/app/main.py
+++ b/app/main.py
@@ -1,35 +1,385 @@
-from fastapi import FastAPI, Request
+from fastapi import FastAPI, Query
+from app.routes.debug_routes import get_logs, delete_logs
+
+app = FastAPI()
+
+@app.get("/api/debug/logs")
+def api_get_logs(module: str = Query("app"), limit: int = Query(100)):
+ return get_logs(module=module, limit=limit)
+
+@app.delete("/api/debug/logs")
+def api_delete_logs(module: str = Query("app")):
+ return delete_logs(module=module)
+import logging
+import yaml
+import os
+from app.logging.runtime import reconfigure_logging
+
+# Logging-Konfiguration aus config.yaml
+def get_logging_config():
+ config_path = os.path.join(os.path.dirname(__file__), "..", "..", "config.yaml")
+ if not os.path.exists(config_path):
+ return {
+ "enabled": True,
+ "level": "INFO",
+ "keep_days": 14,
+ "max_size_mb": 10,
+ "backup_count": 3,
+ "modules": {},
+ }
+ with open(config_path, "r", encoding="utf-8") as f:
+ config = yaml.safe_load(f)
+ logging_cfg = config.get("logging", {})
+ return {
+ "enabled": logging_cfg.get("enabled", True),
+ "level": logging_cfg.get("level", "INFO"),
+ "keep_days": logging_cfg.get("keep_days", 14),
+ "max_size_mb": logging_cfg.get("max_size_mb", 10),
+ "backup_count": logging_cfg.get("backup_count", 3),
+ "modules": logging_cfg.get("modules", {}),
+ }
+
+log_settings = get_logging_config()
+reconfigure_logging(log_settings)
+log_formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s - %(message)s")
+console_handler = logging.StreamHandler()
+console_handler.setFormatter(log_formatter)
+logging.getLogger().addHandler(console_handler)
+# WICHTIG: Access-Logs explizit NICHT in app.log
+for h in list(logging.getLogger("uvicorn.access").handlers):
+ logging.getLogger("uvicorn.access").removeHandler(h)
+from app.admin import enable_admin
+
+
+def init_admin():
+ import os
+
+ logger = logging.getLogger("app")
+ admin_hash = os.getenv("ADMIN_PASSWORD_HASH")
+ if admin_hash:
+ try:
+ enable_admin(admin_hash)
+ logger.info("Admin enabled via environment variable")
+ except Exception:
+ logger.exception("Failed to enable admin from environment variable")
+ else:
+ logger.info("Admin disabled (no ADMIN_PASSWORD_HASH)")
+
+
+# Initialisiere optionalen Admin-Modus (einmalig)
+init_admin()
+from fastapi import FastAPI, Request, WebSocket
from fastapi.templating import Jinja2Templates
from fastapi.responses import HTMLResponse
+from fastapi.staticfiles import StaticFiles
+from services.printer_service import PrinterService
+from app.services import mqtt_runtime
+from app.monitoring.runtime_monitor import record_request
+import time
+# -----------------------------------------------------
+# ROUTER & MODULE IMPORTS
+# -----------------------------------------------------
from app.database import init_db
+
from app.routes.hello import router as hello_router
from app.routes.materials import router as materials_router
from app.routes.spools import router as spools_router
+from app.routes.spool_numbers import router as spool_numbers_router # NEU: Spulen-Nummern-System
+from app.routes.log_routes import router as log_router
+from app.routes.system_routes import router as system_router
+from app.routes.debug_routes import router as debug_router
+from app.routes.service_routes import router as service_router
+from app.routes.database_routes import router as database_router
+from app.routes.scanner_routes import router as scanner_router, debug_printer_router
+from app.routes.mqtt_routes import router as mqtt_router
+from app.routes.performance_routes import router as performance_router
+from app.routes.printers import router as printers_router
+from app.routes.jobs import router as jobs_router
+from app.routes.statistics_routes import router as statistics_router
+
+from app.routes.bambu_routes import router as bambu_router
+from app.routes.admin_routes import router as admin_router
+from app.routes.admin_coverage_routes import router as admin_coverage_router
+from app.routes.settings_routes import router as settings_router
+from app.routes.debug_ams_routes import router as debug_ams_router
+from app.routes.debug_system_routes import router as debug_system_router
+from app.routes.debug_performance_routes import router as debug_performance_router
+from app.routes.debug_network_routes import router as debug_network_router
+from app.routes.notification_routes import router as notification_router
+from app.routes.config_routes import router as config_router
+from app.routes import debug_log_routes
+from app.routes import mqtt_runtime_routes
+from app.routes.live_state_routes import router as live_state_router
+from app.routes.ams_routes import router as ams_router
+
+from app.websocket.log_stream import stream_log
+from sqlmodel import Session, select
+from app.database import engine
+from app.models.printer import Printer
+
+
+# -----------------------------------------------------
+# FASTAPI APP
+# -----------------------------------------------------
app = FastAPI(
title="FilamentHub",
- description="Filament Management System für Bambu, Klipper & Standalone",
- version="0.1.0",
+ description="Filament Management System fuer Bambu, Klipper & Standalone",
+ version="0.1.0"
)
-templates = Jinja2Templates(directory="frontend/templates")
+# -----------------------------------------------------
+# MIDDLEWARE: RUNTIME / REQUEST MONITORING
+# -----------------------------------------------------
+@app.middleware("http")
+async def runtime_metrics_middleware(request: Request, call_next):
+ start = time.perf_counter()
+ response = await call_next(request)
+ duration_ms = (time.perf_counter() - start) * 1000.0
+ try:
+ record_request(duration_ms)
+ except Exception:
+ pass
+ return response
+
+# -----------------------------------------------------
+# TESTENDPUNKT & HEALTH CHECK
+# -----------------------------------------------------
+@app.get('/ping')
+async def ping():
+ return {'status': 'ok'}
+
+@app.get('/health')
+async def health():
+ """Health check endpoint for Docker container monitoring"""
+ return {'status': 'healthy', 'service': 'filamenthub'}
+
+app.add_event_handler("startup", init_db)
@app.on_event("startup")
-def on_startup():
- init_db()
+def log_startup_complete():
+ logger = logging.getLogger("app")
+ logger.info("[APP] Startup abgeschlossen – FilamentHub ist bereit")
-# Router registrieren
+# -----------------------------------------------------
+# STATIC + TEMPLATES
+# -----------------------------------------------------
+app.mount("/static", StaticFiles(directory="app/static"), name="static")
+app.mount("/frontend", StaticFiles(directory="frontend/static"), name="frontend_static")
+templates = Jinja2Templates(directory="frontend/templates")
+
+
+
+# -----------------------------------------------------
+# ROUTES - API
+# -----------------------------------------------------
app.include_router(hello_router)
app.include_router(materials_router)
app.include_router(spools_router)
+app.include_router(spool_numbers_router) # NEU: Spulen-Nummern-System
+app.include_router(log_router)
+app.include_router(system_router)
+app.include_router(debug_router)
+app.include_router(service_router)
+app.include_router(database_router)
+app.include_router(scanner_router)
+app.include_router(mqtt_router)
+app.include_router(performance_router)
+app.include_router(printers_router)
+app.include_router(jobs_router)
+app.include_router(statistics_router)
+
+app.include_router(bambu_router)
+app.include_router(admin_router)
+app.include_router(admin_coverage_router, prefix="/api/admin")
+app.include_router(settings_router)
+app.include_router(debug_ams_router)
+app.include_router(debug_system_router)
+app.include_router(debug_performance_router)
+app.include_router(debug_network_router)
+app.include_router(debug_printer_router)
+app.include_router(notification_router)
+app.include_router(config_router)
+app.include_router(debug_log_routes.router, prefix="/api/debug", tags=["debug"])
+
+# Runtime MQTT control endpoints (separate from legacy mqtt_routes to avoid collisions)
+app.include_router(mqtt_runtime_routes.router, prefix="/api/mqtt/runtime", tags=["mqtt"])
+
+# Live state endpoints for real-time device data
+app.include_router(live_state_router)
+app.include_router(ams_router)
+
+
+@app.on_event("startup")
+def apply_auto_connect_on_startup():
+ logger = logging.getLogger("app")
+ try:
+ with Session(engine) as session:
+ printers = session.exec(select(Printer)).all()
+ except Exception as exc:
+ logger.exception("Failed to load printers for auto-connect startup: %s", exc)
+ return
+
+ for printer in printers:
+ if getattr(printer, "auto_connect", False):
+ logger.info("Applying auto-connect for printer %s (%s)", printer.name, printer.id)
+ try:
+ mqtt_runtime.apply_auto_connect(printer)
+ except Exception as exc:
+ logger.exception("Auto-connect startup failed for printer %s: %s", printer.id, exc)
-@app.get("/", response_class=HTMLResponse)
+# -----------------------------------------------------
+# ROUTES - FRONTEND
+# -----------------------------------------------------
+@app.get('/', response_class=HTMLResponse)
async def index(request: Request):
return templates.TemplateResponse(
- "index.html",
- {"request": request, "title": "FilamentHub – Dashboard"},
+ 'dashboard.html',
+ {
+ 'request': request,
+ 'title': 'FilamentHub - Dashboard',
+ 'active_page': 'dashboard'
+ },
+ )
+
+
+@app.get('/materials', response_class=HTMLResponse)
+async def materials_page(request: Request):
+ return templates.TemplateResponse(
+ 'materials.html',
+ {
+ 'request': request,
+ 'title': 'Materialien - FilamentHub',
+ 'active_page': 'materials'
+ },
+ )
+
+
+@app.get('/spools', response_class=HTMLResponse)
+async def spools_page(request: Request):
+ return templates.TemplateResponse(
+ 'spools.html',
+ {
+ 'request': request,
+ 'title': 'Spulen - FilamentHub',
+ 'active_page': 'spools'
+ },
+ )
+
+
+@app.get('/ams', response_class=HTMLResponse)
+async def ams_page(request: Request):
+ return templates.TemplateResponse(
+ 'ams.html',
+ {
+ 'request': request,
+ 'title': 'AMS - FilamentHub',
+ 'active_page': 'ams'
+ },
)
+
+
+@app.get('/printers', response_class=HTMLResponse)
+async def printers_page(request: Request):
+ return templates.TemplateResponse(
+ 'printers.html',
+ {
+ 'request': request,
+ 'title': 'Drucker - FilamentHub',
+ 'active_page': 'printers'
+ },
+ )
+
+
+@app.get('/jobs', response_class=HTMLResponse)
+async def jobs_page(request: Request):
+ return templates.TemplateResponse(
+ 'jobs.html',
+ {
+ 'request': request,
+ 'title': 'Druckauftraege - FilamentHub',
+ 'active_page': 'jobs'
+ },
+ )
+
+
+@app.get('/statistics', response_class=HTMLResponse)
+async def statistics_page(request: Request):
+ return templates.TemplateResponse(
+ 'statistics.html',
+ {
+ 'request': request,
+ 'title': 'Statistiken - FilamentHub',
+ 'active_page': 'statistics'
+ },
+ )
+
+
+@app.get('/settings', response_class=HTMLResponse)
+async def settings_page(request: Request):
+ return templates.TemplateResponse(
+ 'settings.html',
+ {
+ 'request': request,
+ 'title': 'Settings - FilamentHub',
+ 'active_page': 'settings'
+ },
+ )
+
+
+@app.get('/logs', response_class=HTMLResponse)
+async def logs_page(request: Request):
+ # logs.html bleibt in app/templates
+ logs_templates = Jinja2Templates(directory='app/templates')
+ return logs_templates.TemplateResponse(
+ 'logs.html',
+ {'request': request},
+ )
+
+
+@app.get('/debug', response_class=HTMLResponse)
+async def debug_page(request: Request):
+ from app.routes.settings_routes import get_setting, DEFAULTS
+
+ debug_templates = Jinja2Templates(directory='app/templates')
+ printers = []
+ debug_center_mode = "lite"
+
+ try:
+ with Session(engine) as session:
+ printers = session.exec(select(Printer)).all()
+ debug_center_mode = get_setting(session, "debug_center_mode", DEFAULTS.get("debug_center_mode", "lite")) or "lite"
+ except Exception:
+ printers = []
+
+ return debug_templates.TemplateResponse(
+ 'debug.html',
+ {
+ 'request': request,
+ 'title': 'FilamentHub Debug Center',
+ 'active_page': 'debug',
+ 'printers': printers,
+ 'data_mode': debug_center_mode
+ },
+ )
+
+
+@app.get('/ams-help', response_class=HTMLResponse)
+async def ams_help_page(request: Request):
+ """Simple helper page to visualize AMS slots from the latest report message."""
+ help_templates = Jinja2Templates(directory='app/templates')
+ return help_templates.TemplateResponse(
+ 'ams_help.html',
+ {'request': request, 'title': 'AMS Helper'},
+ )
+
+
+
+
+# Zentraler PrinterService für MQTT → UniversalMapper → PrinterData Pipeline
+if not hasattr(app.state, "printer_service"):
+ app.state.printer_service = PrinterService()
diff --git a/app/models/__pycache__/__init__.cpython-313.pyc b/app/models/__pycache__/__init__.cpython-313.pyc
deleted file mode 100644
index a6e5265..0000000
Binary files a/app/models/__pycache__/__init__.cpython-313.pyc and /dev/null differ
diff --git a/app/models/__pycache__/job.cpython-313.pyc b/app/models/__pycache__/job.cpython-313.pyc
deleted file mode 100644
index 9c1d1db..0000000
Binary files a/app/models/__pycache__/job.cpython-313.pyc and /dev/null differ
diff --git a/app/models/__pycache__/material.cpython-313.pyc b/app/models/__pycache__/material.cpython-313.pyc
deleted file mode 100644
index 054741f..0000000
Binary files a/app/models/__pycache__/material.cpython-313.pyc and /dev/null differ
diff --git a/app/models/__pycache__/printer.cpython-313.pyc b/app/models/__pycache__/printer.cpython-313.pyc
deleted file mode 100644
index 154ca22..0000000
Binary files a/app/models/__pycache__/printer.cpython-313.pyc and /dev/null differ
diff --git a/app/models/__pycache__/spool.cpython-313.pyc b/app/models/__pycache__/spool.cpython-313.pyc
deleted file mode 100644
index e67c36a..0000000
Binary files a/app/models/__pycache__/spool.cpython-313.pyc and /dev/null differ
diff --git a/app/models/job.py b/app/models/job.py
index 593d8c5..9e18642 100644
--- a/app/models/job.py
+++ b/app/models/job.py
@@ -1,5 +1,5 @@
from sqlmodel import SQLModel, Field
-from typing import Optional
+from typing import Optional, List, ClassVar
from uuid import uuid4
from datetime import datetime
@@ -15,6 +15,22 @@ class JobBase(SQLModel):
started_at: datetime = Field(default_factory=datetime.utcnow)
finished_at: Optional[datetime] = None
+ # Status: running, completed, failed, cancelled, aborted
+ status: str = Field(default="running")
+
+ # Spulen-Snapshot-System (NEU - Teil der Spezifikation v4)
+ # Speichert Spulen-Daten zum Zeitpunkt des Job-Starts
+ spool_number: Optional[int] = None
+ spool_name: Optional[str] = None
+ spool_vendor: Optional[str] = None
+ spool_color: Optional[str] = None
+ spool_created_at: Optional[str] = None
+ # Optionales, berechnetes Feld für UI: ETA in Sekunden (oder None)
+ eta_seconds: Optional[int] = None
+
+ # Filament-Tracking: Startwert beim ersten Auftreten von layer_num >= 1
+ filament_start_mm: Optional[float] = None
+
class Job(JobBase, table=True):
id: str = Field(default_factory=lambda: str(uuid4()), primary_key=True)
@@ -26,3 +42,25 @@ class JobCreate(JobBase):
class JobRead(JobBase):
id: str
+
+
+class JobSpoolUsageBase(SQLModel):
+ job_id: str = Field(foreign_key="job.id")
+ spool_id: Optional[str] = Field(default=None, foreign_key="spool.id")
+ slot: Optional[int] = None
+ used_mm: float = 0
+ used_g: float = 0
+ order_index: Optional[int] = None
+
+
+class JobSpoolUsage(JobSpoolUsageBase, table=True):
+ __tablename__ = "job_spool_usage" # type: ignore[reportAssignmentType]
+ id: str = Field(default_factory=lambda: str(uuid4()), primary_key=True)
+
+
+class JobSpoolUsageCreate(JobSpoolUsageBase):
+ pass
+
+
+class JobSpoolUsageRead(JobSpoolUsageBase):
+ id: str
diff --git a/app/models/material.py b/app/models/material.py
index 816d83d..7238d26 100644
--- a/app/models/material.py
+++ b/app/models/material.py
@@ -6,19 +6,72 @@
class MaterialBase(SQLModel):
name: str
brand: Optional[str] = None
- color: Optional[str] = None # HEX oder Text
density: float = 1.24 # g/cm³, Standard PLA
diameter: float = 1.75 # mm
notes: Optional[str] = None
+ external_id: Optional[str] = None # Für Cloud/AMS
+ printer_slot: Optional[int] = None # Für spätere Slot-Zuordnung
+ created_at: Optional[str] = None # ISO-Timestamp
+ updated_at: Optional[str] = None # ISO-Timestamp
class Material(MaterialBase, table=True):
id: str = Field(default_factory=lambda: str(uuid4()), primary_key=True)
-class MaterialCreate(MaterialBase):
- pass
+# Pydantic-Schemas
+from pydantic import BaseModel, Field, ConfigDict, field_validator
+
+class MaterialCreateSchema(BaseModel):
+ model_config = ConfigDict(extra="ignore", populate_by_name=True)
+
+ name: str
+ brand: str | None = Field(None, alias="manufacturer")
+ density: float = Field(1.24, gt=0)
+ diameter: float = Field(1.75, ge=1.5, le=3.0)
+ notes: str | None = None
+ external_id: str | None = None
+ printer_slot: int | str | None = None
+ material_type: str | None = Field(None, alias="type")
+
+ @field_validator("name")
+ def name_not_empty(cls, v: str) -> str:
+ if not v or not v.strip():
+ raise ValueError("Name darf nicht leer sein")
+ return v.strip()
+
+ @field_validator("printer_slot")
+ def normalize_printer_slot(cls, v):
+ if v is None:
+ return None
+ if isinstance(v, int):
+ return v
+ digits = "".join(filter(str.isdigit, str(v)))
+ return int(digits) if digits else None
+
+class MaterialUpdateSchema(BaseModel):
+ model_config = ConfigDict(extra="ignore", populate_by_name=True)
+
+ name: str | None = None
+ brand: str | None = Field(None, alias="manufacturer")
+ density: float | None = Field(None, gt=0)
+ diameter: float | None = Field(None, ge=1.5, le=3.0)
+ notes: str | None = None
+ external_id: str | None = None
+ printer_slot: int | str | None = None
+ material_type: str | None = Field(None, alias="type")
+
+class MaterialReadSchema(BaseModel):
+ model_config = ConfigDict(from_attributes=True)
-class MaterialRead(MaterialBase):
id: str
+ name: str
+ brand: str | None = None
+ density: float
+ diameter: float
+ notes: str | None = None
+ external_id: str | None = None
+ printer_slot: int | None = None
+ created_at: str | None = None
+ updated_at: str | None = None
diff --git a/app/models/printer.py b/app/models/printer.py
index de3e6e9..0ac2744 100644
--- a/app/models/printer.py
+++ b/app/models/printer.py
@@ -8,9 +8,16 @@ class PrinterBase(SQLModel):
printer_type: str # "bambu", "klipper", "manual"
ip_address: Optional[str] = None
port: Optional[int] = None
+ model: str = Field(default="X1C", max_length=32) # z.B. X1C, A1MINI, P1S, H2D
+ mqtt_version: str = Field(default="311", max_length=8) # MQTT Protocol Version
+
+ power_consumption_kw: Optional[float] = Field(default=None) # Durchschnittliche Leistungsaufnahme
+ maintenance_cost_yearly: Optional[float] = Field(default=None) # Wartungskosten pro Jahr
cloud_serial: Optional[str] = None # Bambu Cloud Seriennummer
api_key: Optional[str] = None # z.B. Moonraker Token
+ active: bool = True # wird beim Start berücksichtigt
+ auto_connect: bool = False # Automatische MQTT-Verbindung
class Printer(PrinterBase, table=True):
@@ -23,3 +30,6 @@ class PrinterCreate(PrinterBase):
class PrinterRead(PrinterBase):
id: str
+ online: bool = False
+ active: bool = True
+ image_url: Optional[str] = None # Nur Ausgabe, kein DB-Feld
diff --git a/app/models/settings.py b/app/models/settings.py
new file mode 100644
index 0000000..2c5a564
--- /dev/null
+++ b/app/models/settings.py
@@ -0,0 +1,12 @@
+from sqlmodel import SQLModel, Field
+from typing import Optional
+
+class Setting(SQLModel, table=True):
+ key: str = Field(primary_key=True)
+ value: Optional[str] = None
+
+class UserFlag(SQLModel, table=True):
+ id: Optional[int] = Field(default=None, primary_key=True)
+ user_id: str
+ flag: str
+ value: bool = False
diff --git a/app/models/spool.py b/app/models/spool.py
index 63a7ff8..887ea7c 100644
--- a/app/models/spool.py
+++ b/app/models/spool.py
@@ -1,29 +1,200 @@
-from sqlmodel import SQLModel, Field
from typing import Optional
from uuid import uuid4
+from pydantic import BaseModel, ConfigDict, Field, field_validator
+from sqlmodel import SQLModel, Field as SQLField
+
class SpoolBase(SQLModel):
- material_id: str = Field(foreign_key="material.id")
- weight_full: float = 1000 # g, Filament ohne Leerspule
- weight_empty: float = 250 # g, nur Leerspule
- weight_remaining: Optional[float] = None
+ material_id: str = SQLField(foreign_key="material.id")
+ vendor_id: Optional[str] = None
- label: Optional[str] = None # Nutzerdefinierter Name
- manufacturer_spool_id: Optional[str] = None # optional RFID/Chip
- ams_slot: Optional[int] = None
+ # Spulen-Nummern-System (NEU - Teil der Spezifikation v4)
+ spool_number: Optional[int] = None # Unique, user-friendly number (#1, #2, #3...)
+ name: Optional[str] = None # Denormalized from material.name
+ vendor: Optional[str] = None # Denormalized from material.brand
+ color: Optional[str] = None # Spulen-spezifische Farbe
+ weight_full: float = 1000
+ weight_empty: float = 250
+ weight_current: Optional[float] = None
+ status: Optional[str] = None
+ location: Optional[str] = None
+ label: Optional[str] = None
+ external_id: Optional[str] = None
+ printer_id: Optional[str] = SQLField(default=None, foreign_key="printer.id")
+ printer_slot: Optional[int] = None
+ ams_slot: Optional[int] = None
+ tag_uid: Optional[str] = None
+ tray_uuid: Optional[str] = None
+ tray_color: Optional[str] = None
+ tray_type: Optional[str] = None
+ remain_percent: Optional[float] = None
+ last_seen: Optional[str] = None
+ first_seen: Optional[str] = None
+ used_count: int = 0
+ last_slot: Optional[int] = None
is_open: bool = True
is_empty: bool = False
+ manufacturer_spool_id: Optional[str] = None
+ created_at: Optional[str] = None
+ updated_at: Optional[str] = None
class Spool(SpoolBase, table=True):
- id: str = Field(default_factory=lambda: str(uuid4()), primary_key=True)
+ id: str = SQLField(default_factory=lambda: str(uuid4()), primary_key=True)
+
+
+class SpoolCreateSchema(BaseModel):
+ model_config = ConfigDict(extra="ignore", populate_by_name=True)
+
+ material_id: str
+ vendor_id: str | None = Field(None, alias="manufacturer")
+ weight: float | None = Field(None, gt=0)
+ weight_full: float = Field(1000, gt=0)
+ weight_empty: float = Field(250, gt=0)
+ weight_current: float | None = Field(None, gt=0)
+ status: str | None = None
+ location: str | None = None
+ label: str | None = None
+ external_id: str | None = None
+ printer_slot: int | str | None = None
+ ams_slot: int | str | None = None
+ printer_id: str | None = None
+ tag_uid: str | None = None
+ tray_uuid: str | None = None
+ tray_color: str | None = None
+ tray_type: str | None = None
+ remain_percent: float | None = None
+ last_seen: str | None = None
+ color: str | None = None # JETZT persistiert (Teil des Nummern-Systems)
+ name: str | None = None # NEU: Kopie von material.name
+ vendor: str | None = None # NEU: Kopie von material.brand
+ spool_number: int | None = None # NEU: User-friendly Nummer
+ first_seen: str | None = None
+ used_count: int = 0
+ last_slot: int | None = None
+ is_open: bool = True
+ is_empty: bool = False
+ manufacturer_spool_id: str | None = None
+
+ @field_validator("material_id")
+ def material_id_not_empty(cls, v: str) -> str:
+ if not v or not v.strip():
+ raise ValueError("material_id darf nicht leer sein")
+ return v.strip()
+
+ @field_validator("printer_slot")
+ def normalize_printer_slot(cls, v):
+ if v is None:
+ return None
+ if isinstance(v, int):
+ return v
+ digits = "".join(filter(str.isdigit, str(v)))
+ return int(digits) if digits else None
+ @field_validator("ams_slot")
+ def normalize_ams_slot(cls, v):
+ if v is None:
+ return None
+ if isinstance(v, int):
+ return v
+ digits = "".join(filter(str.isdigit, str(v)))
+ return int(digits) if digits else None
-class SpoolCreate(SpoolBase):
- pass
+class SpoolUpdateSchema(BaseModel):
+ model_config = ConfigDict(extra="ignore", populate_by_name=True)
+
+ material_id: str | None = None
+ vendor_id: str | None = Field(None, alias="manufacturer")
+ weight: float | None = Field(None, gt=0)
+ weight_full: float | None = Field(None, gt=0)
+ weight_empty: float | None = Field(None, gt=0)
+ weight_current: float | None = Field(None, gt=0)
+ status: str | None = None
+ location: str | None = None
+ label: str | None = None
+ external_id: str | None = None
+ printer_slot: int | str | None = None
+ ams_slot: int | str | None = None
+ printer_id: str | None = None
+ tag_uid: str | None = None
+ tray_uuid: str | None = None
+ tray_color: str | None = None
+ tray_type: str | None = None
+ remain_percent: float | None = None
+ last_seen: str | None = None
+ color: str | None = None
+ spool_number: int | None = None
+ first_seen: str | None = None
+ used_count: int | None = None
+ last_slot: int | None = None
+ is_open: bool | None = None
+ is_empty: bool | None = None
+ manufacturer_spool_id: str | None = None
+
+ @field_validator("material_id")
+ def material_id_not_empty(cls, v: str | None) -> str | None:
+ if v is None:
+ return v
+ if not v.strip():
+ raise ValueError("material_id darf nicht leer sein")
+ return v.strip()
+
+ @field_validator("printer_slot")
+ def normalize_printer_slot_update(cls, v):
+ if v is None:
+ return None
+ if isinstance(v, int):
+ return v
+ digits = "".join(filter(str.isdigit, str(v)))
+ return int(digits) if digits else None
+
+ @field_validator("ams_slot")
+ def normalize_ams_slot_update(cls, v):
+ if v is None:
+ return None
+ if isinstance(v, int):
+ return v
+ digits = "".join(filter(str.isdigit, str(v)))
+ return int(digits) if digits else None
+
+
+class SpoolReadSchema(BaseModel):
+ model_config = ConfigDict(from_attributes=True, populate_by_name=True)
-class SpoolRead(SpoolBase):
id: str
+ material_id: str
+ vendor_id: str | None = Field(None, serialization_alias="manufacturer")
+
+ # Spulen-Nummern-System (NEU)
+ spool_number: int | None = None
+ name: str | None = None
+ vendor: str | None = None
+ color: str | None = None
+
+ weight_full: float
+ weight_empty: float
+ weight_current: float | None = Field(None, serialization_alias="weight")
+ status: str | None = None
+ location: str | None = None
+ label: str | None = None
+ external_id: str | None = None
+ printer_slot: int | None = None
+ ams_slot: int | None = None
+ printer_id: str | None = None
+ tag_uid: str | None = None
+ tray_uuid: str | None = None
+ tray_color: str | None = None
+ tray_type: str | None = None
+ remain_percent: float | None = None
+ last_seen: str | None = None
+ first_seen: str | None = None
+ used_count: int = 0
+ last_slot: int | None = None
+ is_open: bool = True
+ is_empty: bool = False
+ manufacturer_spool_id: str | None = None
+ created_at: str | None = None
+ updated_at: str | None = None
diff --git a/app/monitoring/runtime_monitor.py b/app/monitoring/runtime_monitor.py
new file mode 100644
index 0000000..05d5488
--- /dev/null
+++ b/app/monitoring/runtime_monitor.py
@@ -0,0 +1,47 @@
+import time
+import threading
+from collections import deque
+from typing import Deque, Tuple, Dict
+
+# Rolling window of 60s for request timestamps and durations (ms)
+WINDOW_SECONDS = 60
+_lock = threading.Lock()
+_requests: Deque[Tuple[float, float]] = deque() # (timestamp, duration_ms)
+
+
+def record_request(duration_ms: float) -> None:
+ """Record a single HTTP request duration in milliseconds."""
+ now = time.time()
+ with _lock:
+ _requests.append((now, float(duration_ms)))
+ _prune_locked(now)
+
+
+def _prune_locked(now: float) -> None:
+ """Remove entries older than the rolling window. Caller must hold _lock."""
+ cutoff = now - WINDOW_SECONDS
+ while _requests and _requests[0][0] < cutoff:
+ _requests.popleft()
+
+
+def get_runtime_metrics() -> Dict[str, float | str]:
+ """Return requests/min and avg response time in ms with defensive defaults."""
+ now = time.time()
+ with _lock:
+ _prune_locked(now)
+ count = len(_requests)
+ if count > 0:
+ total_ms = sum(d for _, d in _requests)
+ avg_ms = round(total_ms / count, 2)
+ req_per_min = round((count / WINDOW_SECONDS) * 60, 2)
+ else:
+ avg_ms = 0.0
+ req_per_min = 0.0
+
+ state = "active" if req_per_min > 0 else "idle"
+ return {
+ "requests_per_minute": req_per_min,
+ "avg_response_ms": avg_ms,
+ "state": state,
+ }
+
diff --git a/app/routes/__pycache__/__init__.cpython-313.pyc b/app/routes/__pycache__/__init__.cpython-313.pyc
deleted file mode 100644
index 9dc7905..0000000
Binary files a/app/routes/__pycache__/__init__.cpython-313.pyc and /dev/null differ
diff --git a/app/routes/__pycache__/hello.cpython-313.pyc b/app/routes/__pycache__/hello.cpython-313.pyc
deleted file mode 100644
index 9eb4a5a..0000000
Binary files a/app/routes/__pycache__/hello.cpython-313.pyc and /dev/null differ
diff --git a/app/routes/__pycache__/materials.cpython-313.pyc b/app/routes/__pycache__/materials.cpython-313.pyc
deleted file mode 100644
index 1a14412..0000000
Binary files a/app/routes/__pycache__/materials.cpython-313.pyc and /dev/null differ
diff --git a/app/routes/__pycache__/spools.cpython-313.pyc b/app/routes/__pycache__/spools.cpython-313.pyc
deleted file mode 100644
index ca249fe..0000000
Binary files a/app/routes/__pycache__/spools.cpython-313.pyc and /dev/null differ
diff --git a/app/routes/admin_coverage_routes.py b/app/routes/admin_coverage_routes.py
new file mode 100644
index 0000000..c1e7b71
--- /dev/null
+++ b/app/routes/admin_coverage_routes.py
@@ -0,0 +1,223 @@
+from fastapi import APIRouter, Request, HTTPException
+from fastapi.responses import FileResponse, JSONResponse, Response, HTMLResponse
+import subprocess
+import threading
+from pathlib import Path
+import os
+import tempfile
+from uuid import uuid4
+import mimetypes
+import shutil
+
+
+from app.routes.admin_routes import admin_required, audit, client_ip
+from fastapi.templating import Jinja2Templates
+from fastapi import Depends
+
+router = APIRouter()
+
+templates = Jinja2Templates(directory="frontend/templates")
+
+ROOT_DIR = Path(__file__).resolve().parents[2]
+_coverage_lock = threading.Lock()
+
+
+@router.post("/coverage/run")
+# DEV-FEATURE: Code-Coverage darf nur im Entwicklungsmodus ausgeführt werden
+def run_coverage(request: Request):
+ admin_required(request)
+ # DEV-Mode Guard: nur erlauben, wenn FILAMENTHUB_DEV_FEATURES=="1"
+ if os.environ.get("FILAMENTHUB_DEV_FEATURES") != "1":
+ audit(
+ "admin_coverage_blocked_production",
+ {
+ "ip": client_ip(request),
+ "reason": "Prod block",
+ },
+ )
+ return JSONResponse(
+ {"success": False, "message": "Coverage ist ein Entwickler-Feature und im Produktivmodus deaktiviert"},
+ status_code=403,
+ )
+
+ # --- Backend-Guard: pytest vorhanden? ---
+ if not shutil.which("pytest"):
+ audit(
+ "admin_coverage_error",
+ {
+ "ip": client_ip(request),
+ "message": "pytest nicht installiert"
+ },
+ )
+ return JSONResponse(
+ {"success": False, "message": "pytest ist nicht installiert (Coverage nicht möglich)"},
+ status_code=500,
+ )
+ # --- Ende Guard ---
+
+ if not _coverage_lock.acquire(blocking=False):
+ return JSONResponse({"success": False, "message": "Coverage läuft bereits"}, status_code=200)
+ temp_db = Path(tempfile.gettempdir()) / f"filamenthub_cov_{uuid4().hex}.db"
+ try:
+ cmd = ["pytest", "--cov=app", "--cov-report=html", "--ignore=Backup"]
+ temp_db = Path(tempfile.gettempdir()) / f"filamenthub_cov_{uuid4().hex}.db"
+ env = os.environ.copy()
+ env["PYTHONPATH"] = str(ROOT_DIR)
+ env["FILAMENTHUB_DB_PATH"] = str(temp_db)
+ result = subprocess.run(
+ cmd,
+ cwd=str(ROOT_DIR),
+ capture_output=True,
+ text=True,
+ timeout=120,
+ env=env,
+ )
+ success = result.returncode == 0
+ message = "Coverage erfolgreich ausgeführt" if success else "Coverage fehlgeschlagen"
+ audit(
+ "admin_coverage_run",
+ {
+ "ip": client_ip(request),
+ "success": success,
+ "stdout": (result.stdout or "")[:1000],
+ "stderr": (result.stderr or "")[:1000],
+ },
+ )
+ # Wenn HTML-Report erzeugt wurde, Gesamtwert parsen und als Verlauf speichern
+ try:
+ index_file = ROOT_DIR / "htmlcov" / "index.html"
+ if index_file.exists():
+ with open(index_file, 'r', encoding='utf-8') as f:
+ html = f.read()
+ import re
+ m = re.search(r'Coverage report:\s*(\d+)%', html)
+ overall = None
+ if m:
+ overall = int(m.group(1))
+ else:
+ m2 = re.search(r'coverage.*?(\d+)%', html, re.IGNORECASE)
+ if m2:
+ overall = int(m2.group(1))
+
+ if overall is not None:
+ hist_folder = ROOT_DIR / 'data'
+ hist_folder.mkdir(parents=True, exist_ok=True)
+ hist_file = hist_folder / 'coverage_history.json'
+ import json, time
+ entry = {"ts": int(time.time()), "percent": overall}
+ try:
+ if hist_file.exists():
+ with open(hist_file, 'r', encoding='utf-8') as hf:
+ arr = json.load(hf)
+ else:
+ arr = []
+ except Exception:
+ arr = []
+ arr.append(entry)
+ arr = arr[-200:]
+ with open(hist_file, 'w', encoding='utf-8') as hf:
+ json.dump(arr, hf)
+ except Exception:
+ pass
+ return {"success": success, "message": message}
+ except subprocess.TimeoutExpired as exc:
+ audit(
+ "admin_coverage_timeout",
+ {"ip": client_ip(request), "message": "Timeout bei Coverage", "details": str(exc)},
+ )
+ return {"success": False, "message": "Coverage läuft zu lange (Timeout)"}
+ except Exception as exc:
+ audit("admin_coverage_error", {"ip": client_ip(request), "message": str(exc)})
+ return {"success": False, "message": "Coverage fehlgeschlagen"}
+ finally:
+ try:
+ if temp_db.exists():
+ temp_db.unlink()
+ except Exception:
+ pass
+ _coverage_lock.release()
+
+
+@router.get("/coverage/report")
+def coverage_report(request: Request):
+ """Serve the main coverage report HTML page."""
+ admin_required(request)
+ report_path = ROOT_DIR / "htmlcov" / "index.html"
+ if not report_path.is_file():
+ raise HTTPException(status_code=404, detail="Coverage noch nicht ausgeführt")
+ audit("admin_coverage_report", {"ip": client_ip(request)})
+
+ # Read and inject tag to fix relative paths
+ with open(report_path, 'r', encoding='utf-8') as f:
+ content = f.read()
+
+ # Inject after tag
+ # This makes all relative links work correctly
+ content = content.replace(
+ '',
+ '\n '
+ )
+
+ return HTMLResponse(
+ content=content,
+ headers={
+ "Cache-Control": "no-cache, no-store, must-revalidate",
+ "Pragma": "no-cache",
+ "Expires": "0"
+ }
+ )
+
+
+@router.get("/coverage/history")
+def coverage_history(request: Request):
+ """Return stored coverage history as JSON."""
+ admin_required(request)
+ hist_file = ROOT_DIR / 'data' / 'coverage_history.json'
+ if not hist_file.exists():
+ return JSONResponse([], status_code=200)
+ try:
+ import json
+ with open(hist_file, 'r', encoding='utf-8') as f:
+ arr = json.load(f)
+ return JSONResponse(arr)
+ except Exception:
+ return JSONResponse([], status_code=200)
+
+
+@router.get("/coverage/ui")
+def coverage_ui(request: Request):
+ """Render a wrapper UI that shows the coverage report in an iframe and a small chart."""
+ admin_required(request)
+ return templates.TemplateResponse("coverage_wrapper.html", {"request": request})
+
+
+@router.head("/coverage/report")
+def coverage_report_head(request: Request):
+ """Check if coverage report exists (used by frontend to enable/disable button)."""
+ admin_required(request)
+ report_path = ROOT_DIR / "htmlcov" / "index.html"
+ if not report_path.is_file():
+ raise HTTPException(status_code=404, detail="Coverage noch nicht ausgeführt")
+ return Response(status_code=200)
+
+
+@router.get("/coverage/report/{file_path:path}")
+def coverage_report_file(request: Request, file_path: str):
+ """Serve static files from htmlcov directory (CSS, JS, other HTML files)."""
+ admin_required(request)
+
+ # Security: prevent directory traversal
+ if ".." in file_path or file_path.startswith("/"):
+ raise HTTPException(status_code=403, detail="Invalid file path")
+
+ full_path = ROOT_DIR / "htmlcov" / file_path
+
+ if not full_path.is_file():
+ raise HTTPException(status_code=404, detail=f"File not found: {file_path}")
+
+ # Determine MIME type
+ mime_type, _ = mimetypes.guess_type(str(full_path))
+ if mime_type is None:
+ mime_type = "application/octet-stream"
+
+ return FileResponse(str(full_path), media_type=mime_type)
diff --git a/app/routes/admin_routes.py b/app/routes/admin_routes.py
new file mode 100644
index 0000000..3866276
--- /dev/null
+++ b/app/routes/admin_routes.py
@@ -0,0 +1,378 @@
+from fastapi import APIRouter, Request, HTTPException, Depends
+from fastapi.responses import HTMLResponse, JSONResponse
+from fastapi.templating import Jinja2Templates
+from starlette.datastructures import UploadFile
+from starlette.status import HTTP_401_UNAUTHORIZED
+from sqlmodel import Session, select
+from app.database import get_session
+from app.models.settings import Setting, UserFlag
+import importlib
+import json
+import logging
+import os
+import subprocess
+import secrets
+import time
+from datetime import datetime
+import bcrypt
+from typing import Dict, Optional, Tuple, Union
+
+router = APIRouter()
+
+# Admin Panel (vollständig, geschützt)
+@router.get("/admin", response_class=HTMLResponse)
+def admin_panel_page(request: Request):
+ token = request.cookies.get("admin_token")
+ _cleanup_expired_tokens()
+ if not is_token_active(token):
+ audit("admin_access_denied", {"path": "/admin", "ip": client_ip(request)})
+ return templates.TemplateResponse("admin_login.html", {"request": request})
+ audit("admin_access", {"path": "/admin", "ip": client_ip(request)})
+ return templates.TemplateResponse("admin_panel.html", {"request": request})
+
+templates = Jinja2Templates(directory="frontend/templates")
+
+# --- SECURITY CONFIG ---
+
+# Load and normalize ADMIN_PASSWORD_HASH from environment. Must be present.
+def load_admin_password_hash() -> bytes:
+ try:
+ raw = os.environ["ADMIN_PASSWORD_HASH"]
+ except KeyError:
+ raise RuntimeError("ADMIN_PASSWORD_HASH must be set in environment")
+ if not isinstance(raw, str):
+ # If already bytes-like, ensure bytes
+ return raw if isinstance(raw, (bytes, bytearray)) else str(raw).encode("utf-8")
+ # Normalize $2y$ -> $2b$ without changing rest of the hash
+ if raw.startswith("$2y$"):
+ raw = "$2b$" + raw[4:]
+ return raw.encode("utf-8")
+
+# Ensure loaded at import time (app start) and fail fast if missing
+ADMIN_PASSWORD_HASH = load_admin_password_hash()
+
+# token store: token -> expiry_timestamp
+admin_tokens: Dict[str, int] = {}
+TOKEN_TTL = 3600 # seconds
+
+# In-memory rate limit store for login attempts per IP
+# Structure: { ip: {"count": int, "last_ts": float, "blocked_until": float} }
+failed_logins: Dict[str, dict] = {}
+# max 5 failed attempts -> block for 10 minutes
+MAX_FAILED = 5
+BLOCK_SECONDS = 10 * 60
+
+# Cookie security: read from environment, default to False for local/HTTP deployments
+COOKIE_SECURE = os.environ.get("ADMIN_COOKIE_SECURE", "false").lower() in ("true", "1", "yes")
+
+# --- Audit Logger ---
+_audit_logger = logging.getLogger("admin.audit")
+if not _audit_logger.handlers:
+ os.makedirs(os.path.join("logs", "admin"), exist_ok=True)
+ from logging.handlers import TimedRotatingFileHandler
+ audit_file = os.path.join("logs", "admin", "admin_audit.log")
+ audit_handler = TimedRotatingFileHandler(
+ audit_file,
+ when="midnight",
+ backupCount=14,
+ encoding="utf-8",
+ utc=False,
+ )
+ audit_handler.setFormatter(logging.Formatter("%(message)s"))
+ _audit_logger.addHandler(audit_handler)
+ _audit_logger.setLevel(logging.INFO)
+def audit(event: str, details: dict):
+ try:
+ payload = {
+ "ts": datetime.utcnow().isoformat(timespec="seconds") + "Z",
+ "event": event,
+ "details": details or {}
+ }
+ _audit_logger.info(json.dumps(payload, ensure_ascii=False))
+ except Exception:
+ pass
+
+def client_ip(request: Request) -> str:
+ forwarded = request.headers.get("x-forwarded-for")
+ if forwarded:
+ return forwarded
+ client = request.client
+ return client.host if client else "unknown"
+# --- Migration auslösen (Alembic upgrade head) ---
+@router.post("/api/admin/migrate")
+def run_migration(request: Request):
+ admin_required(request)
+ audit("admin_migrate_start", {"ip": client_ip(request)})
+ try:
+ result = subprocess.run(["alembic", "upgrade", "head"], capture_output=True, text=True, check=True)
+ audit("admin_migrate_success", {"ip": client_ip(request), "stdout": (result.stdout or "")[:500]})
+ return {"success": True, "output": result.stdout}
+ except subprocess.CalledProcessError as e:
+ audit("admin_migrate_error", {"ip": client_ip(request), "stderr": (e.stderr or str(e))[:500]})
+ return {"success": False, "error": e.stderr or str(e)}
+
+
+# --- Security helper functions ---
+def verify_admin_password(password: str) -> bool:
+ try:
+ if not password:
+ return False
+ return bool(bcrypt.checkpw(password.encode("utf-8"), ADMIN_PASSWORD_HASH))
+ except Exception:
+ # don't leak errors or secrets
+ return False
+
+
+def create_admin_token() -> Tuple[str, int]:
+ token = secrets.token_hex(16)
+ expiry = int(time.time()) + TOKEN_TTL
+ admin_tokens[token] = expiry
+ return token, expiry
+
+
+def _cleanup_expired_tokens():
+ now = int(time.time())
+ expired = [t for t, exp in admin_tokens.items() if exp <= now]
+ for t in expired:
+ del admin_tokens[t]
+
+
+def is_token_active(token: Optional[str]) -> bool:
+ if not token:
+ return False
+ expiry = admin_tokens.get(token)
+ if not expiry:
+ return False
+ if expiry <= int(time.time()):
+ # expired: remove
+ try:
+ del admin_tokens[token]
+ except KeyError:
+ pass
+ return False
+ return True
+
+
+def admin_required(request: Request):
+ token = request.cookies.get("admin_token")
+ _cleanup_expired_tokens()
+ if not is_token_active(token):
+ audit("admin_auth_required_failed", {"ip": client_ip(request)})
+ raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Nicht autorisiert")
+
+# --- Eintrag löschen (Tabelle + ID) ---
+@router.post("/api/admin/delete")
+async def delete_entry(request: Request, session: Session = Depends(get_session)):
+ admin_required(request)
+ data = await request.json()
+ table = data.get("table")
+ id_ = data.get("id")
+ if not table or not id_:
+ audit("admin_delete_invalid", {"ip": client_ip(request), "table": table, "id": id_})
+ return {"success": False, "error": "Tabelle und ID erforderlich."}
+ # Mapping Tabellenname zu Model
+ model_map = {
+ "material": "app.models.material.Material",
+ "spool": "app.models.spool.Spool",
+ "printer": "app.models.printer.Printer",
+ "job": "app.models.job.Job",
+ "userflag": "app.models.settings.UserFlag",
+ "setting": "app.models.settings.Setting"
+ }
+ if table not in model_map:
+ audit("admin_delete_unknown_table", {"ip": client_ip(request), "table": table})
+ return {"success": False, "error": "Unbekannte Tabelle."}
+ module_name, class_name = model_map[table].rsplit('.', 1)
+ model_cls = getattr(importlib.import_module(module_name), class_name)
+ obj = session.get(model_cls, id_)
+ if not obj:
+ audit("admin_delete_not_found", {"ip": client_ip(request), "table": table, "id": id_})
+ return {"success": False, "error": f"Kein Eintrag mit ID {id_} in {table}."}
+ session.delete(obj)
+ session.commit()
+ audit("admin_delete_success", {"ip": client_ip(request), "table": table, "id": id_})
+ return {"success": True}
+
+
+# Admin Notifications Seite schützen
+@router.get("/admin/notifications", response_class=HTMLResponse)
+def admin_notifications_page(request: Request):
+ token = request.cookies.get("admin_token")
+ if not is_token_active(token):
+ audit("admin_access_denied", {"path": "/admin/notifications", "ip": client_ip(request)})
+ return templates.TemplateResponse("admin_login.html", {"request": request})
+ audit("admin_access", {"path": "/admin/notifications", "ip": client_ip(request)})
+ return templates.TemplateResponse("admin_notifications.html", {"request": request})
+
+@router.post("/api/admin/login")
+async def admin_login(request: Request):
+ form = await request.form()
+ password_data: Union[str, UploadFile, None] = form.get("password")
+ ip = client_ip(request)
+
+ if isinstance(password_data, UploadFile):
+ await password_data.close()
+ audit("admin_login_failed", {"ip": ip, "reason": "password_invalid_type"})
+ return JSONResponse({"success": False, "error": "Falsches Passwort"}, status_code=HTTP_401_UNAUTHORIZED)
+
+ password: Optional[str] = password_data
+
+ # check block
+ entry = failed_logins.get(ip, {"count": 0, "last_ts": 0.0, "blocked_until": 0.0})
+ now = time.time()
+ if entry.get("blocked_until", 0) and entry["blocked_until"] > now:
+ audit("admin_login_blocked", {"ip": ip, "blocked_until": entry["blocked_until"]})
+ return JSONResponse({"success": False, "error": "Zu viele Fehlversuche, bitte später erneut versuchen."}, status_code=429)
+
+ # reset counter if last attempt older than block window
+ if entry.get("last_ts") and now - entry.get("last_ts", 0) > BLOCK_SECONDS:
+ entry["count"] = 0
+
+ if not password:
+ audit("admin_login_failed", {"ip": ip, "reason": "no_password"})
+ return JSONResponse({"success": False, "error": "Falsches Passwort"}, status_code=HTTP_401_UNAUTHORIZED)
+
+ if verify_admin_password(password):
+ token, expiry = create_admin_token()
+ response = JSONResponse({"success": True})
+ response.set_cookie("admin_token", token, httponly=True, secure=COOKIE_SECURE, samesite="lax", max_age=TOKEN_TTL)
+ # reset failed attempts on success
+ if ip in failed_logins:
+ try:
+ del failed_logins[ip]
+ except KeyError:
+ pass
+ audit("admin_login_success", {"ip": ip})
+ return response
+
+ # failed attempt
+ entry["count"] = entry.get("count", 0) + 1
+ entry["last_ts"] = now
+ if entry["count"] >= MAX_FAILED:
+ entry["blocked_until"] = now + BLOCK_SECONDS
+ entry["count"] = 0
+ audit("admin_login_locked", {"ip": ip, "blocked_until": entry["blocked_until"]})
+ else:
+ audit("admin_login_failed", {"ip": ip, "count": entry["count"]})
+
+ failed_logins[ip] = entry
+ return JSONResponse({"success": False, "error": "Falsches Passwort"}, status_code=HTTP_401_UNAUTHORIZED)
+
+
+# Logout: HttpOnly-Cookie serverseitig löschen und Token invalidieren
+@router.post("/api/admin/logout")
+async def admin_logout(request: Request):
+ token = request.cookies.get("admin_token")
+ if token and token in admin_tokens:
+ try:
+ del admin_tokens[token]
+ except KeyError:
+ pass
+ response = JSONResponse({"success": True})
+ response.delete_cookie("admin_token", path="/")
+ return response
+
+# Begrüßungstext laden (öffentlich lesbar)
+@router.get("/api/admin/greeting")
+def get_greeting_text(request: Request, session: Session = Depends(get_session)):
+ # Kein admin_required - jeder darf den Begrüßungstext lesen
+ setting = session.exec(select(Setting).where(Setting.key == "greeting_text")).first()
+ return {"greeting_text": setting.value if setting else ""}
+
+# Begrüßungstext speichern
+@router.post("/api/admin/greeting")
+async def set_greeting_text(request: Request, session: Session = Depends(get_session)):
+ admin_required(request)
+ data = await request.json()
+ text = data.get("greeting_text", "")
+ audit("admin_greeting_set", {"ip": client_ip(request), "len": len(text or "")})
+ setting = session.exec(select(Setting).where(Setting.key == "greeting_text")).first()
+ if setting:
+ setting.value = text
+ else:
+ setting = Setting(key="greeting_text", value=text)
+ session.add(setting)
+ session.commit()
+ return {"success": True}
+
+# Welcome-Status abfragen (globales Flag für Popup)
+@router.get("/api/admin/welcome-status")
+def get_welcome_status(session: Session = Depends(get_session)):
+ # Kein admin_required - jeder darf den Welcome-Status lesen
+ setting = session.exec(select(Setting).where(Setting.key == "welcome_shown")).first()
+ return {"welcome_shown": setting.value == "true" if setting else False}
+
+# Welcome-Status setzen (nach erstem Popup)
+@router.post("/api/admin/welcome-status")
+async def set_welcome_status(request: Request, session: Session = Depends(get_session)):
+ # Kein admin_required - jeder darf setzen (nur einmal beim ersten Besuch)
+ data = await request.json()
+ shown = data.get("shown", True)
+ setting = session.exec(select(Setting).where(Setting.key == "welcome_shown")).first()
+ if setting:
+ setting.value = "true" if shown else "false"
+ else:
+ setting = Setting(key="welcome_shown", value="true" if shown else "false")
+ session.add(setting)
+ session.commit()
+ audit("welcome_status_set", {"ip": client_ip(request), "shown": shown})
+ return {"success": True}
+
+# App-Version (Sidebar) laden - NUR ENV
+@router.get("/api/admin/app-version")
+def get_app_version():
+ env_version = os.environ.get("APP_VERSION")
+ if env_version:
+ return {"app_version": env_version}
+ return {"app_version": "Alpha v1 · FilamentHub"}
+
+# Design-Version (User-Menü) laden - NUR ENV
+@router.get("/api/admin/design-version")
+def get_design_version():
+ env_version = os.environ.get("DESIGN_VERSION")
+ if env_version:
+ return {"design_version": env_version}
+ return {"design_version": "Design Alpha-0.1"}
+
+# User-Flag abfragen (ob Popup schon gesehen)
+@router.get("/api/user/flag/{user_id}/{flag}")
+def get_user_flag(user_id: str, flag: str, session: Session = Depends(get_session)):
+ userflag = session.exec(select(UserFlag).where(UserFlag.user_id == user_id, UserFlag.flag == flag)).first()
+ audit("admin_userflag_get", {"user_id": user_id, "flag": flag})
+ return {"value": userflag.value if userflag else False}
+
+# User-Flag setzen (z.B. nach erstem Popup)
+@router.post("/api/user/flag/{user_id}/{flag}")
+async def set_user_flag(user_id: str, flag: str, request: Request, session: Session = Depends(get_session)):
+ data = await request.json()
+ value = data.get("value", True)
+ userflag = session.exec(select(UserFlag).where(UserFlag.user_id == user_id, UserFlag.flag == flag)).first()
+ if userflag:
+ userflag.value = value
+ else:
+ userflag = UserFlag(user_id=user_id, flag=flag, value=value)
+ session.add(userflag)
+ session.commit()
+ audit("admin_userflag_set", {"user_id": user_id, "flag": flag, "value": bool(value)})
+ return {"success": True}
+
+# Reset Pro-Mode Warnung (Admin)
+@router.post("/api/admin/debug/reset-pro-mode")
+def reset_pro_mode_warning(request: Request, session: Session = Depends(get_session)):
+ """Setzt die Pro-Mode Warnung zurück (Admin-Funktion)"""
+ token = request.cookies.get("admin_token")
+ _cleanup_expired_tokens()
+ if not is_token_active(token):
+ audit("admin_reset_promode_denied", {"ip": client_ip(request)})
+ raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Nicht authentifiziert")
+
+ setting = session.get(Setting, "debug.pro_mode_accepted")
+ if setting:
+ session.delete(setting)
+ session.commit()
+ audit("admin_reset_promode", {"ip": client_ip(request), "previous_value": setting.value})
+ logging.getLogger("app").info("Pro-Mode Warnung wurde vom Admin zurückgesetzt")
+ return {"success": True, "message": "Pro-Mode Warnung wurde zurückgesetzt"}
+ else:
+ audit("admin_reset_promode", {"ip": client_ip(request), "previous_value": None})
+ return {"success": True, "message": "Pro-Mode Warnung war bereits zurückgesetzt"}
diff --git a/app/routes/ams_routes.py b/app/routes/ams_routes.py
new file mode 100644
index 0000000..5246792
--- /dev/null
+++ b/app/routes/ams_routes.py
@@ -0,0 +1,25 @@
+from fastapi import APIRouter, HTTPException
+from typing import Any
+import logging
+
+import app.services.live_state as live_state_module
+from app.services.ams_normalizer import normalize_live_state, normalize_device
+
+router = APIRouter(prefix="/api/ams", tags=["AMS"])
+logger = logging.getLogger(__name__)
+
+
+@router.get("/")
+async def list_ams() -> Any:
+ logger.debug("Listing normalized AMS live state")
+ live = live_state_module.get_all_live_state()
+ return normalize_live_state(live)
+
+
+@router.get("/{device}")
+async def get_ams_device(device: str) -> Any:
+ logger.debug("Getting normalized AMS for device %s", device)
+ st = live_state_module.get_live_state(device)
+ if not st:
+ raise HTTPException(status_code=404, detail="Live state not found")
+ return normalize_device(st)
diff --git a/app/routes/bambu_routes.py b/app/routes/bambu_routes.py
new file mode 100644
index 0000000..d2d038e
--- /dev/null
+++ b/app/routes/bambu_routes.py
@@ -0,0 +1,38 @@
+from fastapi import APIRouter, HTTPException
+import json
+
+# Nutzt den in mqtt_routes gefüllten Message-Puffer
+try:
+ from app.routes.mqtt_routes import message_buffer, MQTTMessage # type: ignore
+except Exception:
+ # Fallback, falls mqtt_routes den Import verweigert
+ message_buffer = []
+ MQTTMessage = None # type: ignore
+
+router = APIRouter(prefix="/api/bambu", tags=["Bambu"])
+
+
+@router.get("/ams/latest")
+def get_latest_ams():
+ """
+ Liefert die letzte MQTT-Nachricht zum AMS (Topic enthält '/ams') aus dem lokalen Puffer.
+ Nur als schneller Status-Snapshot gedacht; benötigt laufenden MQTT-Listener.
+ """
+ # Suche nach der letzten AMS-Nachricht (rückwärts durchs Buffer)
+ for msg in reversed(message_buffer):
+ if "/ams" in (msg.topic or ""):
+ parsed = None
+ try:
+ parsed = json.loads(msg.payload)
+ except Exception:
+ parsed = None
+
+ return {
+ "found": True,
+ "topic": msg.topic,
+ "timestamp": msg.timestamp,
+ "raw_payload": msg.payload,
+ "parsed": parsed,
+ }
+
+ raise HTTPException(status_code=404, detail="Keine AMS-MQTT-Nachricht im Puffer gefunden")
diff --git a/app/routes/config_routes.py b/app/routes/config_routes.py
new file mode 100644
index 0000000..21b09aa
--- /dev/null
+++ b/app/routes/config_routes.py
@@ -0,0 +1,658 @@
+import json
+import logging
+from copy import deepcopy
+from pathlib import Path
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import Session, select
+from app.database import get_session
+from app.logging.runtime import reconfigure_logging
+from app.models.settings import Setting
+
+
+router = APIRouter()
+logger = logging.getLogger(__name__)
+
+
+DEFAULT_CONFIG = {
+ "name": "FilamentHub",
+ "version": "0.0.0",
+ "debug": {
+ "system_health": {
+ "enabled": True,
+ "warn_latency_ms": 600,
+ "error_latency_ms": 1200,
+ },
+ "runtime": {
+ "enabled": True,
+ "poll_interval_ms": 2000,
+ },
+ },
+ "logging": {
+ "enabled": True,
+ "level": "DEBUG",
+ "keep_days": 14,
+ "max_size_mb": 10,
+ "backup_count": 3,
+ "modules": {
+ "app": {"enabled": True},
+ "bambu": {"enabled": True},
+ "errors": {"enabled": True},
+ "klipper": {"enabled": False},
+ "mqtt": {"enabled": False},
+ },
+ },
+ "logging_status": {},
+ "scanner": {
+ "pro": {
+ "deep_probe": False,
+ "fingerprint_enabled": False,
+ }
+ },
+ "fingerprint": {
+ "enabled": False,
+ "ports": [8883, 6000, 7125],
+ "timeout_ms": 1500,
+ },
+ "json_inspector": {
+ "max_size_mb": 5,
+ "max_depth": 50,
+ "allow_override": False,
+ },
+}
+
+
+def _merge_dict(defaults: dict, override: dict) -> dict:
+ merged = deepcopy(defaults)
+ if not isinstance(override, dict):
+ return merged
+ for key, value in override.items():
+ if isinstance(value, dict) and isinstance(merged.get(key), dict):
+ merged[key] = _merge_dict(merged.get(key, {}), value)
+ else:
+ merged[key] = value
+ return merged
+
+
+TRUE_VALUES = {"1", "true", "yes", "on"}
+FALSE_VALUES = {"0", "false", "no", "off"}
+
+
+def _to_bool(val, default: bool, key: str):
+ if isinstance(val, bool):
+ return val
+ if isinstance(val, str):
+ v = val.strip().lower()
+ if v in TRUE_VALUES:
+ return True
+ if v in FALSE_VALUES:
+ return False
+ logger.warning("Config fallback applied for %s", key)
+ return default
+
+
+def _to_int(val, default: int, key: str):
+ try:
+ return int(val)
+ except Exception:
+ logger.warning("Config fallback applied for %s", key)
+ return default
+
+
+def _get_ports_from_str(val, default_ports: list[int]) -> list[int]:
+ if isinstance(val, list):
+ raw = val
+ elif isinstance(val, str):
+ raw = [p.strip() for p in val.split(",")]
+ else:
+ raw = []
+ ports_valid = []
+ for p in raw:
+ try:
+ port_int = int(p)
+ if 1 <= port_int <= 65535:
+ ports_valid.append(port_int)
+ except Exception:
+ continue
+ return ports_valid or default_ports
+
+
+def _persist_setting(session: Session, key: str, value: str, overwrite: bool = False) -> None:
+ existing = session.exec(select(Setting).where(Setting.key == key)).first()
+ if existing:
+ if overwrite:
+ existing.value = value
+ else:
+ return
+ else:
+ session.add(Setting(key=key, value=value))
+ session.commit()
+
+
+def _ensure_settings_seed(session: Session, merged: dict) -> None:
+ runtime = merged.get("debug", {}).get("runtime", {})
+ scanner_pro = merged.get("scanner", {}).get("pro", {})
+ fp = merged.get("fingerprint", {})
+ log_cfg = merged.get("logging", {})
+ health_cfg = merged.get("debug", {}).get("system_health", {})
+ json_inspector = merged.get("json_inspector", {})
+ seeds = {
+ "debug.system_health.enabled": "true" if health_cfg.get("enabled", True) else "false",
+ "debug.system_health.warn_latency_ms": str(health_cfg.get("warn_latency_ms", 600)),
+ "debug.system_health.error_latency_ms": str(health_cfg.get("error_latency_ms", 1200)),
+ "debug.runtime.enabled": "true" if runtime.get("enabled", True) else "false",
+ "debug.runtime.poll_interval_ms": str(runtime.get("poll_interval_ms", 2000)),
+ "scanner.pro.deep_probe": "true" if scanner_pro.get("deep_probe", False) else "false",
+ "scanner.pro.fingerprint_enabled": "true" if scanner_pro.get("fingerprint_enabled", False) else "false",
+ "fingerprint.enabled": "true" if fp.get("enabled", False) else "false",
+ "fingerprint.timeout_ms": str(fp.get("timeout_ms", 1500)),
+ "fingerprint.ports": json.dumps(fp.get("ports", DEFAULT_CONFIG["fingerprint"]["ports"])),
+ "logging.enabled": "true" if log_cfg.get("enabled", DEFAULT_CONFIG["logging"]["enabled"]) else "false",
+ "logging.level": log_cfg.get("level", DEFAULT_CONFIG["logging"]["level"]),
+ "logging.keep_days": str(log_cfg.get("keep_days", DEFAULT_CONFIG["logging"]["keep_days"])),
+ "logging.max_size_mb": str(log_cfg.get("max_size_mb", DEFAULT_CONFIG["logging"]["max_size_mb"])),
+ "logging.backup_count": str(log_cfg.get("backup_count", DEFAULT_CONFIG["logging"]["backup_count"])),
+ "json_inspector.max_size_mb": str(json_inspector.get("max_size_mb", DEFAULT_CONFIG["json_inspector"]["max_size_mb"])),
+ "json_inspector.max_depth": str(json_inspector.get("max_depth", DEFAULT_CONFIG["json_inspector"]["max_depth"])),
+ "json_inspector.allow_override": "true" if json_inspector.get("allow_override", False) else "false",
+ }
+ modules_cfg = log_cfg.get("modules", DEFAULT_CONFIG["logging"]["modules"])
+ if not isinstance(modules_cfg, dict):
+ modules_cfg = DEFAULT_CONFIG["logging"]["modules"]
+ for module_name, module_def in DEFAULT_CONFIG["logging"]["modules"].items():
+ raw_entry = modules_cfg.get(module_name, module_def)
+ if isinstance(raw_entry, dict):
+ module_enabled = raw_entry.get("enabled", module_def["enabled"])
+ else:
+ module_enabled = raw_entry
+ seeds[f"logging.modules.{module_name}"] = "true" if module_enabled else "false"
+ for k, v in seeds.items():
+ _persist_setting(session, k, v, overwrite=False)
+
+
+def _validate_config(raw: dict) -> dict:
+ cfg = _merge_dict(DEFAULT_CONFIG, raw if isinstance(raw, dict) else {})
+
+ # System Health
+ sh = cfg.get("debug", {}).get("system_health", {})
+ enabled = _to_bool(sh.get("enabled"), DEFAULT_CONFIG["debug"]["system_health"]["enabled"], "debug.system_health.enabled")
+ warn_latency = _to_int(sh.get("warn_latency_ms"), DEFAULT_CONFIG["debug"]["system_health"]["warn_latency_ms"], "debug.system_health.warn_latency_ms")
+ if warn_latency < 100:
+ logger.warning("Config fallback applied for debug.system_health.warn_latency_ms")
+ warn_latency = DEFAULT_CONFIG["debug"]["system_health"]["warn_latency_ms"]
+ error_latency = _to_int(
+ sh.get("error_latency_ms"),
+ DEFAULT_CONFIG["debug"]["system_health"]["error_latency_ms"],
+ "debug.system_health.error_latency_ms",
+ )
+ min_error = max(warn_latency + 100, DEFAULT_CONFIG["debug"]["system_health"]["error_latency_ms"])
+ if error_latency <= warn_latency:
+ logger.warning("Config fallback applied for debug.system_health.error_latency_ms")
+ error_latency = min_error
+ cfg["debug"]["system_health"] = {
+ "enabled": enabled,
+ "warn_latency_ms": warn_latency,
+ "error_latency_ms": error_latency,
+ }
+
+ # Logging
+ log_cfg = cfg.get("logging", {})
+ logging_enabled = _to_bool(log_cfg.get("enabled"), DEFAULT_CONFIG["logging"]["enabled"], "logging.enabled")
+ level_raw = (log_cfg.get("level") or "").upper()
+ if level_raw not in {"DEBUG", "INFO", "WARNING", "ERROR"}:
+ logger.warning("Config fallback applied for logging.level")
+ level_raw = DEFAULT_CONFIG["logging"]["level"]
+ keep_days = _to_int(log_cfg.get("keep_days"), DEFAULT_CONFIG["logging"]["keep_days"], "logging.keep_days")
+ if keep_days < 1:
+ logger.warning("Config fallback applied for logging.keep_days")
+ keep_days = DEFAULT_CONFIG["logging"]["keep_days"]
+ max_size_mb = _to_int(log_cfg.get("max_size_mb"), DEFAULT_CONFIG["logging"]["max_size_mb"], "logging.max_size_mb")
+ if max_size_mb < 1:
+ logger.warning("Config fallback applied for logging.max_size_mb")
+ max_size_mb = DEFAULT_CONFIG["logging"]["max_size_mb"]
+ backup_count = _to_int(log_cfg.get("backup_count"), DEFAULT_CONFIG["logging"]["backup_count"], "logging.backup_count")
+ if backup_count < 1:
+ logger.warning("Config fallback applied for logging.backup_count")
+ backup_count = DEFAULT_CONFIG["logging"]["backup_count"]
+ modules_input = log_cfg.get("modules", {})
+ if not isinstance(modules_input, dict):
+ modules_input = {}
+ normalized_modules = {}
+ for module_name, module_def in DEFAULT_CONFIG["logging"]["modules"].items():
+ raw_entry = modules_input.get(module_name, module_def)
+ if isinstance(raw_entry, dict):
+ module_value = raw_entry.get("enabled")
+ else:
+ module_value = raw_entry
+ module_enabled = _to_bool(module_value, module_def["enabled"], f"logging.modules.{module_name}.enabled")
+ normalized_modules[module_name] = {"enabled": module_enabled}
+ cfg["logging"] = {
+ "enabled": logging_enabled,
+ "level": level_raw,
+ "keep_days": keep_days,
+ "max_size_mb": max_size_mb,
+ "backup_count": backup_count,
+ "modules": normalized_modules,
+ }
+
+ if isinstance(raw, dict) and "logging" in raw and isinstance(raw["logging"], dict):
+ incoming_modules = raw["logging"].get("modules")
+ if isinstance(incoming_modules, dict):
+ determined_modules = {}
+ for module_name in cfg["logging"]["modules"].keys():
+ incoming_entry = incoming_modules.get(module_name, {})
+ if isinstance(incoming_entry, dict):
+ candidate = incoming_entry.get("enabled", cfg["logging"]["modules"][module_name]["enabled"])
+ else:
+ candidate = incoming_entry
+ determined_modules[module_name] = {
+ "enabled": bool(candidate if candidate is not None else cfg["logging"]["modules"][module_name]["enabled"])
+ }
+ cfg["logging"]["modules"] = determined_modules
+ cfg["logging_status"] = {}
+
+ # Runtime
+ rt = cfg.get("debug", {}).get("runtime", {})
+ runtime_enabled = _to_bool(rt.get("enabled"), DEFAULT_CONFIG["debug"]["runtime"]["enabled"], "debug.runtime.enabled")
+ poll_interval = _to_int(rt.get("poll_interval_ms"), DEFAULT_CONFIG["debug"]["runtime"]["poll_interval_ms"], "debug.runtime.poll_interval_ms")
+ if poll_interval < 500:
+ logger.warning("Config fallback applied for debug.runtime.poll_interval_ms")
+ poll_interval = DEFAULT_CONFIG["debug"]["runtime"]["poll_interval_ms"]
+ cfg["debug"]["runtime"] = {
+ "enabled": runtime_enabled,
+ "poll_interval_ms": poll_interval,
+ }
+
+ # Scanner Pro
+ scanner_pro = cfg.get("scanner", {}).get("pro", {})
+ deep_probe = _to_bool(scanner_pro.get("deep_probe"), DEFAULT_CONFIG["scanner"]["pro"]["deep_probe"], "scanner.pro.deep_probe")
+ fingerprint_enabled = _to_bool(
+ scanner_pro.get("fingerprint_enabled"),
+ DEFAULT_CONFIG["scanner"]["pro"]["fingerprint_enabled"],
+ "scanner.pro.fingerprint_enabled",
+ )
+ cfg["scanner"]["pro"] = {
+ "deep_probe": deep_probe,
+ "fingerprint_enabled": fingerprint_enabled,
+ }
+
+ # Fingerprint intern
+ fp_cfg = cfg.get("fingerprint", {})
+ fp_enabled = _to_bool(fp_cfg.get("enabled"), DEFAULT_CONFIG["fingerprint"]["enabled"], "fingerprint.enabled")
+ ports_raw = fp_cfg.get("ports")
+ default_ports = DEFAULT_CONFIG["fingerprint"]["ports"]
+ ports_valid = []
+ if isinstance(ports_raw, list):
+ for p in ports_raw:
+ try:
+ port_int = int(p)
+ if 1 <= port_int <= 65535:
+ ports_valid.append(port_int)
+ except Exception:
+ continue
+ if not ports_valid:
+ logger.warning("Config fallback applied for fingerprint.ports")
+ ports_valid = default_ports
+ timeout_ms = _to_int(fp_cfg.get("timeout_ms"), DEFAULT_CONFIG["fingerprint"]["timeout_ms"], "fingerprint.timeout_ms")
+ if timeout_ms < 500:
+ logger.warning("Config fallback applied for fingerprint.timeout_ms")
+ timeout_ms = DEFAULT_CONFIG["fingerprint"]["timeout_ms"]
+ cfg["fingerprint"] = {
+ "enabled": fp_enabled,
+ "ports": ports_valid,
+ "timeout_ms": timeout_ms,
+ }
+
+ # JSON Inspector
+ json_cfg = cfg.get("json_inspector", {})
+ max_size_mb = _to_int(json_cfg.get("max_size_mb"), DEFAULT_CONFIG["json_inspector"]["max_size_mb"], "json_inspector.max_size_mb")
+ if max_size_mb < 1 or max_size_mb > 100:
+ logger.warning("Config fallback applied for json_inspector.max_size_mb")
+ max_size_mb = DEFAULT_CONFIG["json_inspector"]["max_size_mb"]
+ max_depth = _to_int(json_cfg.get("max_depth"), DEFAULT_CONFIG["json_inspector"]["max_depth"], "json_inspector.max_depth")
+ if max_depth < 1 or max_depth > 500:
+ logger.warning("Config fallback applied for json_inspector.max_depth")
+ max_depth = DEFAULT_CONFIG["json_inspector"]["max_depth"]
+ allow_override = _to_bool(json_cfg.get("allow_override"), DEFAULT_CONFIG["json_inspector"]["allow_override"], "json_inspector.allow_override")
+ cfg["json_inspector"] = {
+ "max_size_mb": max_size_mb,
+ "max_depth": max_depth,
+ "allow_override": allow_override,
+ }
+
+ # Derived block for UI (back-compat, read-only)
+ cfg["config_manager"] = {
+ "health_enabled": cfg["debug"]["system_health"]["enabled"],
+ "health_latency_warn_ms": cfg["debug"]["system_health"]["warn_latency_ms"],
+ "health_latency_error_ms": cfg["debug"]["system_health"]["error_latency_ms"],
+ "log_level": cfg["logging"]["level"],
+ "log_to_file": cfg["logging"]["enabled"],
+ "runtime_enabled": cfg["debug"]["runtime"]["enabled"],
+ "runtime_poll_interval_ms": cfg["debug"]["runtime"]["poll_interval_ms"],
+ "scanner_deep_probe": cfg["scanner"]["pro"]["deep_probe"],
+ "scanner_fingerprint": cfg["scanner"]["pro"]["fingerprint_enabled"],
+ }
+
+ return cfg
+
+
+def _settings_map(session: Session) -> dict:
+ data = {}
+ for row in session.exec(select(Setting)).all():
+ data[row.key] = row.value
+ return data
+
+
+def _load_config(session: Session | None = None) -> dict:
+ config_path = Path(__file__).resolve().parents[2] / "config.json"
+ if not config_path.exists():
+ return deepcopy(DEFAULT_CONFIG)
+ try:
+ data = json.loads(config_path.read_text(encoding="utf-8"))
+ merged = _validate_config(data)
+ if session:
+ settings = _settings_map(session)
+ # Overlay runtime settings from DB if present
+ runtime_enabled = _to_bool(
+ settings.get("debug.runtime.enabled"),
+ merged["debug"]["runtime"]["enabled"],
+ "debug.runtime.enabled",
+ )
+ runtime_poll = _to_int(
+ settings.get("debug.runtime.poll_interval_ms"),
+ merged["debug"]["runtime"]["poll_interval_ms"],
+ "debug.runtime.poll_interval_ms",
+ )
+ if runtime_poll < 500:
+ runtime_poll = DEFAULT_CONFIG["debug"]["runtime"]["poll_interval_ms"]
+ merged["debug"]["runtime"] = {"enabled": runtime_enabled, "poll_interval_ms": runtime_poll}
+
+ # System health overlay
+ health_enabled = _to_bool(
+ settings.get("debug.system_health.enabled"),
+ merged["debug"]["system_health"]["enabled"],
+ "debug.system_health.enabled",
+ )
+ warn_latency = _to_int(
+ settings.get("debug.system_health.warn_latency_ms"),
+ merged["debug"]["system_health"]["warn_latency_ms"],
+ "debug.system_health.warn_latency_ms",
+ )
+ if warn_latency < 100:
+ warn_latency = merged["debug"]["system_health"]["warn_latency_ms"]
+ error_latency = _to_int(
+ settings.get("debug.system_health.error_latency_ms"),
+ merged["debug"]["system_health"]["error_latency_ms"],
+ "debug.system_health.error_latency_ms",
+ )
+ min_error = max(warn_latency + 100, DEFAULT_CONFIG["debug"]["system_health"]["error_latency_ms"])
+ if error_latency <= warn_latency:
+ error_latency = min_error
+ merged["debug"]["system_health"] = {
+ "enabled": health_enabled,
+ "warn_latency_ms": warn_latency,
+ "error_latency_ms": error_latency,
+ }
+
+ deep_probe = _to_bool(
+ settings.get("scanner.pro.deep_probe"), merged["scanner"]["pro"]["deep_probe"], "scanner.pro.deep_probe"
+ )
+ fp_enabled = _to_bool(
+ settings.get("scanner.pro.fingerprint_enabled"),
+ merged["scanner"]["pro"]["fingerprint_enabled"],
+ "scanner.pro.fingerprint_enabled",
+ )
+ merged["scanner"]["pro"] = {"deep_probe": deep_probe, "fingerprint_enabled": fp_enabled}
+
+ fp_enabled_setting = _to_bool(
+ settings.get("fingerprint.enabled"), merged["fingerprint"]["enabled"], "fingerprint.enabled"
+ )
+ fp_ports = _get_ports_from_str(settings.get("fingerprint.ports"), merged["fingerprint"]["ports"])
+ fp_timeout = _to_int(
+ settings.get("fingerprint.timeout_ms"), merged["fingerprint"]["timeout_ms"], "fingerprint.timeout_ms"
+ )
+ if fp_timeout < 500:
+ fp_timeout = merged["fingerprint"]["timeout_ms"]
+ merged["fingerprint"] = {"enabled": fp_enabled_setting, "ports": fp_ports, "timeout_ms": fp_timeout}
+
+ # Logging overlay
+ log_cfg = merged.get("logging", {})
+ enabled_setting = settings.get("logging.enabled")
+ if enabled_setting is not None:
+ log_cfg["enabled"] = _to_bool(enabled_setting, log_cfg["enabled"], "logging.enabled")
+ level_setting = settings.get("logging.level")
+ if isinstance(level_setting, str):
+ level_upper = level_setting.upper()
+ if level_upper in {"DEBUG", "INFO", "WARNING", "ERROR"}:
+ log_cfg["level"] = level_upper
+ keep_days_setting = _to_int(settings.get("logging.keep_days"), log_cfg.get("keep_days", DEFAULT_CONFIG["logging"]["keep_days"]), "logging.keep_days")
+ if keep_days_setting < 1:
+ keep_days_setting = log_cfg.get("keep_days", DEFAULT_CONFIG["logging"]["keep_days"])
+ log_cfg["keep_days"] = keep_days_setting
+ max_size_setting = _to_int(settings.get("logging.max_size_mb"), log_cfg.get("max_size_mb", DEFAULT_CONFIG["logging"]["max_size_mb"]), "logging.max_size_mb")
+ if max_size_setting < 1:
+ max_size_setting = log_cfg.get("max_size_mb", DEFAULT_CONFIG["logging"]["max_size_mb"])
+ log_cfg["max_size_mb"] = max_size_setting
+ backup_count_setting = _to_int(settings.get("logging.backup_count"), log_cfg.get("backup_count", DEFAULT_CONFIG["logging"]["backup_count"]), "logging.backup_count")
+ if backup_count_setting < 1:
+ backup_count_setting = log_cfg.get("backup_count", DEFAULT_CONFIG["logging"]["backup_count"])
+ log_cfg["backup_count"] = backup_count_setting
+ modules_cfg = log_cfg.get("modules", {})
+ if not isinstance(modules_cfg, dict):
+ modules_cfg = {}
+ for module_name, module_def in DEFAULT_CONFIG["logging"]["modules"].items():
+ key = f"logging.modules.{module_name}"
+ module_enabled = _to_bool(settings.get(key), modules_cfg.get(module_name, {}).get("enabled", module_def["enabled"]), key)
+ modules_cfg.setdefault(module_name, {})["enabled"] = module_enabled
+ log_cfg["modules"] = modules_cfg
+ merged["logging"] = log_cfg
+
+ # JSON Inspector overlay
+ json_max_size = _to_int(
+ settings.get("json_inspector.max_size_mb"),
+ merged["json_inspector"]["max_size_mb"],
+ "json_inspector.max_size_mb"
+ )
+ if json_max_size < 1 or json_max_size > 100:
+ json_max_size = merged["json_inspector"]["max_size_mb"]
+ json_max_depth = _to_int(
+ settings.get("json_inspector.max_depth"),
+ merged["json_inspector"]["max_depth"],
+ "json_inspector.max_depth"
+ )
+ if json_max_depth < 1 or json_max_depth > 500:
+ json_max_depth = merged["json_inspector"]["max_depth"]
+ json_allow_override = _to_bool(
+ settings.get("json_inspector.allow_override"),
+ merged["json_inspector"]["allow_override"],
+ "json_inspector.allow_override"
+ )
+ merged["json_inspector"] = {
+ "max_size_mb": json_max_size,
+ "max_depth": json_max_depth,
+ "allow_override": json_allow_override,
+ }
+
+ merged["config_manager"] = {
+ "health_enabled": merged["debug"]["system_health"]["enabled"],
+ "health_latency_warn_ms": merged["debug"]["system_health"]["warn_latency_ms"],
+ "health_latency_error_ms": merged["debug"]["system_health"]["error_latency_ms"],
+ "log_level": merged["logging"]["level"],
+ "log_to_file": merged["logging"]["enabled"],
+ "runtime_enabled": merged["debug"]["runtime"]["enabled"],
+ "runtime_poll_interval_ms": merged["debug"]["runtime"]["poll_interval_ms"],
+ "scanner_deep_probe": merged["scanner"]["pro"]["deep_probe"],
+ "scanner_fingerprint": merged["scanner"]["pro"]["fingerprint_enabled"],
+ }
+ _ensure_settings_seed(session, merged)
+ return merged
+ except Exception:
+ logger.warning("Config fallback applied for config.json")
+ return deepcopy(DEFAULT_CONFIG)
+
+
+@router.get("/api/config/current")
+async def get_current_config(session: Session = Depends(get_session)):
+ """
+ Read-only config export for the Config Manager (Pro).
+ """
+ return _load_config(session)
+
+
+# GET alias for /api/config (same payload as /current)
+@router.get("/api/config")
+async def get_config_alias(session: Session = Depends(get_session)):
+ return await get_current_config(session)
+
+
+def _validate_payload(payload: dict) -> dict:
+ data = payload or {}
+ out = {}
+ if "logging" in data and isinstance(data.get("logging"), dict):
+ logging_payload = data["logging"]
+ flat_modules = {}
+ if isinstance(logging_payload.get("modules"), dict):
+ flat_modules = {
+ f"logging.modules.{module_name}": logging_payload["modules"].get(module_name)
+ for module_name in DEFAULT_CONFIG["logging"]["modules"].keys()
+ }
+ data = {
+ **data,
+ "logging.enabled": logging_payload.get("enabled"),
+ "logging.level": logging_payload.get("level"),
+ "logging.keep_days": logging_payload.get("keep_days"),
+ "logging.max_size_mb": logging_payload.get("max_size_mb"),
+ "logging.backup_count": logging_payload.get("backup_count"),
+ **flat_modules,
+ }
+ if "debug.system_health" in data and isinstance(data.get("debug.system_health"), dict):
+ data = {
+ **data,
+ "debug.system_health.enabled": data["debug.system_health"].get("enabled"),
+ "debug.system_health.warn_latency_ms": data["debug.system_health"].get("warn_latency_ms"),
+ "debug.system_health.error_latency_ms": data["debug.system_health"].get("error_latency_ms"),
+ }
+ if "debug.system_health.enabled" in data:
+ out["debug.system_health.enabled"] = _to_bool(data.get("debug.system_health.enabled"), True, "debug.system_health.enabled")
+ if "debug.system_health.warn_latency_ms" in data:
+ warn = _to_int(data.get("debug.system_health.warn_latency_ms"), 600, "debug.system_health.warn_latency_ms")
+ if warn < 100:
+ warn = 600
+ out["debug.system_health.warn_latency_ms"] = warn
+ if "debug.system_health.error_latency_ms" in data:
+ err = _to_int(data.get("debug.system_health.error_latency_ms"), 1200, "debug.system_health.error_latency_ms")
+ warn = out.get("debug.system_health.warn_latency_ms", 600)
+ min_err = max(warn + 100, 1200)
+ if err <= warn:
+ err = min_err
+ out["debug.system_health.error_latency_ms"] = err
+
+ if "logging.enabled" in data:
+ out["logging.enabled"] = _to_bool(data.get("logging.enabled"), DEFAULT_CONFIG["logging"]["enabled"], "logging.enabled")
+ if "logging.level" in data:
+ level_raw = (data.get("logging.level") or "").upper()
+ out["logging.level"] = level_raw if level_raw in {"DEBUG", "INFO", "WARNING", "ERROR"} else DEFAULT_CONFIG["logging"]["level"]
+ if "logging.keep_days" in data:
+ keep_days_val = _to_int(data.get("logging.keep_days"), DEFAULT_CONFIG["logging"]["keep_days"], "logging.keep_days")
+ if keep_days_val < 1:
+ keep_days_val = DEFAULT_CONFIG["logging"]["keep_days"]
+ out["logging.keep_days"] = keep_days_val
+ if "logging.max_size_mb" in data:
+ max_size_val = _to_int(data.get("logging.max_size_mb"), DEFAULT_CONFIG["logging"]["max_size_mb"], "logging.max_size_mb")
+ if max_size_val < 1:
+ max_size_val = DEFAULT_CONFIG["logging"]["max_size_mb"]
+ out["logging.max_size_mb"] = max_size_val
+ if "logging.backup_count" in data:
+ backup_val = _to_int(data.get("logging.backup_count"), DEFAULT_CONFIG["logging"]["backup_count"], "logging.backup_count")
+ if backup_val < 1:
+ backup_val = DEFAULT_CONFIG["logging"]["backup_count"]
+ out["logging.backup_count"] = backup_val
+ for module_name in DEFAULT_CONFIG["logging"]["modules"].keys():
+ key = f"logging.modules.{module_name}"
+ if key in data:
+ out[key] = _to_bool(data.get(key), DEFAULT_CONFIG["logging"]["modules"][module_name]["enabled"], f"{key}")
+
+ if "debug.runtime.enabled" in data:
+ out["debug.runtime.enabled"] = _to_bool(data.get("debug.runtime.enabled"), True, "debug.runtime.enabled")
+ if "debug.runtime.poll_interval_ms" in data:
+ poll = _to_int(data.get("debug.runtime.poll_interval_ms"), 2000, "debug.runtime.poll_interval_ms")
+ if poll < 500:
+ poll = 2000
+ out["debug.runtime.poll_interval_ms"] = poll
+
+ if "scanner.pro.deep_probe" in data:
+ out["scanner.pro.deep_probe"] = _to_bool(data.get("scanner.pro.deep_probe"), False, "scanner.pro.deep_probe")
+ if "scanner.pro.fingerprint_enabled" in data:
+ out["scanner.pro.fingerprint_enabled"] = _to_bool(
+ data.get("scanner.pro.fingerprint_enabled"), False, "scanner.pro.fingerprint_enabled"
+ )
+
+ if "fingerprint.enabled" in data:
+ out["fingerprint.enabled"] = _to_bool(data.get("fingerprint.enabled"), False, "fingerprint.enabled")
+ if "fingerprint.timeout_ms" in data:
+ to_val = _to_int(data.get("fingerprint.timeout_ms"), 1500, "fingerprint.timeout_ms")
+ if to_val < 500:
+ to_val = 1500
+ out["fingerprint.timeout_ms"] = to_val
+ if "fingerprint.ports" in data:
+ ports_val = data.get("fingerprint.ports")
+ ports = []
+ if isinstance(ports_val, list):
+ ports = [int(p) for p in ports_val if isinstance(p, (int, float)) and 1 <= int(p) <= 65535]
+ else:
+ try:
+ ports = [int(x.strip()) for x in str(ports_val).split(",") if x.strip()]
+ except Exception:
+ ports = []
+ ports = [p for p in ports if 1 <= p <= 65535]
+ if not ports:
+ ports = DEFAULT_CONFIG["fingerprint"]["ports"]
+ out["fingerprint.ports"] = ports
+
+ if "json_inspector.max_size_mb" in data:
+ size_val = _to_int(data.get("json_inspector.max_size_mb"), DEFAULT_CONFIG["json_inspector"]["max_size_mb"], "json_inspector.max_size_mb")
+ if size_val < 1 or size_val > 100:
+ size_val = DEFAULT_CONFIG["json_inspector"]["max_size_mb"]
+ out["json_inspector.max_size_mb"] = size_val
+ if "json_inspector.max_depth" in data:
+ depth_val = _to_int(data.get("json_inspector.max_depth"), DEFAULT_CONFIG["json_inspector"]["max_depth"], "json_inspector.max_depth")
+ if depth_val < 1 or depth_val > 500:
+ depth_val = DEFAULT_CONFIG["json_inspector"]["max_depth"]
+ out["json_inspector.max_depth"] = depth_val
+ if "json_inspector.allow_override" in data:
+ out["json_inspector.allow_override"] = _to_bool(data.get("json_inspector.allow_override"), False, "json_inspector.allow_override")
+
+ return out
+
+
+def _persist_payload(session: Session, validated: dict) -> None:
+ for key, val in validated.items():
+ if isinstance(val, list):
+ _persist_setting(session, key, json.dumps(val), overwrite=True)
+ elif isinstance(val, bool):
+ _persist_setting(session, key, "true" if val else "false", overwrite=True)
+ else:
+ _persist_setting(session, key, str(val), overwrite=True)
+
+
+
+def _apply_logging_settings(merged: dict) -> dict:
+ statuses = reconfigure_logging(merged.get("logging", {}))
+ merged["logging_status"] = statuses
+ return statuses
+
+
+@router.put("/api/config")
+async def update_config(payload: dict, session: Session = Depends(get_session)):
+ validated = _validate_payload(payload)
+ if not validated:
+ raise HTTPException(status_code=400, detail="No valid keys provided")
+ _persist_payload(session, validated)
+ merged = _load_config(session)
+ _apply_logging_settings(merged)
+ return merged
+
+
+# alias for trailing slash
+@router.put("/api/config/")
+async def update_config_alias(payload: dict, session: Session = Depends(get_session)):
+ return await update_config(payload, session)
diff --git a/app/routes/database_routes.py b/app/routes/database_routes.py
new file mode 100644
index 0000000..0688629
--- /dev/null
+++ b/app/routes/database_routes.py
@@ -0,0 +1,1031 @@
+import os
+import logging
+import shutil
+import subprocess
+from datetime import datetime
+from pathlib import Path
+from typing import List, Dict, Any
+
+from fastapi import APIRouter, HTTPException
+from pydantic import BaseModel
+from sqlalchemy import text
+
+from app.db.session import session_scope
+from app.database import engine
+
+
+router = APIRouter(prefix="/api/database", tags=["Database"])
+logger = logging.getLogger("app")
+
+# Default DB path; tests may override this by setting module variable
+DB_PATH = "data/filamenthub.db"
+
+
+class SQLEditorRequest(BaseModel):
+ sql: str
+
+
+@router.get("/info")
+def get_database_info():
+ """Gibt Basisinformationen über die Datenbank zurück."""
+ exists = os.path.exists(DB_PATH)
+ tables: List[str] = []
+ file_stats = None
+ size_mb = None
+
+ if exists:
+ try:
+ file_stats = os.stat(DB_PATH)
+ size_mb = round(os.path.getsize(DB_PATH) / 1024 / 1024, 3)
+ except Exception as exc:
+ logger.debug("Could not stat DB file: %s", exc, exc_info=True)
+
+ try:
+ with session_scope() as session:
+ res = session.exec(
+ text("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';")
+ )
+ for row in res.all():
+ tables.append(row[0])
+ except Exception as exc:
+ logger.debug("Failed to list DB tables: %s", exc, exc_info=True)
+
+ return {
+ "exists": exists,
+ "size_mb": size_mb,
+ "tables": tables,
+ "created": getattr(file_stats, "st_ctime", None),
+ "modified": getattr(file_stats, "st_mtime", None),
+ }
+
+
+@router.get("/tables")
+def get_table_info():
+ """Gibt detaillierte Informationen über alle Tabellen zurück."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ table_info: List[Dict[str, Any]] = []
+ try:
+ with session_scope() as session:
+ res = session.exec(text("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';"))
+ names = [r[0] for r in res.all()]
+ for table in names:
+ # row count
+ cnt_res = session.exec(text(f"SELECT COUNT(*) FROM {table}"))
+ cnt_row = cnt_res.first()
+ row_count = int(cnt_row[0]) if cnt_row else 0
+
+ # columns
+ cols_res = session.exec(text(f"PRAGMA table_info({table})"))
+ columns_raw = cols_res.all()
+ columns = [
+ {
+ "name": col[1],
+ "type": col[2],
+ "not_null": bool(col[3]),
+ "primary_key": bool(col[5]),
+ }
+ for col in columns_raw
+ ]
+
+ # preview
+ preview_res = session.exec(text(f"SELECT * FROM {table} LIMIT 5"))
+ preview_rows = [tuple(r) for r in preview_res.all()]
+ preview_headers = [col[1] for col in columns_raw]
+
+ table_info.append(
+ {
+ "name": table,
+ "row_count": row_count,
+ "column_count": len(columns),
+ "columns": columns,
+ "preview": {"headers": preview_headers, "rows": preview_rows},
+ }
+ )
+ except Exception as exc:
+ logger.error("Failed to gather table info: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail="Fehler beim Lesen der Tabellen")
+
+ return {"tables": table_info}
+
+
+@router.get("/stats")
+def get_database_stats():
+ """Gibt einfache Zählstatistiken zurück."""
+ if not os.path.exists(DB_PATH):
+ return {
+ "materials_count": 0,
+ "spools_count": 0,
+ "printers_count": 0,
+ "jobs_count": 0,
+ "spools_open": 0,
+ "spools_empty": 0,
+ }
+
+ stats: Dict[str, int] = {}
+ with session_scope() as session:
+ # Materials
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM material"))
+ r = res.first()
+ stats["materials_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read material count: %s", exc, exc_info=True)
+ stats["materials_count"] = 0
+
+ # Spools
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM spool"))
+ r = res.first()
+ stats["spools_count"] = int(r[0]) if r else 0
+
+ res = session.exec(text("SELECT COUNT(*) FROM spool WHERE is_open = 1"))
+ r = res.first()
+ stats["spools_open"] = int(r[0]) if r else 0
+
+ res = session.exec(text("SELECT COUNT(*) FROM spool WHERE is_empty = 1"))
+ r = res.first()
+ stats["spools_empty"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read spool stats: %s", exc, exc_info=True)
+ stats["spools_count"] = 0
+ stats["spools_open"] = 0
+ stats["spools_empty"] = 0
+
+ # Printers
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM printer"))
+ r = res.first()
+ stats["printers_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read printers count: %s", exc, exc_info=True)
+ stats["printers_count"] = 0
+
+ # Jobs
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM job"))
+ r = res.first()
+ stats["jobs_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read jobs count: %s", exc, exc_info=True)
+ stats["jobs_count"] = 0
+
+ return stats
+
+
+@router.get("/query")
+def execute_query(sql: str):
+ """Führt eine SELECT-Query aus (nur Entwicklung)."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ if not sql.strip().upper().startswith("SELECT"):
+ raise HTTPException(status_code=403, detail="Nur SELECT Queries erlaubt")
+
+ try:
+ with session_scope() as session:
+ res = session.exec(text(sql))
+ rows = res.mappings().all()
+ result = [dict(r) for r in rows]
+ return {"success": True, "row_count": len(result), "data": result}
+ except Exception as exc:
+ logger.error("Query execution failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=400, detail=f"Query Fehler: {str(exc)}")
+
+
+@router.post("/editor")
+def execute_editor_query(payload: SQLEditorRequest):
+ """Führt nicht-SELECT SQL-Befehle aus (INSERT/UPDATE/DELETE/CREATE/ALTER/DROP)."""
+ sql = payload.sql.strip()
+ if not sql:
+ raise HTTPException(status_code=400, detail="Kein SQL-Befehl übergeben")
+
+ allowed = ("INSERT", "UPDATE", "DELETE", "CREATE", "ALTER", "DROP")
+ if not any(sql.upper().startswith(cmd) for cmd in allowed):
+ raise HTTPException(status_code=403, detail="Nur INSERT, UPDATE, DELETE, CREATE, ALTER, DROP erlaubt")
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ with session_scope() as session:
+ session.exec(text(sql))
+ session.commit()
+ return {"success": True, "message": "Befehl erfolgreich ausgeführt"}
+ except Exception as exc:
+ logger.error("Editor query failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=400, detail=f"Query Fehler: {str(exc)}")
+
+
+@router.post("/vacuum")
+def vacuum_database():
+ """Führt VACUUM aus und liefert Vorher/Nachher-Größe."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ size_before = os.path.getsize(DB_PATH)
+ # VACUUM outside explicit transaction
+ with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
+ conn.exec_driver_sql("VACUUM")
+ size_after = os.path.getsize(DB_PATH)
+ saved_kb = round((size_before - size_after) / 1024, 2)
+ return {
+ "success": True,
+ "message": "Datenbank optimiert",
+ "size_before_mb": round(size_before / 1024 / 1024, 3),
+ "size_after_mb": round(size_after / 1024 / 1024, 3),
+ "saved_kb": saved_kb,
+ }
+ except Exception as exc:
+ logger.error("VACUUM failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"VACUUM Fehler: {str(exc)}")
+
+
+@router.post("/backup")
+def backup_database():
+ """Erstellt ein Dateisystem-Backup der DB (kopiert die Datei)."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ backup_dir = "data/backups"
+ os.makedirs(backup_dir, exist_ok=True)
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ backup_path = os.path.join(backup_dir, f"filamenthub_backup_{timestamp}.db")
+ shutil.copy2(DB_PATH, backup_path)
+ backup_size = os.path.getsize(backup_path)
+ return {
+ "success": True,
+ "message": "Backup erstellt",
+ "backup_path": os.path.abspath(backup_path),
+ "backup_size_mb": round(backup_size / 1024 / 1024, 3),
+ "timestamp": timestamp,
+ }
+ except Exception as exc:
+ logger.error("Backup failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"Backup Fehler: {str(exc)}")
+
+
+@router.get("/backups/list")
+def list_backups():
+ backup_dir = "data/backups"
+ if not os.path.exists(backup_dir):
+ return {"backups": [], "count": 0}
+
+ backups = []
+ try:
+ for file in os.listdir(backup_dir):
+ if file.endswith('.db'):
+ file_path = os.path.join(backup_dir, file)
+ file_stats = os.stat(file_path)
+ backups.append({
+ "filename": file,
+ "path": os.path.abspath(file_path),
+ "size_mb": round(file_stats.st_size / 1024 / 1024, 3),
+ "created": file_stats.st_ctime,
+ })
+ backups.sort(key=lambda x: x["created"], reverse=True)
+ except Exception as exc:
+ logger.error("Failed to list backups: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail="Fehler beim Lesen der Backups")
+
+ return {"backups": backups, "count": len(backups)}
+
+
+@router.post("/migrate")
+def migrate_database():
+ """Führt Alembic-Migrationen (upgrade head) aus."""
+ project_root = Path(__file__).resolve().parents[2]
+ venv_alembic = project_root / ".venv" / "Scripts" / "alembic.exe"
+ cmd = [str(venv_alembic if venv_alembic.exists() else "alembic"), "upgrade", "head"]
+ try:
+ result = subprocess.run(cmd, capture_output=True, text=True, check=True, cwd=project_root)
+ return {"success": True, "message": "Migration erfolgreich", "stdout": result.stdout, "stderr": result.stderr}
+ except FileNotFoundError:
+ raise HTTPException(status_code=500, detail="Alembic CLI nicht gefunden")
+ except subprocess.CalledProcessError as exc:
+ output = (exc.stdout or "") + (exc.stderr or "")
+ if "No changes detected" in output or "Keine Änderungen erkannt" in output:
+ return {"success": True, "message": "Migration übersprungen (keine Änderungen)", "stdout": exc.stdout, "stderr": exc.stderr}
+ logger.error("Migration failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"Migration fehlgeschlagen: {exc.stderr or exc.stdout}")
+
+
+@router.delete("/row")
+def delete_row(table: str, id: str):
+ """Löscht eine Zeile anhand der ID aus einer erlaubten Tabelle."""
+ allowed_tables = {"material", "spool", "printer", "job"}
+ if table not in allowed_tables:
+ raise HTTPException(status_code=400, detail="Tabelle nicht erlaubt")
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ with session_scope() as session:
+ res = session.exec(text(f"DELETE FROM {table} WHERE id = :id"), {"id": id})
+ # attempt to get affected rows; fall back to existence check
+ affected = getattr(res, "rowcount", None)
+ if affected is None:
+ # check if row still exists
+ chk = session.exec(text(f"SELECT COUNT(*) FROM {table} WHERE id = :id"), {"id": id})
+ chk_row = chk.first()
+ affected = 0 if (chk_row and int(chk_row[0]) > 0) else 1
+ session.commit()
+
+ if not affected:
+ raise HTTPException(status_code=404, detail="Kein Eintrag mit dieser ID gefunden")
+
+ return {"success": True, "message": f"Eintrag gelöscht ({table}, id={id})", "affected": affected}
+ except HTTPException:
+ raise
+ except Exception as exc:
+ logger.error("Delete error: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"Delete-Fehler: {exc}")
+
+import os
+import logging
+import shutil
+import subprocess
+from datetime import datetime
+from pathlib import Path
+from typing import List, Dict, Any
+
+from fastapi import APIRouter, HTTPException
+from pydantic import BaseModel
+from sqlalchemy import text
+
+from app.db.session import session_scope
+from app.database import engine
+
+
+router = APIRouter(prefix="/api/database", tags=["Database"])
+logger = logging.getLogger("app")
+
+# Default DB path; tests may override this by setting module variable
+DB_PATH = "data/filamenthub.db"
+
+
+class SQLEditorRequest(BaseModel):
+ sql: str
+
+
+@router.get("/info")
+def get_database_info():
+ """Gibt Basisinformationen über die Datenbank zurück."""
+ exists = os.path.exists(DB_PATH)
+ tables: List[str] = []
+ file_stats = None
+ size_mb = None
+
+ if exists:
+ try:
+ file_stats = os.stat(DB_PATH)
+ size_mb = round(os.path.getsize(DB_PATH) / 1024 / 1024, 3)
+ except Exception as exc:
+ logger.debug("Could not stat DB file: %s", exc, exc_info=True)
+
+ try:
+ with session_scope() as session:
+ res = session.exec(
+ text("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';")
+ )
+ for row in res.all():
+ tables.append(row[0])
+ except Exception as exc:
+ logger.debug("Failed to list DB tables: %s", exc, exc_info=True)
+
+ return {
+ "exists": exists,
+ "size_mb": size_mb,
+ "tables": tables,
+ "created": getattr(file_stats, "st_ctime", None),
+ "modified": getattr(file_stats, "st_mtime", None),
+ }
+
+
+@router.get("/tables")
+def get_table_info():
+ """Gibt detaillierte Informationen über alle Tabellen zurück."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ table_info: List[Dict[str, Any]] = []
+ try:
+ with session_scope() as session:
+ res = session.exec(text("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';"))
+ names = [r[0] for r in res.all()]
+ for table in names:
+ # row count
+ cnt_res = session.exec(text(f"SELECT COUNT(*) FROM {table}"))
+ cnt_row = cnt_res.first()
+ row_count = int(cnt_row[0]) if cnt_row else 0
+
+ # columns
+ cols_res = session.exec(text(f"PRAGMA table_info({table})"))
+ columns_raw = cols_res.all()
+ columns = [
+ {
+ "name": col[1],
+ "type": col[2],
+ "not_null": bool(col[3]),
+ "primary_key": bool(col[5]),
+ }
+ for col in columns_raw
+ ]
+
+ # preview
+ preview_res = session.exec(text(f"SELECT * FROM {table} LIMIT 5"))
+ preview_rows = [tuple(r) for r in preview_res.all()]
+ preview_headers = [col[1] for col in columns_raw]
+
+ table_info.append(
+ {
+ "name": table,
+ "row_count": row_count,
+ "column_count": len(columns),
+ "columns": columns,
+ "preview": {"headers": preview_headers, "rows": preview_rows},
+ }
+ )
+ except Exception as exc:
+ logger.error("Failed to gather table info: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail="Fehler beim Lesen der Tabellen")
+
+ return {"tables": table_info}
+
+
+@router.get("/stats")
+def get_database_stats():
+ """Gibt einfache Zählstatistiken zurück."""
+ if not os.path.exists(DB_PATH):
+ return {
+ "materials_count": 0,
+ "spools_count": 0,
+ "printers_count": 0,
+ "jobs_count": 0,
+ "spools_open": 0,
+ "spools_empty": 0,
+ }
+
+ stats: Dict[str, int] = {}
+ with session_scope() as session:
+ # Materials
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM material"))
+ r = res.first()
+ stats["materials_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read material count: %s", exc, exc_info=True)
+ stats["materials_count"] = 0
+
+ # Spools
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM spool"))
+ r = res.first()
+ stats["spools_count"] = int(r[0]) if r else 0
+
+ res = session.exec(text("SELECT COUNT(*) FROM spool WHERE is_open = 1"))
+ r = res.first()
+ stats["spools_open"] = int(r[0]) if r else 0
+
+ res = session.exec(text("SELECT COUNT(*) FROM spool WHERE is_empty = 1"))
+ r = res.first()
+ stats["spools_empty"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read spool stats: %s", exc, exc_info=True)
+ stats["spools_count"] = 0
+ stats["spools_open"] = 0
+ stats["spools_empty"] = 0
+
+ # Printers
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM printer"))
+ r = res.first()
+ stats["printers_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read printers count: %s", exc, exc_info=True)
+ stats["printers_count"] = 0
+
+ # Jobs
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM job"))
+ r = res.first()
+ stats["jobs_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read jobs count: %s", exc, exc_info=True)
+ stats["jobs_count"] = 0
+
+ return stats
+
+
+@router.get("/query")
+def execute_query(sql: str):
+ """Führt eine SELECT-Query aus (nur Entwicklung)."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ if not sql.strip().upper().startswith("SELECT"):
+ raise HTTPException(status_code=403, detail="Nur SELECT Queries erlaubt")
+
+ try:
+ with session_scope() as session:
+ res = session.exec(text(sql))
+ rows = res.mappings().all()
+ result = [dict(r) for r in rows]
+ return {"success": True, "row_count": len(result), "data": result}
+ except Exception as exc:
+ logger.error("Query execution failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=400, detail=f"Query Fehler: {str(exc)}")
+
+
+@router.post("/editor")
+def execute_editor_query(payload: SQLEditorRequest):
+ """Führt nicht-SELECT SQL-Befehle aus (INSERT/UPDATE/DELETE/CREATE/ALTER/DROP)."""
+ sql = payload.sql.strip()
+ if not sql:
+ raise HTTPException(status_code=400, detail="Kein SQL-Befehl übergeben")
+
+ allowed = ("INSERT", "UPDATE", "DELETE", "CREATE", "ALTER", "DROP")
+ if not any(sql.upper().startswith(cmd) for cmd in allowed):
+ raise HTTPException(status_code=403, detail="Nur INSERT, UPDATE, DELETE, CREATE, ALTER, DROP erlaubt")
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ with session_scope() as session:
+ session.exec(text(sql))
+ session.commit()
+ return {"success": True, "message": "Befehl erfolgreich ausgeführt"}
+ except Exception as exc:
+ logger.error("Editor query failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=400, detail=f"Query Fehler: {str(exc)}")
+
+
+@router.post("/vacuum")
+def vacuum_database():
+ """Führt VACUUM aus und liefert Vorher/Nachher-Größe."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ size_before = os.path.getsize(DB_PATH)
+ # VACUUM outside explicit transaction
+ with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
+ conn.exec_driver_sql("VACUUM")
+ size_after = os.path.getsize(DB_PATH)
+ saved_kb = round((size_before - size_after) / 1024, 2)
+ return {
+ "success": True,
+ "message": "Datenbank optimiert",
+ "size_before_mb": round(size_before / 1024 / 1024, 3),
+ "size_after_mb": round(size_after / 1024 / 1024, 3),
+ "saved_kb": saved_kb,
+ }
+ except Exception as exc:
+ logger.error("VACUUM failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"VACUUM Fehler: {str(exc)}")
+
+
+@router.post("/backup")
+def backup_database():
+ """Erstellt ein Dateisystem-Backup der DB (kopiert die Datei)."""
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ backup_dir = "data/backups"
+ os.makedirs(backup_dir, exist_ok=True)
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ backup_path = os.path.join(backup_dir, f"filamenthub_backup_{timestamp}.db")
+ shutil.copy2(DB_PATH, backup_path)
+ backup_size = os.path.getsize(backup_path)
+ return {
+ "success": True,
+ "message": "Backup erstellt",
+ "backup_path": os.path.abspath(backup_path),
+ "backup_size_mb": round(backup_size / 1024 / 1024, 3),
+ "timestamp": timestamp,
+ }
+ except Exception as exc:
+ logger.error("Backup failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"Backup Fehler: {str(exc)}")
+
+
+@router.get("/backups/list")
+def list_backups():
+ backup_dir = "data/backups"
+ if not os.path.exists(backup_dir):
+ return {"backups": [], "count": 0}
+
+ backups = []
+ try:
+ for file in os.listdir(backup_dir):
+ if file.endswith('.db'):
+ file_path = os.path.join(backup_dir, file)
+ file_stats = os.stat(file_path)
+ backups.append({
+ "filename": file,
+ "path": os.path.abspath(file_path),
+ "size_mb": round(file_stats.st_size / 1024 / 1024, 3),
+ "created": file_stats.st_ctime,
+ })
+ backups.sort(key=lambda x: x["created"], reverse=True)
+ except Exception as exc:
+ logger.error("Failed to list backups: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail="Fehler beim Lesen der Backups")
+
+ return {"backups": backups, "count": len(backups)}
+
+
+@router.post("/migrate")
+def migrate_database():
+ """Führt Alembic-Migrationen (upgrade head) aus."""
+ project_root = Path(__file__).resolve().parents[2]
+ venv_alembic = project_root / ".venv" / "Scripts" / "alembic.exe"
+ cmd = [str(venv_alembic if venv_alembic.exists() else "alembic"), "upgrade", "head"]
+ try:
+ result = subprocess.run(cmd, capture_output=True, text=True, check=True, cwd=project_root)
+ return {"success": True, "message": "Migration erfolgreich", "stdout": result.stdout, "stderr": result.stderr}
+ except FileNotFoundError:
+ raise HTTPException(status_code=500, detail="Alembic CLI nicht gefunden")
+ except subprocess.CalledProcessError as exc:
+ output = (exc.stdout or "") + (exc.stderr or "")
+ if "No changes detected" in output or "Keine Änderungen erkannt" in output:
+ return {"success": True, "message": "Migration übersprungen (keine Änderungen)", "stdout": exc.stdout, "stderr": exc.stderr}
+ logger.error("Migration failed: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"Migration fehlgeschlagen: {exc.stderr or exc.stdout}")
+
+
+@router.delete("/row")
+def delete_row(table: str, id: str):
+ """Löscht eine Zeile anhand der ID aus einer erlaubten Tabelle."""
+ allowed_tables = {"material", "spool", "printer", "job"}
+ if table not in allowed_tables:
+ raise HTTPException(status_code=400, detail="Tabelle nicht erlaubt")
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ with session_scope() as session:
+ res = session.exec(text(f"DELETE FROM {table} WHERE id = :id"), {"id": id})
+ # attempt to get affected rows; fall back to existence check
+ affected = getattr(res, "rowcount", None)
+ if affected is None:
+ # check if row still exists
+ chk = session.exec(text(f"SELECT COUNT(*) FROM {table} WHERE id = :id"), {"id": id})
+ chk_row = chk.first()
+ affected = 0 if (chk_row and int(chk_row[0]) > 0) else 1
+ session.commit()
+
+ if not affected:
+ raise HTTPException(status_code=404, detail="Kein Eintrag mit dieser ID gefunden")
+
+ return {"success": True, "message": f"Eintrag gelöscht ({table}, id={id})", "affected": affected}
+ except HTTPException:
+ raise
+ except Exception as exc:
+ logger.error("Delete error: %s", exc, exc_info=True)
+ raise HTTPException(status_code=500, detail=f"Delete-Fehler: {exc}")
+@router.get("/tables")
+def get_table_info():
+ """Gibt detaillierte Informationen über alle Tabellen zurück"""
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ with session_scope() as session:
+ table_info = []
+ for table in tables:
+ # Row Count
+ cnt_res = session.exec(text(f"SELECT COUNT(*) as c FROM {table}"))
+ cnt_row = cnt_res.first()
+ row_count = int(cnt_row[0]) if cnt_row else 0
+
+ # Columns
+ cols_res = session.exec(text(f"PRAGMA table_info({table})"))
+ columns = cols_res.all()
+
+ # Preview Rows
+ preview_res = session.exec(text(f"SELECT * FROM {table} LIMIT 5"))
+ preview_rows = [tuple(r) for r in preview_res.all()]
+
+ preview_headers = [col[1] for col in columns]
+ table_info.append({
+ "name": table,
+ "row_count": row_count,
+ "column_count": len(columns),
+ "columns": [
+ {
+ "name": col[1],
+ "type": col[2],
+ "not_null": bool(col[3]),
+ "primary_key": bool(col[5])
+ }
+ for col in columns
+ ],
+ "preview": {
+ "headers": preview_headers,
+ "rows": preview_rows
+ }
+ })
+
+ return {"tables": table_info}
+
+
+@router.get("/stats")
+def get_database_stats():
+ """Gibt Statistiken über die Datenbank zurück"""
+
+ if not os.path.exists(DB_PATH):
+ return {
+ "materials_count": 0,
+ "spools_count": 0,
+ "printers_count": 0,
+ "jobs_count": 0,
+ "spools_open": 0,
+ "spools_empty": 0
+ }
+
+ with session_scope() as session:
+ stats = {}
+
+ # Materials
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM material"))
+ r = res.first()
+ stats["materials_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read material count: %s", exc, exc_info=True)
+ stats["materials_count"] = 0
+
+ # Spools
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM spool"))
+ r = res.first()
+ stats["spools_count"] = int(r[0]) if r else 0
+
+ res = session.exec(text("SELECT COUNT(*) FROM spool WHERE is_open = 1"))
+ r = res.first()
+ stats["spools_open"] = int(r[0]) if r else 0
+
+ res = session.exec(text("SELECT COUNT(*) FROM spool WHERE is_empty = 1"))
+ r = res.first()
+ stats["spools_empty"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read spool stats: %s", exc, exc_info=True)
+ stats["spools_count"] = 0
+ stats["spools_open"] = 0
+ stats["spools_empty"] = 0
+
+ # Printers
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM printer"))
+ r = res.first()
+ stats["printers_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read printers count: %s", exc, exc_info=True)
+ stats["printers_count"] = 0
+
+ # Jobs
+ try:
+ res = session.exec(text("SELECT COUNT(*) FROM job"))
+ r = res.first()
+ stats["jobs_count"] = int(r[0]) if r else 0
+ except Exception as exc:
+ logger.debug("Failed to read jobs count: %s", exc, exc_info=True)
+ stats["jobs_count"] = 0
+
+ return stats
+
+
+@router.get("/query")
+def execute_query(sql: str):
+ """Führt eine SELECT Query aus (nur für Entwicklung!)"""
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ # Sicherheit: Nur SELECT erlauben
+ if not sql.strip().upper().startswith("SELECT"):
+ raise HTTPException(status_code=403, detail="Nur SELECT Queries erlaubt")
+
+ try:
+ with session_scope() as session:
+ res = session.exec(text(sql))
+ rows = res.mappings().all()
+ result = [dict(r) for r in rows]
+ return {
+ "success": True,
+ "row_count": len(result),
+ "data": result
+ }
+ except Exception as e:
+ raise HTTPException(status_code=400, detail=f"Query Fehler: {str(e)}")
+
+
+@router.post("/vacuum")
+def vacuum_database():
+ """Führt VACUUM aus (komprimiert und optimiert die Datenbank)"""
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ size_before = os.path.getsize(DB_PATH)
+
+ conn = sqlite3.connect(DB_PATH)
+ conn.execute("VACUUM")
+ conn.close()
+
+ size_after = os.path.getsize(DB_PATH)
+ saved_kb = round((size_before - size_after) / 1024, 2)
+
+ return {
+ "success": True,
+ "message": "Datenbank optimiert",
+ "size_before_mb": round(size_before / 1024 / 1024, 3),
+ "size_after_mb": round(size_after / 1024 / 1024, 3),
+ "saved_kb": saved_kb
+ }
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=f"VACUUM Fehler: {str(e)}")
+
+
+@router.post("/backup")
+def backup_database():
+ """Erstellt ein Backup der Datenbank"""
+
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+
+ try:
+ from datetime import datetime
+ import shutil
+
+ # Backup Ordner
+ backup_dir = "data/backups"
+ os.makedirs(backup_dir, exist_ok=True)
+
+ # Backup Dateiname mit Timestamp
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ backup_path = os.path.join(backup_dir, f"filamenthub_backup_{timestamp}.db")
+
+ # Copy
+ shutil.copy2(DB_PATH, backup_path)
+
+ backup_size = os.path.getsize(backup_path)
+
+ return {
+ "success": True,
+ "message": "Backup erstellt",
+ "backup_path": os.path.abspath(backup_path),
+ "backup_size_mb": round(backup_size / 1024 / 1024, 3),
+ "timestamp": timestamp
+ }
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=f"Backup Fehler: {str(e)}")
+
+
+@router.get("/backups/list")
+def list_backups():
+ """Listet alle Backups auf"""
+
+ backup_dir = "data/backups"
+
+ if not os.path.exists(backup_dir):
+ return {"backups": [], "count": 0}
+
+ backups = []
+
+ for file in os.listdir(backup_dir):
+ if file.endswith('.db'):
+ file_path = os.path.join(backup_dir, file)
+ file_stats = os.stat(file_path)
+
+ backups.append({
+ "filename": file,
+ "path": os.path.abspath(file_path),
+ "size_mb": round(file_stats.st_size / 1024 / 1024, 3),
+ "created": file_stats.st_ctime
+ })
+
+ # Sortiere nach Datum (neueste zuerst)
+ backups.sort(key=lambda x: x["created"], reverse=True)
+
+ return {"backups": backups, "count": len(backups)}
+
+
+@router.post("/migrate")
+def migrate_database():
+ """Führt Alembic-Migrationen (upgrade head) aus."""
+ project_root = Path(__file__).resolve().parents[2]
+ venv_alembic = project_root / ".venv" / "Scripts" / "alembic.exe"
+ cmd = [str(venv_alembic if venv_alembic.exists() else "alembic"), "upgrade", "head"]
+ try:
+ # Prüfe aktuelle Revision
+ current_cmd = [str(venv_alembic if venv_alembic.exists() else "alembic"), "current"]
+ current_result = subprocess.run(
+ current_cmd,
+ capture_output=True,
+ text=True,
+ cwd=project_root,
+ )
+ current_output = (current_result.stdout or "") + (current_result.stderr or "")
+ # Hole HEAD-Revision
+ head_cmd = [str(venv_alembic if venv_alembic.exists() else "alembic"), "heads"]
+ head_result = subprocess.run(
+ head_cmd,
+ capture_output=True,
+ text=True,
+ cwd=project_root,
+ )
+ head_output = (head_result.stdout or "") + (head_result.stderr or "")
+ # Extrahiere Revisionen
+ import re
+ current_rev = re.search(r"([0-9a-f]+) \(head\)", current_output)
+ head_rev = re.search(r"([0-9a-f]+) \(head\)", head_output)
+ if current_rev and head_rev and current_rev.group(1) == head_rev.group(1):
+ return {
+ "success": True,
+ "message": "Migration übersprungen (bereits aktuell)",
+ "stdout": current_output,
+ "stderr": ""
+ }
+ # Führe Migration aus
+ result = subprocess.run(
+ cmd,
+ capture_output=True,
+ text=True,
+ check=True,
+ cwd=project_root,
+ )
+ # Prüfe auf spezielle Info im Alembic-Output
+ output = (result.stdout or "") + (result.stderr or "")
+ if "ist bereits vorhanden" in output or "already exists" in output:
+ return {
+ "success": True,
+ "message": "Datenbankspalte bereits vorhanden",
+ "stdout": result.stdout,
+ "stderr": result.stderr
+ }
+ if "No changes detected" in output or "Keine Änderungen erkannt" in output:
+ return {
+ "success": True,
+ "message": "Migration übersprungen (keine Änderungen)",
+ "stdout": result.stdout,
+ "stderr": result.stderr
+ }
+ return {
+ "success": True,
+ "message": "Migration erfolgreich",
+ "stdout": result.stdout,
+ "stderr": result.stderr
+ }
+ except FileNotFoundError:
+ raise HTTPException(status_code=500, detail="Alembic CLI nicht gefunden (venv/.venv/Scripts/alembic.exe oder alembic im PATH nicht vorhanden)")
+ except subprocess.CalledProcessError as exc:
+ output = (exc.stdout or "") + (exc.stderr or "")
+ if "ist bereits vorhanden" in output or "already exists" in output:
+ return {
+ "success": True,
+ "message": "Datenbankspalte bereits vorhanden",
+ "stdout": exc.stdout,
+ "stderr": exc.stderr
+ }
+ if "No changes detected" in output or "Keine Änderungen erkannt" in output:
+ return {
+ "success": True,
+ "message": "Migration übersprungen (keine Änderungen)",
+ "stdout": exc.stdout,
+ "stderr": exc.stderr
+ }
+ raise HTTPException(status_code=500, detail=f"Migration fehlgeschlagen: {exc.stderr or exc.stdout}")
+
+
+@router.delete("/row")
+def delete_row(table: str, id: str):
+ """Löscht eine Zeile anhand der ID aus einer erlaubten Tabelle."""
+ allowed_tables = {"material", "spool", "printer", "job"}
+ if table not in allowed_tables:
+ raise HTTPException(status_code=400, detail="Tabelle nicht erlaubt")
+ if not os.path.exists(DB_PATH):
+ raise HTTPException(status_code=404, detail="Datenbank nicht gefunden")
+ try:
+ conn = sqlite3.connect(DB_PATH)
+ cur = conn.cursor()
+ cur.execute(f"DELETE FROM {table} WHERE id = ?", (id,))
+ affected = cur.rowcount
+ conn.commit()
+ conn.close()
+ if affected == 0:
+ raise HTTPException(status_code=404, detail="Kein Eintrag mit dieser ID gefunden")
+ return {"success": True, "message": f"Eintrag gelöscht ({table}, id={id})", "affected": affected}
+ except HTTPException:
+ raise
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=f"Delete-Fehler: {e}")
diff --git a/app/routes/debug_ams_routes.py b/app/routes/debug_ams_routes.py
new file mode 100644
index 0000000..69267d8
--- /dev/null
+++ b/app/routes/debug_ams_routes.py
@@ -0,0 +1,66 @@
+from fastapi import APIRouter
+from fastapi.responses import HTMLResponse, JSONResponse
+from fastapi.templating import Jinja2Templates
+from fastapi import Request
+
+from app.services.ams_parser import parse_ams
+from app.services.universal_mapper import UniversalMapper
+
+router = APIRouter()
+templates = Jinja2Templates(directory="frontend/templates")
+
+
+def _stub_raw_payload():
+ # Synthetischer Payload für Debug; kann später durch echten Printer-Feed ersetzt werden
+ return {
+ "ams": {
+ "modules": [
+ {
+ "ams_id": 0,
+ "active_tray": 1,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": "UUID-A0-S0", "material": "PLA"},
+ {"tray_id": 1, "tray_uuid": "UUID-A0-S1", "material": "PETG"},
+ {"tray_id": 2, "tray_uuid": None, "material": None},
+ {"tray_id": 3, "tray_uuid": "UUID-A0-S3", "material": "ABS"},
+ ],
+ },
+ {
+ "ams_id": 1,
+ "active_tray": 2,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": "UUID-A1-S0", "material": "PA"},
+ {"tray_id": 1, "tray_uuid": "UUID-A1-S1", "material": None},
+ {"tray_id": 2, "tray_uuid": "UUID-A1-S2", "material": "TPU"},
+ {"tray_id": 3, "tray_uuid": None, "material": None},
+ ],
+ },
+ ]
+ }
+ }
+
+
+@router.get("/debug/ams", response_class=HTMLResponse)
+async def debug_ams_page(request: Request):
+ return templates.TemplateResponse(
+ "debug_ams.html",
+ {"request": request, "title": "AMS Debug View", "active_page": "debug"},
+ )
+
+
+@router.get("/api/debug/ams")
+async def debug_ams_api():
+ raw = _stub_raw_payload()
+ parsed = parse_ams(raw)
+ mapper = UniversalMapper()
+ mapped_out = mapper.map(raw)
+ mapped_units = getattr(mapped_out, "ams_units", None)
+ return JSONResponse(
+ {
+ "raw": raw,
+ "parsed": parsed,
+ "mapped": mapped_units,
+ }
+ )
diff --git a/app/routes/debug_log_routes.py b/app/routes/debug_log_routes.py
new file mode 100644
index 0000000..565078c
--- /dev/null
+++ b/app/routes/debug_log_routes.py
@@ -0,0 +1,51 @@
+from fastapi import APIRouter, HTTPException, Query, Request
+from app.services import log_reader
+
+
+router = APIRouter(tags=["Debug Logs"])
+
+DEFAULT_LIMIT = 200
+MAX_LIMIT = log_reader.MAX_LIMIT
+
+
+def _is_admin(request: Request | None) -> bool:
+ """
+ Leichte Admin-Prüfung: nutzt das bestehende admin_token-Cookie,
+ um Admin-Logs nur für authentifizierte Nutzer freizugeben.
+ """
+ if request is None:
+ return False
+ try:
+ from app.routes.admin_routes import admin_tokens # type: ignore
+ except Exception:
+ return False
+ token = request.cookies.get("admin_token")
+ return bool(token and token in admin_tokens)
+
+
+@router.get("/logs")
+async def debug_logs(
+ request: Request,
+ module: str = Query("app"),
+ limit: int = Query(DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
+ offset: int = Query(0, ge=0),
+ level: str | None = Query(None, description="off/basic/verbose filter, optional"),
+ search: str | None = Query(None, description="Freitext-Suche, optional"),
+):
+ try:
+ allow_admin = _is_admin(request)
+ result = log_reader.read_logs(
+ module=module,
+ limit=limit,
+ offset=offset,
+ level=level,
+ search=search,
+ allow_admin=allow_admin,
+ )
+ return result
+ except log_reader.LogAccessError as exc:
+ raise HTTPException(status_code=403, detail=str(exc)) from exc
+ except ValueError as exc:
+ raise HTTPException(status_code=400, detail=str(exc)) from exc
+ except Exception as exc:
+ raise HTTPException(status_code=500, detail=str(exc)) from exc
diff --git a/app/routes/debug_network_routes.py b/app/routes/debug_network_routes.py
new file mode 100644
index 0000000..49ba098
--- /dev/null
+++ b/app/routes/debug_network_routes.py
@@ -0,0 +1,73 @@
+import ipaddress
+import socket
+from typing import Optional
+
+import psutil
+from fastapi import APIRouter
+
+router = APIRouter(
+ prefix="/api/debug",
+ tags=["Debug"]
+)
+
+
+def _private_ipv4_from_psutil() -> Optional[str]:
+ try:
+ for addrs in psutil.net_if_addrs().values():
+ for addr in addrs:
+ if addr.family == socket.AF_INET:
+ ip = addr.address
+ if _is_private_ipv4(ip):
+ return ip
+ except Exception:
+ return None
+ return None
+
+
+def _is_private_ipv4(ip: str) -> bool:
+ try:
+ ip_obj = ipaddress.ip_address(ip)
+ return ip_obj.version == 4 and ip_obj.is_private
+ except ValueError:
+ return False
+
+
+def _suggest_range(ip: str) -> Optional[str]:
+ if not _is_private_ipv4(ip):
+ return None
+ try:
+ parts = ip.split('.')
+ parts[-1] = '0'
+ return '.'.join(parts) + '/24'
+ except Exception:
+ return None
+
+
+def _detect_local_ip() -> Optional[str]:
+ try:
+ with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
+ s.connect(("8.8.8.8", 80))
+ ip = s.getsockname()[0]
+ if _is_private_ipv4(ip):
+ return ip
+ except Exception:
+ pass
+ return _private_ipv4_from_psutil()
+
+
+@router.get("/network")
+async def get_network_info():
+ hostname = None
+ try:
+ hostname = socket.gethostname()
+ except Exception:
+ hostname = None
+
+ local_ip = _detect_local_ip()
+ suggested = _suggest_range(local_ip) if local_ip else None
+ return {
+ "ok": True,
+ "hostname": hostname,
+ "local_ip": local_ip,
+ "suggested_range": suggested,
+ }
diff --git a/app/routes/debug_performance_routes.py b/app/routes/debug_performance_routes.py
new file mode 100644
index 0000000..52f05bd
--- /dev/null
+++ b/app/routes/debug_performance_routes.py
@@ -0,0 +1,61 @@
+import os
+import time
+from fastapi import APIRouter
+
+try:
+ import psutil # type: ignore
+except ImportError: # pragma: no cover - optional dependency
+ psutil = None
+
+router = APIRouter(prefix="/api/debug", tags=["Debug Performance"])
+
+# capture app start for uptime
+app_start_ts = time.time()
+
+
+def get_disk_path():
+ cwd = os.getcwd()
+ drive, _ = os.path.splitdrive(cwd)
+ if drive:
+ return drive + os.sep
+ return "/"
+
+
+@router.get("/performance")
+async def debug_performance():
+ data = {
+ "ok": True,
+ "backend_uptime_s": int(time.time() - app_start_ts),
+ "cpu_percent": None,
+ "ram_used_mb": None,
+ "ram_total_mb": None,
+ "disk_used_gb": None,
+ "disk_total_gb": None,
+ "note": None,
+ }
+
+ if psutil is None:
+ data["note"] = "psutil not installed"
+ return data
+
+ try:
+ data["cpu_percent"] = round(psutil.cpu_percent(interval=None), 1)
+ except Exception:
+ data["note"] = "cpu read failed"
+
+ try:
+ vm = psutil.virtual_memory()
+ data["ram_used_mb"] = int(vm.used / 1024 / 1024)
+ data["ram_total_mb"] = int(vm.total / 1024 / 1024)
+ except Exception:
+ data["note"] = (data["note"] or "ram read failed")
+
+ try:
+ disk_path = get_disk_path()
+ disk = psutil.disk_usage(disk_path)
+ data["disk_used_gb"] = round(disk.used / 1024 / 1024 / 1024, 2)
+ data["disk_total_gb"] = round(disk.total / 1024 / 1024 / 1024, 2)
+ except Exception:
+ data["note"] = (data["note"] or "disk read failed")
+
+ return data
diff --git a/app/routes/debug_routes.py b/app/routes/debug_routes.py
new file mode 100644
index 0000000..7ba431b
--- /dev/null
+++ b/app/routes/debug_routes.py
@@ -0,0 +1,496 @@
+from fastapi import APIRouter, HTTPException, Depends
+import logging
+from pydantic import BaseModel
+from sqlalchemy import text
+from app.db.session import session_scope
+import yaml
+import os
+import logging
+import inspect
+from sqlmodel import Session
+from app.database import get_session
+from app.models.settings import Setting
+
+router = APIRouter(prefix="/api/debug", tags=["Debug & Config"])
+
+
+DEPRECATED_LOGGING_RESPONSE = {"deprecated": True, "use": "/api/config"}
+
+# -----------------------------
+# MODELS
+# -----------------------------
+class LogModuleToggle(BaseModel):
+ module: str
+ enabled: bool
+
+
+class LogLevelUpdate(BaseModel):
+ level: str
+
+
+class LogRotationUpdate(BaseModel):
+ max_size_mb: int
+ backup_count: int
+
+
+# -----------------------------
+# CONFIG HELPERS
+# -----------------------------
+CONFIG_PATH = "config.yaml"
+
+
+def load_config() -> dict:
+ """Lädt die config.yaml"""
+ if not os.path.exists(CONFIG_PATH):
+ raise HTTPException(status_code=404, detail="config.yaml nicht gefunden")
+ with open(CONFIG_PATH, "r", encoding="utf-8") as f:
+ return yaml.safe_load(f)
+
+
+def save_config(config: dict) -> None:
+ """Speichert die config.yaml"""
+ try:
+ logger = logging.getLogger('app')
+ caller = None
+ try:
+ fr = inspect.stack()[1]
+ caller = f"{fr.filename}:{fr.lineno} in {fr.function}"
+ except Exception:
+ caller = "unknown"
+ logger.info(f"Writing config.yaml (debug_routes.save_config) called from {caller}")
+ except Exception:
+ pass
+ with open(CONFIG_PATH, "w", encoding="utf-8") as f:
+ yaml.dump(config, f, default_flow_style=False, allow_unicode=True)
+
+
+# -----------------------------
+# ROUTES
+# -----------------------------
+@router.get("/db/tables")
+def get_db_tables():
+ """Tabellenübersicht für Admin-Panel (SQLite)."""
+ db_path = "data/filamenthub.db"
+ tables = []
+ with session_scope() as session:
+ res = session.exec(text("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"))
+ for row in res.all():
+ table_name = row[0]
+ cols_res = session.exec(text(f"PRAGMA table_info({table_name})"))
+ columns_raw = cols_res.all()
+ columns = [{"name": col[1], "type": col[2], "primary_key": bool(col[5])} for col in columns_raw]
+ cnt_res = session.exec(text(f"SELECT COUNT(*) FROM {table_name}"))
+ cnt_row = cnt_res.first()
+ count = int(cnt_row[0]) if cnt_row else 0
+ tables.append({"name": table_name, "columns": columns, "count": count})
+ return {"tables": tables}
+
+
+@router.get("/config/logging")
+def get_logging_config():
+ """Gibt die Logging-Konfiguration zur?ck."""
+ return DEPRECATED_LOGGING_RESPONSE
+
+
+@router.get("/config")
+
+
+@router.get("/config")
+def get_full_config():
+ """Gibt die gesamte config.yaml zurück (alias zu /config/raw)."""
+ return load_config()
+
+
+@router.post("/config/logging/toggle")
+def toggle_logging_module(data: LogModuleToggle):
+ """Schaltet ein Logging-Modul an/aus."""
+ return DEPRECATED_LOGGING_RESPONSE
+
+
+@router.post("/config/logging/level")
+
+
+@router.post("/config/logging/level")
+def update_log_level(data: LogLevelUpdate):
+ """Setzt das globale Log-Level (DEBUG/INFO/WARNING/ERROR/CRITICAL)."""
+ return DEPRECATED_LOGGING_RESPONSE
+
+
+@router.post("/config/logging/rotation")
+
+
+@router.post("/config/logging/rotation")
+def update_log_rotation(data: LogRotationUpdate):
+ """Aktualisiert Logrotation (max_size_mb, backup_count) und passt MQTT-Logger an."""
+ return DEPRECATED_LOGGING_RESPONSE
+
+
+@router.get("/modules/status")
+
+
+@router.get("/modules/status")
+def get_modules_status():
+ """Status aller Logging-Module zurückgeben."""
+ config = load_config()
+ modules = config.get("logging", {}).get("modules", {})
+ result = {}
+ for name, cfg in modules.items():
+ result[name] = {"enabled": cfg.get("enabled", False), "has_logs": os.path.exists(f"logs/{name}")}
+
+ return {"global_level": config.get("logging", {}).get("level", "INFO"), "modules": result}
+
+
+@router.get("/environment")
+def get_environment_info():
+ """Infos zur Python-Umgebung."""
+ import sys
+ import platform
+
+ return {
+ "python_version": sys.version,
+ "python_executable": sys.executable,
+ "platform": platform.platform(),
+ "architecture": platform.architecture()[0],
+ "machine": platform.machine(),
+ "processor": platform.processor(),
+ }
+
+
+@router.get("/paths")
+def get_project_paths():
+ """Wichtige Projektpfade."""
+ config = load_config()
+ return {
+ "project_root": os.getcwd(),
+ "config_file": os.path.abspath(CONFIG_PATH),
+ "logs_root": os.path.abspath(config.get("paths", {}).get("logs", "./logs")),
+ "database": os.path.abspath("data/filamenthub.db"),
+ "templates": os.path.abspath("frontend/templates"),
+ "static": os.path.abspath("app/static"),
+ }
+
+
+@router.get("/config/raw")
+def get_raw_config():
+ """Gibt die komplette config.yaml zurück."""
+ return load_config()
+
+
+@router.post("/config/raw")
+def save_raw_config(data: dict):
+ """Speichert die komplette config.yaml (Rohinhalt)."""
+ if not isinstance(data, dict):
+ raise HTTPException(status_code=400, detail="Config muss ein Objekt sein")
+ save_config(data)
+ return {"success": True}
+
+
+@router.post("/restart-required")
+def check_restart_required():
+ """Dummy-Endpunkt: meldet Neustart empfohlen (falls Config geändert)."""
+ return {
+ "restart_required": True,
+ "reason": "Config-Änderungen wurden vorgenommen",
+ "recommendation": "Server neu starten für Änderungen",
+ }
+
+
+@router.get("/logs")
+def get_logs(module: str = "app", limit: int = 100):
+ """
+ Gibt Log-Einträge zurück.
+ Query-Parameter: module (app|mqtt|bambu|klipper|errors), limit (default: 100)
+ """
+ import glob
+ import re
+ from datetime import datetime
+
+ # Map module names to folder names
+ module_map = {
+ "app": "app",
+ "mqtt": "mqtt",
+ "3d_drucker": "3d_drucker",
+ "3d-drucker": "3d_drucker",
+ "3d_printer": "3d_drucker",
+ "printer": "3d_drucker",
+ "bambu": "3d_drucker",
+ "klipper": "klipper",
+ "errors": "errors"
+ }
+
+ module = module_map.get(module.lower(), module)
+ config = load_config()
+ logs_root = config.get("paths", {}).get("logs", "./logs")
+
+ # Suche nach Log-Dateien - auch in Unterordnern
+ log_pattern = os.path.join(logs_root, f"{module}*.log")
+ log_files = glob.glob(log_pattern)
+
+ # Falls nicht gefunden, suche in Unterordner mit gleichem Namen
+ if not log_files:
+ log_pattern_sub = os.path.join(logs_root, module, "*.log")
+ log_files = glob.glob(log_pattern_sub)
+
+ if not log_files:
+ return {
+ "logs": [],
+ "count": 0,
+ "module": module,
+ "debug": {
+ "logs_root": logs_root,
+ "pattern_1": log_pattern,
+ "pattern_2": os.path.join(logs_root, module, "*.log"),
+ "cwd": os.getcwd()
+ }
+ }
+
+ # Neueste Log-Datei verwenden
+ log_file = max(log_files, key=os.path.getmtime)
+
+ logs = []
+ total_lines_read = 0
+ try:
+ with open(log_file, "r", encoding="utf-8") as f:
+ lines = f.readlines()
+ total_lines_read = len(lines)
+
+ # Nimm die letzten 'limit' Zeilen
+ lines = lines[-limit:]
+
+ # Parse Log-Zeilen (Format: 2025-11-24 21:47:48,910 [INFO] uvicorn.error – Message)
+ # Unterstützt verschiedene Dash-Zeichen: - – —
+ log_pattern = re.compile(
+ r'^(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}(?:,\d{3})?)\s+\[(\w+)\]\s+([\w\.]+)\s+[\u2013\u2014\-–—]+\s+(.+)$'
+ )
+
+ for line in lines:
+ line = line.strip()
+ if not line:
+ continue
+
+ match = log_pattern.match(line)
+ if match:
+ timestamp, level, log_module, message = match.groups()
+ logs.append({
+ "timestamp": timestamp,
+ "module": log_module,
+ "level": level,
+ "message": message
+ })
+ else:
+ # Fallback: unformatierte Zeile - versuche trotzdem Timestamp zu extrahieren
+ timestamp_match = re.match(r'^(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}(?:,\d{3})?)', line)
+ if timestamp_match:
+ timestamp = timestamp_match.group(1)
+ message = line[len(timestamp):].strip()
+ else:
+ timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+ message = line
+
+ logs.append({
+ "timestamp": timestamp,
+ "module": module,
+ "level": "INFO",
+ "message": message
+ })
+
+ except Exception as e:
+ import traceback
+ return {
+ "error": str(e),
+ "traceback": traceback.format_exc(),
+ "logs": [],
+ "count": 0,
+ "module": module,
+ "file": log_file if 'log_file' in locals() else None
+ }
+
+ return {
+ "logs": logs,
+ "count": len(logs),
+ "module": module,
+ "file": os.path.basename(log_file),
+ "debug": {
+ "total_lines_read": total_lines_read,
+ "file_path": log_file,
+ "limit": limit
+ }
+ }
+
+
+def delete_logs(module: str = "app"):
+ """
+ Löscht Log-Dateien für das angegebene Modul.
+ Unterstützt Unterordnerstruktur.
+ """
+ import glob
+ module_map = {
+ "app": "app",
+ "mqtt": "mqtt",
+ "3d_drucker": "3d_drucker",
+ "3d-drucker": "3d_drucker",
+ "3d_printer": "3d_drucker",
+ "printer": "3d_drucker",
+ "klipper": "klipper",
+ "errors": "errors"
+ }
+ module_key = module_map.get(module.lower(), module)
+ cfg = load_config()
+ logs_root = cfg.get("paths", {}).get("logs", "./logs")
+
+ # Muster: Root/app*.log und Root/app/*.log
+ patterns = [
+ os.path.join(logs_root, f"{module_key}*.log"),
+ os.path.join(logs_root, module_key, "*.log"),
+ ]
+
+ deleted = []
+ for pat in patterns:
+ for fp in glob.glob(pat):
+ try:
+ os.remove(fp)
+ deleted.append(fp)
+ except Exception:
+ pass
+ # Server-Log: Löschvorgang protokollieren
+ try:
+ logger = logging.getLogger("app")
+ logger.info(f"Log-Dateien gelöscht: module={module_key}, count={len(deleted)}")
+ except Exception:
+ pass
+ return {"deleted": deleted, "module": module_key}
+
+
+@router.post("/logs/clear")
+def clear_logs_post(payload: dict):
+ """
+ Leert Logdateien für ein Modul (sichere Behandlung von FileHandlers).
+ Erwartet JSON-Body: { "module": "" }
+ Antwort immer 200 mit Standard-Response.
+ """
+ module = (payload or {}).get("module") if isinstance(payload, dict) else None
+ if not module:
+ return {"status": "fail", "message": "Logdatei konnte nicht geleert werden", "details": "missing module"}
+ return _clear_logs_impl(module)
+
+
+@router.delete("/logs")
+def clear_logs_delete(module: str = "app"):
+ """
+ Kompatibler DELETE-Endpunkt: /api/debug/logs?module=app
+ Antwort immer 200 mit Standard-Response.
+ """
+ return _clear_logs_impl(module)
+
+
+def _clear_logs_impl(module: str):
+ """Common implementation for safely clearing log files."""
+ from pathlib import Path
+ import glob
+
+ try:
+ module_map = {
+ "app": "app",
+ "mqtt": "mqtt",
+ "3d_drucker": "3d_drucker",
+ "3d-drucker": "3d_drucker",
+ "3d_printer": "3d_drucker",
+ "printer": "3d_drucker",
+ "klipper": "klipper",
+ "errors": "errors",
+ }
+ module_key = module_map.get(module.lower(), module)
+ cfg = load_config()
+ logs_root = cfg.get("paths", {}).get("logs", "./logs")
+ logs_root_path = Path(logs_root).resolve()
+
+ patterns = [str(logs_root_path / f"{module_key}*.log"), str(logs_root_path / module_key / "*.log")]
+ files = sorted({fp for pat in patterns for fp in glob.glob(pat)})
+
+ if not files:
+ logging.getLogger("app").info(f"Keine Logdatei zum Leeren gefunden: module={module_key}")
+ return {"status": "ok", "message": "Logdatei wurde geleert"}
+
+ cleared = []
+ logger = logging.getLogger("app")
+
+ def _matches_handler(handler, target_path: Path) -> bool:
+ base = getattr(handler, "baseFilename", None)
+ if not base:
+ return False
+ try:
+ return Path(base).resolve() == target_path
+ except Exception:
+ return False
+
+ def _collect_handlers(target_path: Path):
+ matched = set()
+ for handler in logging.root.handlers:
+ if isinstance(handler, logging.FileHandler) and _matches_handler(handler, target_path):
+ matched.add(handler)
+ for logger_obj in logging.root.manager.loggerDict.values():
+ if isinstance(logger_obj, logging.Logger):
+ for handler in getattr(logger_obj, "handlers", []):
+ if isinstance(handler, logging.FileHandler) and _matches_handler(handler, target_path):
+ matched.add(handler)
+ return matched
+
+ def _truncate_handler(handler: logging.FileHandler):
+ handler.acquire()
+ try:
+ stream = getattr(handler, "stream", None)
+ if stream:
+ stream.seek(0)
+ stream.truncate(0)
+ finally:
+ handler.release()
+
+ for fp in files:
+ file_path = Path(fp).resolve()
+ handlers = _collect_handlers(file_path)
+ if handlers:
+ handler_success = False
+ for handler in handlers:
+ try:
+ _truncate_handler(handler)
+ handler_success = True
+ except Exception as exc:
+ logger.exception(f"Fehler beim Leeren des FileHandlers für {file_path}: {exc}")
+ if not handler_success:
+ raise RuntimeError(f"FileHandler konnte nicht geleert werden: {file_path}")
+ else:
+ with open(file_path, "w", encoding="utf-8"):
+ pass
+ cleared.append(str(file_path))
+
+ logger.info(f"Logdateien geleert: module={module_key}, count={len(cleared)}")
+ return {"status": "ok", "message": "Logdatei wurde geleert"}
+
+ except Exception as exc:
+ logging.getLogger("app").exception(f"Log-Clear fehlgeschlagen: module={module}, error={exc}")
+ return {"status": "fail", "message": "Logdatei konnte nicht geleert werden", "details": str(exc)}
+
+
+# -----------------------------
+# PRO MODE CONFIRMATION
+# -----------------------------
+@router.get("/pro-mode/status")
+def get_pro_mode_status(session: Session = Depends(get_session)):
+ """Prüft ob User Pro-Mode bereits bestätigt hat"""
+ setting = session.get(Setting, "debug.pro_mode_accepted")
+ accepted = setting.value.lower() in ("true", "1", "yes") if setting and setting.value else False
+ return {"accepted": accepted}
+
+
+@router.post("/pro-mode/accept")
+def accept_pro_mode(session: Session = Depends(get_session)):
+ """Speichert dass User Pro-Mode bestätigt hat (nur einmal)"""
+ setting = session.get(Setting, "debug.pro_mode_accepted")
+ if setting:
+ setting.value = "true"
+ else:
+ setting = Setting(key="debug.pro_mode_accepted", value="true")
+ session.add(setting)
+ session.commit()
+ logging.getLogger("app").info("Pro-Mode wurde vom User bestätigt")
+ return {"success": True, "accepted": True}
diff --git a/app/routes/debug_system_routes.py b/app/routes/debug_system_routes.py
new file mode 100644
index 0000000..c344078
--- /dev/null
+++ b/app/routes/debug_system_routes.py
@@ -0,0 +1,186 @@
+import time
+from typing import Dict
+
+from fastapi import APIRouter, Request
+from sqlalchemy import text
+from sqlmodel import Session
+
+from app.database import engine
+from app.monitoring.runtime_monitor import get_runtime_metrics
+from app.routes.config_routes import _load_config # type: ignore
+from app.services.environment_info import build_environment_snapshot
+
+try:
+ from app.routes.mqtt_routes import (
+ mqtt_clients,
+ active_connections,
+ last_connect_error,
+ active_ws_clients,
+ last_ws_activity_ts,
+ )
+except Exception: # pragma: no cover - fallback
+ import paho.mqtt.client as mqtt
+ mqtt_clients: Dict[str, mqtt.Client] = {}
+ active_connections = set()
+ last_ws_ping = None # Symbol existiert im Hauptmodul nicht, daher als Dummy belassen
+ last_connect_error = None
+ active_ws_clients = 0
+ last_ws_activity_ts = None
+
+router = APIRouter(prefix="/api/debug", tags=["Debug System"])
+
+
+@router.get("/system_status")
+def system_status(request: Request):
+ api_state = {"state": "online"}
+
+ db_state = {"state": "error"}
+ try:
+ with Session(engine) as session:
+ session.execute(text("SELECT 1"))
+ db_state["state"] = "connected"
+ except Exception:
+ db_state["state"] = "error"
+
+ mqtt_state: Dict[str, object] = {"state": "disabled"}
+ try:
+ if mqtt_clients:
+ connection_id, client = next(iter(mqtt_clients.items()))
+ host, port = None, None
+ try:
+ if ":" in connection_id:
+ host, port = connection_id.split(":", 1)
+ except Exception:
+ host, port = None, None
+ mqtt_state["host"] = host
+ mqtt_state["port"] = int(port) if port and port.isdigit() else port
+
+ try:
+ if hasattr(client, "is_connected") and client.is_connected():
+ mqtt_state["state"] = "connected"
+ else:
+ mqtt_state["state"] = "disconnected"
+ except Exception:
+ mqtt_state["state"] = "error"
+
+ if last_connect_error is not None:
+ mqtt_state["last_error"] = str(last_connect_error)
+ else:
+ mqtt_state["state"] = "disabled"
+ except Exception as exc:
+ mqtt_state = {"state": "error", "last_error": str(exc)}
+
+ websocket_state: Dict[str, object] = {"state": "offline"}
+ try:
+ now = time.time()
+ clients = active_ws_clients if active_ws_clients is not None else 0
+ websocket_state["clients"] = clients
+ if last_ws_activity_ts:
+ websocket_state["last_activity_s"] = round(now - last_ws_activity_ts, 1)
+ if active_connections is None:
+ websocket_state["state"] = "offline"
+ else:
+ if clients > 0:
+ websocket_state["state"] = "connected"
+ else:
+ if last_ws_activity_ts:
+ delta = now - last_ws_activity_ts
+ websocket_state["last_activity_s"] = round(delta, 1)
+ websocket_state["state"] = "idle" if delta < 30 else "listening"
+ else:
+ websocket_state["state"] = "listening"
+ except Exception:
+ websocket_state = {"state": "offline"}
+
+ # WebSocket semantisch: connected > idle > listening; offline nur bei fehlender Erreichbarkeit
+ try:
+ if active_connections is None:
+ websocket_state = {"state": "offline"}
+ else:
+ count = len(active_connections)
+ now = time.time()
+ if count > 0:
+ websocket_state = {"state": "connected", "clients": count}
+ else:
+ # Kein aktiver Client, aber Endpoint existiert
+ ws_state = "listening"
+ # last_ws_ping entfernt, da nicht vorhanden
+ websocket_state["state"] = ws_state
+ except Exception:
+ websocket_state = {"state": "offline"}
+
+ runtime_state = get_runtime_metrics()
+ try:
+ rpm = runtime_state.get("requests_per_minute", 0) if isinstance(runtime_state, dict) else 0
+ rpm_num = float(rpm) if rpm is not None else 0.0
+ runtime_state["requests_per_minute"] = rpm_num
+ runtime_state["state"] = "active" if rpm_num > 0 else "idle"
+ except Exception:
+ runtime_state = {"requests_per_minute": 0, "avg_response_ms": None, "state": "idle"}
+
+ # System health with thresholds from settings/config
+ def _load_health_thresholds():
+ enabled = True
+ warn = 600
+ error = 1200
+ try:
+ with Session(engine) as s:
+ cfg = _load_config(s)
+ sh = cfg.get("debug", {}).get("system_health", {})
+ enabled = bool(sh.get("enabled", True))
+ warn = int(sh.get("warn_latency_ms", warn))
+ error = int(sh.get("error_latency_ms", error))
+ except Exception:
+ pass
+ return enabled, warn, error
+
+ enabled, warn_threshold, error_threshold = _load_health_thresholds()
+ reasons = []
+ health_status = "ok"
+ try:
+ avg_ms = runtime_state.get("avg_response_ms")
+ mqtt_state_value = mqtt_state.get("state")
+ if not enabled:
+ reasons = ["Health monitoring disabled"]
+ else:
+ if isinstance(avg_ms, (int, float)):
+ if avg_ms >= error_threshold:
+ health_status = "critical"
+ reasons.append(f"High average response time ({int(round(avg_ms))} ms >= {error_threshold} ms)")
+ elif avg_ms >= warn_threshold:
+ health_status = "warning"
+ reasons.append(f"High average response time ({int(round(avg_ms))} ms >= {warn_threshold} ms)")
+ else:
+ health_status = "warning"
+ reasons.append("Average response time not available")
+ reasons.append("MQTT service is disabled")
+ if health_status == "ok":
+ health_status = "warning"
+ # Service-MQTT ist immer abgeleitet vom Runtime-MQTT-Status
+ if mqtt_state_value != "connected":
+ reasons.append("Nicht verbunden")
+ if health_status == "ok":
+ health_status = "warning"
+ except Exception:
+ health_status = "warning"
+ if not reasons:
+ reasons = ["Health monitoring unavailable"]
+
+ if health_status == "ok" and not reasons:
+ reasons = ["System is operating normally"]
+ if health_status == "warning" and not reasons:
+ reasons = ["Some services require attention"]
+
+ system_health = {"status": health_status, "reasons": reasons}
+
+ environment_info = build_environment_snapshot(request)
+
+ return {
+ "api": api_state,
+ "db": db_state,
+ "mqtt": mqtt_state,
+ "websocket": websocket_state,
+ "runtime": runtime_state,
+ "system_health": system_health,
+ "environment": environment_info,
+ }
diff --git a/app/routes/jobs.py b/app/routes/jobs.py
new file mode 100644
index 0000000..bf01e44
--- /dev/null
+++ b/app/routes/jobs.py
@@ -0,0 +1,406 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import Session, SQLModel, select
+from typing import List, Optional, Any
+from datetime import datetime
+from app.database import get_session
+from app.models.job import Job, JobCreate, JobRead, JobSpoolUsage
+from app.models.spool import Spool
+from app.models.printer import Printer
+from app.models.settings import Setting
+import app.services.live_state as live_state_module
+from app.services.eta import calculate_eta
+
+router = APIRouter(prefix="/api/jobs", tags=["jobs"])
+
+
+def _compute_eta_for_job(job: Job, session: Session) -> Optional[int]:
+ """Try to compute ETA for a Job using printer model and live-state payload.
+
+ Returns seconds (int) or None.
+ """
+ try:
+ printer = session.get(Printer, job.printer_id)
+ if not printer:
+ return None
+
+ # Try to get live payload by cloud_serial
+ cloud = printer.cloud_serial
+ live_entry = live_state_module.get_live_state(cloud) if cloud else None
+ payload = None
+ if isinstance(live_entry, dict):
+ payload = live_entry.get("payload") or {}
+
+ # Normalize print payload
+ print_data = None
+ if isinstance(payload, dict):
+ print_data = payload.get("print") or payload
+
+ # Extract fields
+ layer_num = None
+ total_layer_num = None
+ bambu_remaining_time = None
+
+ if isinstance(print_data, dict):
+ layer_num = print_data.get("layer_current") or print_data.get("layer_num") or print_data.get("layer") or print_data.get("layer_index")
+ total_layer_num = print_data.get("layer_total") or print_data.get("layer_count") or print_data.get("total_layers")
+ bambu_remaining_time = print_data.get("remain_time_s") or print_data.get("mc_remaining_time") or print_data.get("mc_remaining_seconds") or print_data.get("remaining_time") or print_data.get("remain")
+
+ # Coerce types
+ try:
+ layer_num = int(layer_num) if layer_num is not None else None
+ except Exception:
+ layer_num = None
+ try:
+ total_layer_num = int(total_layer_num) if total_layer_num is not None else None
+ except Exception:
+ total_layer_num = None
+ try:
+ if bambu_remaining_time is not None:
+ bambu_remaining_time = int(float(bambu_remaining_time))
+ except Exception:
+ bambu_remaining_time = None
+
+ eta = calculate_eta(
+ printer_model=printer.model if hasattr(printer, "model") else None,
+ started_at=job.started_at,
+ layer_num=layer_num,
+ total_layer_num=total_layer_num,
+ bambu_remaining_time=bambu_remaining_time,
+ )
+ if eta is not None and eta < 0:
+ eta = 0
+ return eta
+ except Exception:
+ return None
+
+
+def _coerce_dt(value: Any, now: datetime) -> datetime:
+ if isinstance(value, datetime):
+ return value
+ if isinstance(value, str):
+ try:
+ return datetime.fromisoformat(value)
+ except ValueError:
+ return now
+ return now
+
+
+@router.get("/", response_model=List[JobRead])
+def get_all_jobs(session: Session = Depends(get_session)):
+ """Alle Druckaufträge abrufen"""
+ jobs = session.exec(select(Job).order_by(Job.started_at.desc())).all() # type: ignore
+ # Attach ETA where possible (non-blocking)
+ for j in jobs:
+ try:
+ j.eta_seconds = _compute_eta_for_job(j, session)
+ except Exception:
+ j.eta_seconds = None
+ return jobs
+
+@router.get("/with-usage")
+def get_all_jobs_with_usage(session: Session = Depends(get_session)):
+ """Alle Jobs inkl. Spulenverbrauch (job_spool_usage) liefern"""
+ jobs = session.exec(select(Job).order_by(Job.started_at.desc())).all()
+ result = []
+ for job in jobs:
+ usages = session.exec(
+ select(JobSpoolUsage).where(JobSpoolUsage.job_id == job.id).order_by(JobSpoolUsage.order_index)
+ ).all()
+ item = job.model_dump()
+ # compute ETA and inject
+ try:
+ item["eta_seconds"] = _compute_eta_for_job(job, session)
+ except Exception:
+ item["eta_seconds"] = None
+ item["usages"] = [u.model_dump() for u in usages]
+ result.append(item)
+ return result
+
+
+@router.get("/stats/summary")
+def get_job_stats(session: Session = Depends(get_session)):
+ """Job-Statistiken abrufen"""
+ jobs = session.exec(select(Job)).all()
+
+ total_jobs = len(jobs)
+ total_filament_g = sum((job.filament_used_g or 0.0) for job in jobs)
+ total_filament_m = sum((job.filament_used_mm or 0.0) for job in jobs) / 1000 # mm to m
+
+ completed_jobs = [job for job in jobs if job.finished_at is not None]
+ active_jobs = total_jobs - len(completed_jobs)
+
+ # Energie-Berechnung
+ now = datetime.utcnow()
+ default_power_kw = 0.30 # Schätzung wenn kein Wert hinterlegt
+ power_exact_kwh = 0.0
+ power_est_kwh = 0.0
+ total_duration_h = 0.0
+
+ # Schnellzugriff: alle Printer laden
+ printers = {p.id: p for p in session.exec(select(Printer)).all()}
+
+ for job in jobs:
+ start = _coerce_dt(job.started_at, now)
+ end = _coerce_dt(job.finished_at, now)
+ duration_h = max((end - start).total_seconds(), 0) / 3600.0
+ total_duration_h += duration_h
+
+ printer = printers.get(job.printer_id)
+ power = printer.power_consumption_kw if printer else None
+ if power is not None:
+ power_exact_kwh += power * duration_h
+ else:
+ power_est_kwh += default_power_kw * duration_h
+
+ energy_kwh = power_exact_kwh + power_est_kwh
+
+ # Strompreis laden
+ price_setting = session.exec(select(Setting).where(Setting.key == "cost.electricity_price_kwh")).first()
+ price_kwh = float(price_setting.value) if price_setting and price_setting.value else None
+ energy_cost = energy_kwh * price_kwh if price_kwh is not None else None
+
+ return {
+ "total_jobs": total_jobs,
+ "completed_jobs": len(completed_jobs),
+ "active_jobs": active_jobs,
+ "total_filament_g": round(total_filament_g, 2),
+ "total_filament_m": round(total_filament_m, 2),
+ "total_duration_h": round(total_duration_h, 2),
+ "energy_kwh_exact": round(power_exact_kwh, 3),
+ "energy_kwh_estimated": round(power_est_kwh, 3),
+ "energy_kwh_total": round(energy_kwh, 3),
+ "energy_cost_total": round(energy_cost, 2) if energy_cost is not None else None,
+ "energy_price_kwh": price_kwh,
+ }
+
+
+@router.get("/{job_id}", response_model=JobRead)
+def get_job(job_id: str, session: Session = Depends(get_session)):
+ """Einzelnen Druckauftrag abrufen"""
+ job = session.get(Job, job_id)
+ if not job:
+ raise HTTPException(status_code=404, detail="Druckauftrag nicht gefunden")
+ try:
+ job.eta_seconds = _compute_eta_for_job(job, session)
+ except Exception:
+ job.eta_seconds = None
+ return job
+
+
+@router.post("/", response_model=JobRead)
+def create_job(job: JobCreate, session: Session = Depends(get_session)):
+ """Neuen Druckauftrag anlegen"""
+ db_job = Job.model_validate(job)
+
+ # Wenn Spule zugewiesen und Verbrauch vorhanden: Von Spule abziehen
+ if db_job.spool_id and db_job.filament_used_g and db_job.filament_used_g > 0:
+ spool = session.get(Spool, db_job.spool_id)
+ if spool:
+ # Gewicht abziehen
+ if spool.weight_current is not None:
+ new_weight = max(0, float(spool.weight_current) - float(db_job.filament_used_g))
+ spool.weight_current = new_weight
+
+ # Prozentsatz neu berechnen
+ if spool.weight_full and spool.weight_empty:
+ weight_range = float(spool.weight_full) - float(spool.weight_empty)
+ if weight_range > 0:
+ spool.remain_percent = ((new_weight - float(spool.weight_empty)) / weight_range) * 100
+ spool.remain_percent = max(0, min(100, spool.remain_percent)) # Clamp auf 0-100
+
+ # Spule als "leer" markieren wenn unter 50g
+ if new_weight < 50:
+ spool.is_empty = True
+
+ session.add(spool)
+
+ session.add(db_job)
+ session.commit()
+ session.refresh(db_job)
+ try:
+ db_job.eta_seconds = _compute_eta_for_job(db_job, session)
+ except Exception:
+ db_job.eta_seconds = None
+ return db_job
+
+
+@router.put("/{job_id}", response_model=JobRead)
+def update_job(job_id: str, job: JobCreate, session: Session = Depends(get_session)):
+ """Druckauftrag aktualisieren"""
+ db_job = session.get(Job, job_id)
+ if not db_job:
+ raise HTTPException(status_code=404, detail="Druckauftrag nicht gefunden")
+
+ # Merke alte Werte für Differenz-Berechnung
+ old_spool_id = db_job.spool_id
+ old_used_g = db_job.filament_used_g or 0
+
+ job_data = job.model_dump(exclude_unset=True)
+ for key, value in job_data.items():
+ setattr(db_job, key, value)
+
+ # Wenn Spule oder Verbrauch geändert wurde: Gewicht anpassen
+ new_spool_id = db_job.spool_id
+ new_used_g = db_job.filament_used_g or 0
+
+ # Fall 1: Spule wurde geändert oder hinzugefügt
+ if new_spool_id and new_spool_id != old_spool_id and new_used_g > 0:
+ spool = session.get(Spool, new_spool_id)
+ if spool:
+ # Ganzen Verbrauch abziehen
+ if spool.weight_current is not None:
+ new_weight = max(0, float(spool.weight_current) - float(new_used_g))
+ spool.weight_current = new_weight
+
+ # Prozentsatz neu berechnen
+ if spool.weight_full and spool.weight_empty:
+ weight_range = float(spool.weight_full) - float(spool.weight_empty)
+ if weight_range > 0:
+ spool.remain_percent = ((new_weight - float(spool.weight_empty)) / weight_range) * 100
+ spool.remain_percent = max(0, min(100, spool.remain_percent))
+
+ if new_weight < 50:
+ spool.is_empty = True
+
+ session.add(spool)
+
+ # Fall 2: Gleiche Spule, aber Verbrauch hat sich geändert
+ elif new_spool_id and new_spool_id == old_spool_id and new_used_g != old_used_g:
+ spool = session.get(Spool, new_spool_id)
+ if spool and spool.weight_current is not None:
+ # Differenz berechnen und anpassen
+ diff_g = new_used_g - old_used_g
+ new_weight = max(0, float(spool.weight_current) - diff_g)
+ spool.weight_current = new_weight
+
+ # Prozentsatz neu berechnen
+ if spool.weight_full and spool.weight_empty:
+ weight_range = float(spool.weight_full) - float(spool.weight_empty)
+ if weight_range > 0:
+ spool.remain_percent = ((new_weight - float(spool.weight_empty)) / weight_range) * 100
+ spool.remain_percent = max(0, min(100, spool.remain_percent))
+
+ if new_weight < 50:
+ spool.is_empty = True
+
+ session.add(spool)
+
+ session.add(db_job)
+ session.commit()
+ session.refresh(db_job)
+ try:
+ db_job.eta_seconds = _compute_eta_for_job(db_job, session)
+ except Exception:
+ db_job.eta_seconds = None
+ return db_job
+
+
+class JobSpoolUpdate(SQLModel):
+ spool_id: Optional[str] = None
+
+
+class JobManualUsageUpdate(SQLModel):
+ """Model für manuelle Verbrauchseingabe bei Jobs ohne AMS"""
+ spool_id: Optional[str] = None # Spule zuordnen (Pflicht!)
+ used_g: Optional[float] = None # Verbrauch in Gramm
+ used_mm: Optional[float] = None # Verbrauch in Millimetern
+
+
+@router.patch("/{job_id}/spool", response_model=JobRead)
+def override_job_spool(job_id: str, payload: JobSpoolUpdate, session: Session = Depends(get_session)):
+ """
+ Spulen-Zuordnung eines Jobs überschreiben (oder entfernen).
+ spool_id None entfernt die Zuordnung.
+ """
+ db_job = session.get(Job, job_id)
+ if not db_job:
+ raise HTTPException(status_code=404, detail="Druckauftrag nicht gefunden")
+
+ if payload.spool_id:
+ spool = session.get(Spool, payload.spool_id)
+ if not spool:
+ raise HTTPException(status_code=400, detail="Spule nicht gefunden")
+
+ # Status auf "Aktiv" setzen bei manueller Zuweisung (wenn nicht leer)
+ if not spool.is_empty and spool.status != "Aktiv":
+ spool.status = "Aktiv"
+ spool.is_open = True
+ session.add(spool)
+
+ db_job.spool_id = payload.spool_id
+ session.add(db_job)
+ session.commit()
+ session.refresh(db_job)
+ try:
+ db_job.eta_seconds = _compute_eta_for_job(db_job, session)
+ except Exception:
+ db_job.eta_seconds = None
+ return db_job
+
+
+@router.patch("/{job_id}/manual-usage", response_model=JobRead)
+def update_manual_usage(job_id: str, payload: JobManualUsageUpdate, session: Session = Depends(get_session)):
+ """
+ Manuelle Verbrauchseingabe für Jobs ohne AMS-Tracking.
+ Spule muss zugeordnet werden (Pflicht), dann wird Verbrauch von Spule abgezogen.
+ """
+ db_job = session.get(Job, job_id)
+ if not db_job:
+ raise HTTPException(status_code=404, detail="Druckauftrag nicht gefunden")
+
+ # Spule ist Pflicht!
+ if not payload.spool_id:
+ raise HTTPException(status_code=400, detail="Spule muss zugeordnet werden")
+
+ spool = session.get(Spool, payload.spool_id)
+ if not spool:
+ raise HTTPException(status_code=400, detail="Spule nicht gefunden")
+
+ # Spule zuordnen
+ db_job.spool_id = payload.spool_id
+
+ # Status auf "Aktiv" setzen bei manueller Zuweisung (wenn nicht leer)
+ if not spool.is_empty and spool.status != "Aktiv":
+ spool.status = "Aktiv"
+ spool.is_open = True
+
+ # Verbrauch setzen (mindestens einer muss angegeben sein)
+ if payload.used_g is None and payload.used_mm is None:
+ raise HTTPException(status_code=400, detail="Verbrauch (used_g oder used_mm) muss angegeben werden")
+
+ if payload.used_mm is not None:
+ db_job.filament_used_mm = payload.used_mm
+ if payload.used_g is not None:
+ db_job.filament_used_g = payload.used_g
+
+ # Von Spule abziehen (wenn Gewicht vorhanden)
+ if payload.used_g and spool.weight_current is not None:
+ new_weight = max(0, float(spool.weight_current) - float(payload.used_g))
+ spool.weight_current = new_weight
+
+ # Spule als "leer" markieren wenn unter 50g
+ if new_weight < 50:
+ spool.is_empty = True
+
+ session.add(spool)
+
+ session.add(db_job)
+ session.commit()
+ session.refresh(db_job)
+ try:
+ db_job.eta_seconds = _compute_eta_for_job(db_job, session)
+ except Exception:
+ db_job.eta_seconds = None
+ return db_job
+
+
+@router.delete("/{job_id}")
+def delete_job(job_id: str, session: Session = Depends(get_session)):
+ """Druckauftrag löschen"""
+ job = session.get(Job, job_id)
+ if not job:
+ raise HTTPException(status_code=404, detail="Druckauftrag nicht gefunden")
+
+ session.delete(job)
+ session.commit()
+ return {"success": True, "message": "Druckauftrag gelöscht"}
diff --git a/app/routes/live_state_routes.py b/app/routes/live_state_routes.py
new file mode 100644
index 0000000..f8a5ac8
--- /dev/null
+++ b/app/routes/live_state_routes.py
@@ -0,0 +1,22 @@
+from fastapi import APIRouter, HTTPException
+from typing import Any
+
+from app.services.live_state import get_live_state
+
+router = APIRouter(prefix="/api/live-state", tags=["LiveState"])
+
+
+@router.get("/{device_id}")
+async def get_live_state_endpoint(device_id: str) -> Any:
+ st = get_live_state(device_id)
+ if not st:
+ raise HTTPException(status_code=404, detail="Live state not found")
+ return st
+
+
+from app.services.live_state import get_all_live_state
+
+
+@router.get("/")
+async def list_live_state() -> Any:
+ return get_all_live_state()
diff --git a/app/routes/log_routes.py b/app/routes/log_routes.py
new file mode 100644
index 0000000..b1dc17b
--- /dev/null
+++ b/app/routes/log_routes.py
@@ -0,0 +1,28 @@
+from fastapi import APIRouter
+from fastapi.responses import JSONResponse
+
+router = APIRouter(prefix="/api/logs", tags=["Logs"])
+
+
+def _deprecated_response():
+ return JSONResponse({"deprecated": True, "use": "/api/debug/logs"}, status_code=410)
+
+
+@router.get("/modules")
+def get_modules():
+ return _deprecated_response()
+
+
+@router.get("/today")
+def get_today_log():
+ return _deprecated_response()
+
+
+@router.get("/date/{date}")
+def get_log_by_date(date: str):
+ return _deprecated_response()
+
+
+@router.get("/errors/latest")
+def latest_error():
+ return _deprecated_response()
diff --git a/app/routes/materials.py b/app/routes/materials.py
index c5dcf35..15d6cde 100644
--- a/app/routes/materials.py
+++ b/app/routes/materials.py
@@ -1,57 +1,85 @@
-from fastapi import APIRouter, Depends, HTTPException
+from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi.responses import Response
from sqlmodel import select, Session
from typing import List
from app.database import get_session
-from app.models.material import Material, MaterialCreate, MaterialRead
+from app.models.material import Material, MaterialCreateSchema, MaterialUpdateSchema, MaterialReadSchema
router = APIRouter(prefix="/api/materials", tags=["Materials"])
+def _normalize_material_payload(data: MaterialCreateSchema | MaterialUpdateSchema) -> dict:
+ payload = data.model_dump(exclude_unset=True)
+ # remove fields that are not persisted (e.g. material_type/type alias)
+ payload.pop("material_type", None)
+ # normalize printer_slot strings like "AMS-1" to int
+ slot = payload.get("printer_slot")
+ if isinstance(slot, str):
+ digits = "".join(filter(str.isdigit, slot))
+ payload["printer_slot"] = int(digits) if digits else None
+ return payload
-@router.get("/", response_model=List[MaterialRead])
+
+@router.get("/", response_model=List[MaterialReadSchema])
def list_materials(session: Session = Depends(get_session)):
result = session.exec(select(Material)).all()
- return result
+ return [MaterialReadSchema.model_validate(m) for m in result]
+
+@router.get("/brands/list", response_model=List[str])
+def get_brands(session: Session = Depends(get_session)):
+ """Get all unique brands from materials"""
+ materials = session.exec(select(Material)).all()
+ brands = sorted(set(m.brand for m in materials if m.brand))
+ return list(brands)
-@router.get("/{material_id}", response_model=MaterialRead)
+
+@router.get("/{material_id}", response_model=MaterialReadSchema)
def get_material(material_id: str, session: Session = Depends(get_session)):
material = session.get(Material, material_id)
if not material:
raise HTTPException(status_code=404, detail="Material nicht gefunden")
- return material
+ return MaterialReadSchema.model_validate(material)
-@router.post("/", response_model=MaterialRead)
-def create_material(data: MaterialCreate, session: Session = Depends(get_session)):
- material = Material.from_orm(data)
- session.add(material)
- session.commit()
- session.refresh(material)
- return material
+@router.post("/", response_model=MaterialReadSchema, status_code=status.HTTP_201_CREATED)
+def create_material(data: MaterialCreateSchema, session: Session = Depends(get_session)):
+ exists = session.exec(select(Material).where(Material.name == data.name, Material.brand == getattr(data, "brand", None))).first()
+ if exists:
+ raise HTTPException(status_code=409, detail="Material existiert bereits")
+ try:
+ payload = _normalize_material_payload(data)
+ material = Material(**payload)
+ session.add(material)
+ session.commit()
+ session.refresh(material)
+ return MaterialReadSchema.model_validate(material)
+ except Exception as e:
+ raise HTTPException(status_code=400, detail=f"Fehler bei Validierung: {e}")
-@router.put("/{material_id}", response_model=MaterialRead)
-def update_material(material_id: str, data: MaterialCreate, session: Session = Depends(get_session)):
+@router.put("/{material_id}", response_model=MaterialReadSchema)
+def update_material(material_id: str, data: MaterialUpdateSchema, session: Session = Depends(get_session)):
material = session.get(Material, material_id)
if not material:
raise HTTPException(status_code=404, detail="Material nicht gefunden")
-
- update_data = data.dict(exclude_unset=True)
+ update_data = _normalize_material_payload(data)
for key, value in update_data.items():
setattr(material, key, value)
-
- session.add(material)
- session.commit()
- session.refresh(material)
- return material
+ try:
+ session.add(material)
+ session.commit()
+ session.refresh(material)
+ return MaterialReadSchema.model_validate(material)
+ except Exception as e:
+ raise HTTPException(status_code=400, detail=f"Fehler bei Validierung: {e}")
-@router.delete("/{material_id}")
+@router.delete("/{material_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_material(material_id: str, session: Session = Depends(get_session)):
material = session.get(Material, material_id)
if not material:
raise HTTPException(status_code=404, detail="Material nicht gefunden")
session.delete(material)
session.commit()
- return {"status": "deleted"}
+ return Response(status_code=status.HTTP_204_NO_CONTENT)
\ No newline at end of file
diff --git a/app/routes/mqtt_routes.py b/app/routes/mqtt_routes.py
new file mode 100644
index 0000000..e6d6663
--- /dev/null
+++ b/app/routes/mqtt_routes.py
@@ -0,0 +1,1359 @@
+import asyncio
+
+import os
+
+import time
+
+from collections import deque
+
+from fastapi import APIRouter, WebSocket, WebSocketDisconnect, HTTPException, Depends
+import json
+import ssl
+import logging
+import yaml
+from datetime import datetime
+from typing import Dict, Any, Optional, Sequence, Set, List, cast
+import paho.mqtt.client as mqtt
+from sqlmodel import select
+from app.database import get_session
+from services.printer_service import PrinterService
+
+import sqlalchemy as sa
+
+from app.services import mqtt_runtime
+from logging.handlers import RotatingFileHandler
+from pydantic import BaseModel
+from fastapi import Request
+
+from app.services.mqtt_payload_processor import process_mqtt_payload
+from app.services.ams_parser import parse_ams
+from app.services.job_parser import parse_job
+from app.services.universal_mapper import UniversalMapper
+from app.services.printer_auto_detector import PrinterAutoDetector
+from app.services.live_state import set_live_state
+from app.services.ams_sync import sync_ams_slots
+from app.services.job_tracking_service import job_tracking_service
+
+from app.models.printer import Printer
+from app.services.spool_number_service import assign_spool_number
+from services.mqtt_protocol_detector import MQTTProtocolDetector
+
+# ...existing code...
+
+router = APIRouter(prefix="/api/mqtt", tags=["MQTT"])
+
+# ...existing code...
+
+mqtt_ws_clients = set()
+
+
+
+# ...alle bisherigen Routen und Funktionen...
+
+
+
+@router.websocket("/ws/logs/{module}")
+
+async def websocket_logs(websocket: WebSocket, module: str):
+
+ await websocket.accept()
+
+ log_file_map = {
+ "app": "logs/app/app.log",
+ "bambu": "logs/bambu/bambu.log",
+ "klipper": "logs/klipper/klipper.log",
+ "errors": "logs/errors/errors.log",
+ "mqtt": "logs/mqtt/mqtt_messages.log",
+ }
+
+ log_file = log_file_map.get(module)
+
+ try:
+ # Optional: nur die letzten N Zeilen senden (tail). Default: 0 = keine Historie
+ tail_param = websocket.query_params.get("tail", "0") if hasattr(websocket, "query_params") else "0"
+ try:
+ tail = int(tail_param)
+ except Exception:
+ tail = 0
+
+ last_size = 0
+ if log_file and os.path.exists(log_file):
+ # Falls tail > 0 angefordert wurde, sende letzte N Zeilen; ansonsten überspringe Historie
+ if tail > 0:
+ try:
+ with open(log_file, "r", encoding="utf-8") as f:
+ dq = deque(f, maxlen=tail)
+ for line in dq:
+ await websocket.send_text(line.strip())
+ except Exception:
+ pass
+ # setze Startposition auf Dateiende, damit keine Historie erneut gesendet wird
+ try:
+ last_size = os.path.getsize(log_file)
+ except Exception:
+ last_size = 0
+
+ while True:
+
+ if log_file:
+
+ try:
+
+ with open(log_file, "r", encoding="utf-8") as f:
+
+ f.seek(last_size)
+
+ new_lines = f.readlines()
+
+ last_size = f.tell()
+
+ for line in new_lines:
+
+ await websocket.send_text(line.strip())
+
+ except FileNotFoundError:
+
+ pass
+
+ await asyncio.sleep(1)
+
+ except WebSocketDisconnect:
+ return
+from app.models.job import Job, JobSpoolUsage
+
+from sqlmodel import select
+
+from app.models.spool import Spool
+
+from app.models.material import Material
+
+from services.printer_service import PrinterService
+
+
+
+# Wichtig: KEINE zweite Router-Initialisierung wir verwenden den oben definierten `router`.
+
+
+
+# === MQTT LOGGER SETUP ===
+
+def get_mqtt_logger():
+
+ """Erstellt/holt MQTT Logger mit Rotation"""
+
+ logger = logging.getLogger("MQTT_Messages")
+
+
+
+ # Nur einmal initialisieren
+
+ if logger.handlers:
+
+ return logger
+
+
+
+ # Lese Config
+
+ try:
+
+ with open("config.yaml", "r", encoding="utf-8") as f:
+
+ config = yaml.safe_load(f)
+
+ max_size_mb = config.get("logging", {}).get("max_size_mb", 10)
+
+ backup_count = config.get("logging", {}).get("backup_count", 3)
+
+ except Exception as exc:
+
+ logging.getLogger("app.routes.mqtt").warning("Failed to read config.yaml for MQTT logger: %s", exc)
+
+ max_size_mb = 10
+
+ backup_count = 3
+
+
+
+ # Erstelle Handler
+
+ os.makedirs("logs/mqtt", exist_ok=True)
+
+ handler = RotatingFileHandler(
+
+ "logs/mqtt/mqtt_messages.log",
+
+ maxBytes=max_size_mb * 1024 * 1024,
+
+ backupCount=backup_count,
+
+ encoding="utf-8"
+
+ )
+
+
+
+ # Setze Flush sofort (kein Buffering)
+
+ handler.flush = lambda: handler.stream.flush() if handler.stream else None
+
+
+
+ formatter = logging.Formatter("%(asctime)s | %(message)s")
+
+ handler.setFormatter(formatter)
+
+
+
+ logger.setLevel(logging.INFO)
+
+ logger.addHandler(handler)
+
+ logger.propagate = False # Verhindere doppelte Logs
+
+
+
+ return logger
+
+
+
+mqtt_message_logger = get_mqtt_logger()
+
+
+
+# === MODELS ===
+
+class MQTTConnection(BaseModel):
+
+ broker: str
+
+ port: int = 1
+
+ username: Optional[str] = None
+
+ password: Optional[str] = None
+
+ client_id: Optional[str] = "filamenthub_debug"
+
+ cloud_serial: Optional[str] = None # bevorzugte Serial f�r Default-Topic
+
+ use_tls: bool = False
+
+ tls_insecure: bool = True # Self-signed / printer certs erlauben
+
+
+
+class MQTTSubscription(BaseModel):
+
+ topic: str
+
+
+
+class MQTTMessage(BaseModel):
+
+ topic: str
+
+ payload: str
+
+ timestamp: str
+
+ qos: int = 0
+
+
+
+# === GLOBAL STATE ===
+
+mqtt_clients: Dict[str, mqtt.Client] = {}
+
+active_connections: Set[WebSocket] = set()
+
+active_ws_clients: int = 0
+
+last_ws_activity_ts: Optional[float] = None
+
+message_buffer: List[MQTTMessage] = []
+
+MAX_BUFFER_SIZE = 1000
+
+# Default-Topic nicht mehr statisch hinterlegen; wird dynamisch aus client_id abgeleitet
+
+DEFAULT_TOPIC = None
+
+subscribed_topics: Set[str] = set()
+
+event_loop: Optional[asyncio.AbstractEventLoop] = None
+
+# Job-Tracking wird zentral über job_tracking_service verwaltet (siehe app/services/job_tracking_service.py)
+
+last_connect_error: Optional[int] = None # letzter RC bei fehlgeschlagener Verbindung
+
+printer_service_ref: Optional[PrinterService] = None
+
+
+# === MQTT CALLBACKS ===
+
+def on_connect(client, userdata, flags, rc, properties=None):
+
+ """Callback when connected to MQTT broker"""
+
+ connection_id = userdata.get('connection_id', 'unknown')
+
+ global last_connect_error
+
+ if rc == 0:
+
+ last_connect_error = None
+
+ print(f"[MQTT] Connected: {connection_id}")
+
+ # Default-Topic: bei Bambu ausschließlich cloud_serial verwenden.
+ # Kein Fallback auf client_id, um falsche Topics zu vermeiden.
+
+ if not subscribed_topics:
+
+ default_topic = None
+
+ try:
+
+ cserial = userdata.get('cloud_serial') if userdata else None
+
+ if cserial:
+
+ default_topic = f"device/{cserial}/report"
+
+ except Exception:
+
+ default_topic = None
+
+ if default_topic:
+
+ print(f"Abonniere Default-Topic: {default_topic}")
+
+ client.subscribe(default_topic)
+
+ subscribed_topics.add(default_topic)
+ try:
+ mqtt_runtime.register_subscription(default_topic)
+ except Exception:
+ pass
+
+ else:
+
+ for topic in subscribed_topics:
+
+ print(f"Abonniere MQTT-Topic: {topic}")
+
+ client.subscribe(topic)
+ try:
+ mqtt_runtime.register_subscription(topic)
+ except Exception:
+ pass
+
+ else:
+
+ last_connect_error = rc
+
+ print(f"[MQTT] Connection failed (rc={rc})")
+
+ # Fehlschlag: vorhandene Subscriptions leeren, damit Status korrekt ist
+
+ subscribed_topics.clear()
+ try:
+ mqtt_runtime.clear_subscriptions()
+ except Exception:
+ pass
+
+ try:
+
+ client.disconnect()
+
+ client.loop_stop()
+
+ except Exception:
+
+ pass
+
+ try:
+
+ cid = userdata.get('connection_id') if userdata else None
+
+ if cid and cid in mqtt_clients:
+
+ del mqtt_clients[cid]
+
+ except Exception:
+
+ pass
+
+
+
+
+
+def on_message(client, userdata, msg):
+
+ """Callback when message received"""
+
+ try:
+ # Ensure variables are always defined for static analysis
+ cloud_serial_from_topic = None
+ printer_model_for_mapper = None
+ printer_name_for_service = None
+ # raw payload text
+ payload = msg.payload.decode('utf-8', errors='replace')
+ # Delegate payload parsing and mapping to dedicated processor
+ try:
+ proc = process_mqtt_payload(msg.topic, payload, printer_service_ref)
+ parsed_json = proc.get("raw")
+ ams_data = proc.get("ams") or []
+ job_data = proc.get("job") or {}
+ mapped_obj = proc.get("mapped")
+ mapped_dict = proc.get("mapped_dict")
+ caps = proc.get("capabilities")
+ if proc.get("serial"):
+ cloud_serial_from_topic = proc.get("serial")
+ except Exception:
+ parsed_json = None
+ ams_data = []
+ job_data = {}
+ mapped_obj = None
+ mapped_dict = None
+ caps = None
+
+ try:
+ parts = msg.topic.split("/")
+ if len(parts) >= 2 and parts[0] == "device":
+ cloud_serial_from_topic = parts[1]
+ except Exception:
+ pass
+
+ try:
+
+ parsed_json = json.loads(payload)
+
+ if msg.topic.endswith("/report"):
+
+ ams_data = parse_ams(parsed_json)
+
+ job_data = parse_job(parsed_json) or {}
+
+ # Update in-memory live-state for this device if we have a cloud_serial
+ try:
+ if cloud_serial_from_topic:
+ set_live_state(cloud_serial_from_topic, parsed_json)
+ else:
+ print(f"[MQTT] WARNING: No cloud_serial_from_topic for {msg.topic}")
+ except Exception as e:
+ print(f"[MQTT] ERROR in set_live_state: {e}")
+ import traceback
+ traceback.print_exc()
+
+ except Exception:
+
+ parsed_json = None
+
+
+
+ # Schreibe die Nachricht in MQTT-Log (RotatingFileHandler ?bernimmt Rotation)
+
+ try:
+
+ mqtt_message_logger.info(f"Topic={msg.topic} | Payload={payload}")
+
+ except Exception as logerr:
+
+ print(f"? Fehler beim Schreiben in MQTT-Logdatei: {logerr}")
+
+
+
+ # Sende empfangene MQTT-Nachricht an alle verbundenen WebSocket-Clients (Text-Log)
+
+ if event_loop:
+
+ for ws in list(mqtt_ws_clients):
+
+ try:
+
+ asyncio.run_coroutine_threadsafe(
+
+ ws.send_text(f"{datetime.now().isoformat()} | Topic={msg.topic} | Payload={payload}"),
+
+ event_loop
+
+ )
+
+ except Exception:
+
+ pass
+
+
+
+ message = MQTTMessage(
+
+ topic=msg.topic,
+
+ payload=payload,
+
+ timestamp=datetime.now().isoformat(),
+
+ qos=msg.qos
+
+ )
+
+ printer_id_for_ams = None
+
+ printer_obj = None
+
+ caps = None
+
+ if cloud_serial_from_topic:
+
+ try:
+
+ with next(get_session()) as session:
+
+ p = session.exec(select(Printer).where(Printer.cloud_serial == cloud_serial_from_topic)).first()
+
+ if p:
+
+ printer_obj = p
+
+ printer_id_for_ams = p.id
+
+ printer_name_for_service = p.name
+
+ printer_model_for_mapper = p.model or "X1C"
+
+ except Exception:
+
+ printer_id_for_ams = None
+
+ if parsed_json:
+
+ try:
+
+ # Modell-Autoerkennung
+
+ detected_model = PrinterAutoDetector.detect_model_from_payload(parsed_json) or PrinterAutoDetector.detect_model_from_serial(getattr(printer_obj, "cloud_serial", None))
+
+ final_model = detected_model or printer_model_for_mapper or "UNKNOWN"
+
+ if printer_obj and final_model != printer_obj.model:
+
+ printer_obj.model = final_model
+
+ try:
+
+ with next(get_session()) as session:
+
+ session.add(printer_obj)
+
+ session.commit()
+
+ except Exception:
+
+ pass
+
+ printer_model_for_mapper = final_model
+
+ caps = PrinterAutoDetector.detect_capabilities(parsed_json)
+
+ mapper = UniversalMapper(printer_model_for_mapper)
+
+ mapped_obj = mapper.map(parsed_json)
+
+ mapped_dict = mapped_obj.to_dict()
+
+ if mapped_dict and mapped_dict.get("job"):
+
+ job_data = mapped_dict.get("job")
+
+ if printer_service_ref:
+ # Prefer cloud_serial as key. If no serial is present, ignore updates.
+ if cloud_serial_from_topic:
+ printer_service_ref.update_printer(cloud_serial_from_topic, mapped_obj)
+ if caps:
+ printer_service_ref.update_capabilities(cloud_serial_from_topic, caps)
+ else:
+ print("[MQTT] Received mapped data without cloud_serial; update skipped")
+
+ if caps and isinstance(mapped_dict, dict):
+
+ mapped_dict["capabilities"] = caps
+
+ except Exception:
+
+ mapped_dict = None
+
+ # AMS Sync vor Job-Tracking, damit Tag/Slot-Daten in DB stehen
+
+ if not ams_data and mapped_dict and mapped_dict.get("ams") is not None:
+
+ ams_data = mapped_dict.get("ams")
+
+ if ams_data:
+
+ try:
+
+ mqtt_message_logger.info(f"[AMS SYNC] printer_id={printer_id_for_ams} ams_count={len(ams_data) if isinstance(ams_data, list) else 0}")
+
+ sync_ams_slots(
+
+ [dict(unit) for unit in ams_data] if isinstance(ams_data, list) else [],
+
+ printer_id=printer_id_for_ams,
+
+ auto_create=True
+
+ ) if ams_data else None
+
+ mqtt_message_logger.info(f"[AMS SYNC] done printer_id={printer_id_for_ams}")
+
+ except Exception as sync_err:
+
+ mqtt_message_logger.error(f"AMS Sync failed: {sync_err}")
+
+ print(f"AMS Sync failed: {sync_err}")
+
+
+
+
+ # ============================================================
+ # JOB-TRACKING SYSTEM (Zentral über job_tracking_service)
+ # ============================================================
+ if parsed_json and msg.topic.endswith("/report") and cloud_serial_from_topic:
+ try:
+ result = job_tracking_service.process_message(
+ cloud_serial=cloud_serial_from_topic,
+ parsed_payload=parsed_json,
+ printer_id=printer_id_for_ams,
+ ams_data=[dict(unit) for unit in ams_data] if ams_data else None
+ )
+ if result:
+ mqtt_message_logger.info(f"[JOB TRACKING] {result}")
+ except Exception as job_err:
+ print(f"Job tracking error: {job_err}")
+ # Add to buffer
+
+ message_buffer.append(message)
+
+ if len(message_buffer) > MAX_BUFFER_SIZE:
+
+ message_buffer.pop(0)
+
+ # Broadcast to all connected WebSocket clients
+
+ asyncio.run_coroutine_threadsafe(
+
+ broadcast_message(
+
+ message,
+
+ ams_data=ams_data,
+
+ job_data=job_data,
+
+ printer_data=mapped_dict,
+
+ raw_payload=parsed_json,
+
+ ),
+
+ event_loop,
+
+ ) if event_loop else None
+
+ except Exception as e:
+
+ print(f"Error processing MQTT message: {e}")
+
+
+
+def on_disconnect(client, userdata, rc, properties=None):
+
+ """Callback when disconnected (MQTT v5 signature)"""
+
+ connection_id = userdata.get('connection_id', 'unknown')
+
+ print(f"🔌 MQTT Disconnected: {connection_id} (rc={rc})")
+
+
+
+async def broadcast_message(message: MQTTMessage, ams_data=None, job_data=None, printer_data=None, raw_payload=None):
+
+ """Send message to all connected WebSocket clients"""
+
+ disconnected = set()
+
+ msg_dict = {
+
+ "topic": message.topic,
+
+ "payload": message.payload,
+
+ "timestamp": message.timestamp,
+
+ "qos": message.qos,
+
+ "printer": printer_data,
+
+ "raw": raw_payload,
+
+ }
+
+ if ams_data:
+
+ msg_dict["ams"] = ams_data
+
+ if job_data:
+
+ msg_dict["job"] = job_data
+
+ for websocket in active_connections:
+
+ try:
+
+ await websocket.send_json(msg_dict)
+
+ except Exception as e:
+
+ print(f"❌ WebSocket send error: {e}")
+
+ disconnected.add(websocket)
+
+
+
+ # Remove disconnected clients
+
+ active_connections.difference_update(disconnected)
+
+
+
+# === ENDPOINTS ===
+
+
+
+@router.post("/connect")
+
+async def connect_mqtt(connection: MQTTConnection, request: Request):
+
+ """Connect to MQTT broker"""
+
+ try:
+
+ global event_loop, printer_service_ref
+
+ # Merke den aktiven Event-Loop f�r thread-sichere Broadcasts
+
+ printer_service_ref = getattr(request.app.state, "printer_service", None)
+
+ event_loop = asyncio.get_running_loop()
+
+
+
+ connection_id = f"{connection.broker}:{connection.port}"
+
+ # Modell-basierte MQTT-Protokoll-Erkennung (PRIORITÄT!)
+ from app.services.printer_auto_detector import PrinterAutoDetector
+
+ mqtt_protocol = mqtt.MQTTv311 # Default
+ detected_protocol = None
+
+ # 1. Priorität: Modell aus cloud_serial ermitteln
+ if connection.cloud_serial:
+ model = PrinterAutoDetector.detect_model_from_serial(connection.cloud_serial)
+ if model and model.upper() in PrinterAutoDetector.MODEL_MQTT_PROTOCOL:
+ protocol_str = PrinterAutoDetector.MODEL_MQTT_PROTOCOL[model.upper()]
+ if protocol_str == "5":
+ mqtt_protocol = mqtt.MQTTv5
+ detected_protocol = "5"
+ print(f"[MQTT] Modell {model} (Serial {connection.cloud_serial}) → MQTT v5")
+ elif protocol_str == "311":
+ mqtt_protocol = mqtt.MQTTv311
+ detected_protocol = "311"
+ print(f"[MQTT] Modell {model} (Serial {connection.cloud_serial}) → MQTT v3.1.1")
+
+ # 2. Fallback: Auto-Detection (nur wenn kein Modell erkannt)
+ if detected_protocol is None:
+ try:
+ detector = MQTTProtocolDetector()
+ detection = detector.detect(connection.broker, connection.password or '', connection.port)
+ if detection.get('detected'):
+ detected_protocol = detection.get('protocol')
+ if detected_protocol == "5":
+ mqtt_protocol = mqtt.MQTTv5
+ print(f"[MQTT] Auto-Detection → Protokoll {detected_protocol}")
+ except Exception as e:
+ print(f"[MQTT] Auto-Detection fehlgeschlagen: {e}")
+ detected_protocol = None
+
+
+
+ # Disconnect existing connection
+
+ if connection_id in mqtt_clients:
+
+ mqtt_clients[connection_id].disconnect()
+
+ mqtt_clients[connection_id].loop_stop()
+
+ del mqtt_clients[connection_id]
+
+
+
+ # Create new client (MQTT-Protokoll basierend auf Modell oder Auto-Detection)
+
+ client = mqtt.Client(
+
+ client_id=connection.client_id or "filamenthub_debug",
+
+ protocol=mqtt_protocol
+
+ )
+
+
+
+ # Set callbacks
+
+ client.user_data_set({
+
+ 'connection_id': connection_id,
+
+ 'client_id': connection.client_id,
+
+ 'cloud_serial': connection.cloud_serial
+
+ })
+
+ client.on_connect = on_connect
+
+ client.on_message = on_message
+
+ client.on_disconnect = on_disconnect
+
+
+
+ # TLS optional aktivieren (Bambu nutzt meist 8883 mit TLS)
+
+ use_tls = connection.use_tls or connection.port == 8883
+
+ if use_tls:
+
+ # Zertifikatspr�fung bei Bedarf deaktivieren (Drucker nutzt Self-Signed)
+
+ if connection.tls_insecure:
+
+ client.tls_set(
+
+ tls_version=ssl.PROTOCOL_TLS_CLIENT,
+
+ cert_reqs=ssl.CERT_NONE
+
+ )
+
+ client.tls_insecure_set(True)
+
+ else:
+
+ client.tls_set(tls_version=ssl.PROTOCOL_TLS_CLIENT)
+
+
+
+ # Set credentials if provided (default: Bambu bblp + API Key)
+
+ username = connection.username or "bblp"
+
+ password = connection.password
+
+ if username or password:
+
+ client.username_pw_set(username, password)
+
+
+
+ # Connect
+
+ client.connect(connection.broker, connection.port, keepalive=60)
+
+ client.loop_start()
+
+
+
+ # Store client
+
+ mqtt_clients[connection_id] = client
+
+
+
+ return {
+
+ "success": True,
+
+ "message": f"Connected to {connection_id}",
+
+ "connection_id": connection_id
+
+ }
+
+
+
+ except Exception as e:
+
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+
+@router.post("/disconnect")
+
+async def disconnect_mqtt(broker: str, port: int = 1883):
+
+ """Disconnect from MQTT broker"""
+
+ try:
+
+ connection_id = f"{broker}:{port}"
+
+
+
+ if connection_id not in mqtt_clients:
+
+ raise HTTPException(status_code=404, detail="Connection not found")
+
+
+
+ client = mqtt_clients[connection_id]
+
+ client.disconnect()
+
+ client.loop_stop()
+
+ del mqtt_clients[connection_id]
+
+
+
+ # Clear subscriptions if no more clients
+
+ if not mqtt_clients:
+
+ subscribed_topics.clear()
+
+ message_buffer.clear()
+
+ try:
+ mqtt_runtime.clear_subscriptions()
+ except Exception:
+ pass
+
+
+
+ return {
+
+ "success": True,
+
+ "message": f"Disconnected from {connection_id}"
+
+ }
+
+
+
+ except HTTPException:
+
+ raise
+
+ except Exception as e:
+
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+
+@router.post("/subscribe")
+
+async def subscribe_topic(subscription: MQTTSubscription):
+
+ """Subscribe to MQTT topic"""
+
+ try:
+
+ if not mqtt_clients:
+
+ raise HTTPException(status_code=400, detail="No active MQTT connection")
+
+
+
+ topic = subscription.topic
+
+
+
+ # Subscribe on all active clients
+
+ for client in mqtt_clients.values():
+
+ result, _ = client.subscribe(topic)
+
+ if result != mqtt.MQTT_ERR_SUCCESS:
+
+ raise HTTPException(status_code=500, detail=f"Subscribe failed ({result})")
+
+
+
+ subscribed_topics.add(topic)
+ try:
+ mqtt_runtime.register_subscription(topic)
+ except Exception:
+ pass
+
+
+
+ return {
+
+ "success": True,
+
+ "message": f"Subscribed to {topic}",
+
+ "topic": topic
+
+ }
+
+
+
+ except HTTPException:
+
+ raise
+
+ except Exception as e:
+
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+
+@router.post("/unsubscribe")
+
+async def unsubscribe_topic(subscription: MQTTSubscription):
+
+ """Unsubscribe from MQTT topic"""
+
+ try:
+
+ if not mqtt_clients:
+
+ raise HTTPException(status_code=400, detail="No active MQTT connection")
+
+
+
+ topic = subscription.topic
+
+
+
+ # Unsubscribe on all active clients
+
+ for client in mqtt_clients.values():
+
+ client.unsubscribe(topic)
+
+
+
+ subscribed_topics.discard(topic)
+ try:
+ mqtt_runtime.unregister_subscription(topic)
+ except Exception:
+ pass
+
+
+
+ return {
+
+ "success": True,
+
+ "message": f"Unsubscribed from {topic}",
+
+ "topic": topic
+
+ }
+
+
+
+ except HTTPException:
+
+ raise
+
+ except Exception as e:
+
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+
+@router.get("/status")
+
+async def get_mqtt_status():
+
+ """Get current MQTT connection status"""
+
+ connections = []
+
+ active_count = 0
+
+ for connection_id, client in mqtt_clients.items():
+
+ is_connected = client.is_connected()
+
+ connections.append({
+
+ "connection_id": connection_id,
+
+ "connected": is_connected
+
+ })
+
+ if is_connected:
+
+ active_count += 1
+
+ return {
+
+ "active_connections": active_count,
+
+ "subscribed_topics": list(subscribed_topics),
+
+ "message_buffer_size": len(message_buffer),
+
+ "websocket_clients": len(active_connections),
+
+ "connections": connections,
+
+ "last_connect_error": last_connect_error
+
+ }
+
+
+
+@router.get("/messages")
+
+async def get_messages(limit: int = 100, topic_filter: Optional[str] = None):
+
+ """Get recent messages from buffer"""
+
+ messages = message_buffer[-limit:]
+
+
+
+ if topic_filter:
+
+ messages = [m for m in messages if topic_filter in m.topic]
+
+
+
+ return {
+
+ "messages": [m.dict() for m in messages],
+
+ "total": len(messages)
+
+ }
+
+
+
+@router.post("/clear-buffer")
+
+async def clear_message_buffer():
+
+ """Clear message buffer"""
+
+ message_buffer.clear()
+
+ return {"success": True, "message": "Message buffer cleared"}
+
+
+
+@router.websocket("/ws")
+async def websocket_endpoint(websocket: WebSocket):
+ """WebSocket endpoint for live message streaming"""
+ await websocket.accept()
+ active_connections.add(websocket)
+ mqtt_ws_clients.add(websocket)
+ global active_ws_clients, last_ws_activity_ts
+ active_ws_clients = max(0, active_ws_clients + 1)
+ last_ws_activity_ts = time.time()
+ try:
+ # Send initial status
+ await websocket.send_json({
+ "type": "status",
+ "connected": len(mqtt_clients) > 0,
+ "topics": list(subscribed_topics)
+ })
+ while True:
+ data = await websocket.receive_text()
+ last_ws_activity_ts = time.time()
+ if data == "ping":
+ await websocket.send_text("pong")
+ except WebSocketDisconnect:
+ active_connections.discard(websocket)
+ mqtt_ws_clients.discard(websocket)
+ except Exception as e:
+ print(f"WS WebSocket error: {e}")
+ active_connections.discard(websocket)
+ mqtt_ws_clients.discard(websocket)
+ finally:
+ active_ws_clients = max(0, active_ws_clients - 1)
+
+@router.post("/publish")
+
+
+async def publish_message(topic: str, payload: str, qos: int = 0):
+
+ """Publish message to MQTT topic"""
+
+ try:
+
+ if not mqtt_clients:
+
+ raise HTTPException(status_code=400, detail="No active MQTT connection")
+
+
+
+ # Publish on first available client
+
+ client = list(mqtt_clients.values())[0]
+
+ result = client.publish(topic, payload, qos=qos)
+
+
+
+ if result.rc == mqtt.MQTT_ERR_SUCCESS:
+
+ return {
+
+ "success": True,
+
+ "message": f"Published to {topic}",
+
+ "topic": topic,
+
+ "payload": payload
+
+ }
+
+ else:
+
+ raise HTTPException(status_code=500, detail=f"Publish failed: {result.rc}")
+
+
+
+ except HTTPException:
+
+ raise
+
+ except Exception as e:
+
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+
+@router.get("/topics/suggest")
+
+async def suggest_topics(session=Depends(get_session)):
+
+ """Get suggested topics for Bambu Lab printers with real serial numbers"""
+
+ bambu_serials = [
+ p.cloud_serial
+ for p in session.query(Printer).filter(
+ Printer.printer_type == "bambu",
+ Printer.cloud_serial != None
+ ).all()
+ ]
+ bambu_topics = []
+
+ for serial in bambu_serials:
+
+ bambu_topics.extend([
+
+ f"device/{serial}/report",
+
+ f"device/{serial}/request",
+
+ f"device/{serial}/print",
+
+ f"device/{serial}/camera",
+
+ f"device/{serial}/ams",
+
+ f"device/{serial}/temperature",
+
+ f"device/{serial}/speed",
+
+ f"device/{serial}/layer",
+
+ ])
+
+ # Fallback: Wenn keine Seriennummern, zeige Platzhalter
+
+ if not bambu_topics:
+
+ bambu_topics = [
+
+ "device/+/report",
+
+ "device/+/request",
+
+ "device/+/print",
+
+ "device/+/camera",
+
+ "device/+/ams",
+
+ "device/+/temperature",
+
+ "device/+/speed",
+
+ "device/+/layer",
+
+ ]
+
+ return {
+
+ "bambu_lab": bambu_topics,
+
+ "klipper": [
+
+ "klipper/status",
+
+ "klipper/printer",
+
+ "klipper/temperature",
+
+ "klipper/gcode/response",
+
+ ],
+
+ "common": [
+
+ "#", # All topics
+
+ "+/status", # All status topics
+
+ "device/+/#", # All device topics
+
+ ]
+
+ }
+
+
+
+@router.get("/logs")
+
+async def get_mqtt_logs():
+
+ """Gibt die empfangenen MQTT-Nachrichten als Text zur�ck"""
+
+ try:
+
+ with open("logs/mqtt/mqtt_messages.log", "r", encoding="utf-8") as f:
+
+ return f.read()
+
+ except FileNotFoundError:
+
+ return "Noch keine MQTT-Nachrichten empfangen."
+
+ except Exception as e:
+
+ return f"Fehler beim Lesen der Logdatei: {e}"
diff --git a/app/routes/mqtt_runtime_routes.py b/app/routes/mqtt_runtime_routes.py
new file mode 100644
index 0000000..5696faf
--- /dev/null
+++ b/app/routes/mqtt_runtime_routes.py
@@ -0,0 +1,294 @@
+from __future__ import annotations
+
+from typing import Any, Dict, Optional
+from sqlmodel import select
+
+from app.database import get_session
+from app.models.printer import Printer
+
+from fastapi import APIRouter
+from fastapi.responses import JSONResponse
+from pydantic import AliasChoices, BaseModel, ConfigDict, Field
+# status setzen#
+from app.services import mqtt_runtime
+
+from datetime import datetime
+
+router = APIRouter()
+
+
+class MQTTErrorResponse(BaseModel):
+ connected: bool = False
+ error: str
+
+
+class MQTTConnectRequest(BaseModel):
+ model_config = ConfigDict(populate_by_name=True)
+
+ broker: Optional[str] = Field(
+ default=None,
+ validation_alias=AliasChoices("broker", "host", "ip"),
+ description="MQTT broker host/IP (Bambu printers typically use TLS on port 8883).",
+ )
+ port: int = Field(default=8883, ge=1, le=65535)
+ client_id: str = Field(
+ default="filamenthub_debug",
+ validation_alias=AliasChoices("client_id", "clientId"),
+ min_length=1,
+ )
+ username: Optional[str] = None
+ password: Optional[str] = None
+ protocol: str = Field(default="311", description="MQTT protocol version: 5 | 311 | 31")
+ tls: bool = Field(default=True, description="Must be true (PrinterMQTTClient enforces TLS).")
+ # Printer mode
+ use_printer_config: bool = Field(default=False, description="If true, use printer config from DB (printer_id required)")
+ printer_id: Optional[str] = Field(default=None, description="Printer UUID (when use_printer_config is true)")
+
+
+class MQTTConnectResponse(BaseModel):
+ connected: bool
+ client_id: str
+ broker: str
+ port: int
+
+
+class MQTTStatusResponse(BaseModel):
+ connected: bool
+ client_id: Optional[str] = None
+ broker: Optional[str] = None
+ port: Optional[int] = None
+ protocol: Optional[str] = None
+ connected_since: Optional[str] = None
+ cloud_serial: Optional[str] = None
+ last_seen: Optional[str] = None
+ subscriptions_count: Optional[int] = None
+ topics_count: Optional[int] = None
+ message_count: Optional[int] = None
+ last_message_time: Optional[str] = None
+ qos: Optional[int] = None
+ uptime: Optional[str] = None
+
+
+class MQTTTopicsResponse(BaseModel):
+ connected: bool
+ items: list[str]
+ count: int
+
+
+class MQTTMessageItem(BaseModel):
+ topic: str
+ payload: str
+ timestamp: str
+
+
+class MQTTMessagesResponse(BaseModel):
+ connected: bool
+ messages: list[MQTTMessageItem]
+ count: int
+
+
+@router.post(
+ "/connect",
+ response_model=MQTTConnectResponse,
+ responses={400: {"model": MQTTErrorResponse}, 500: {"model": MQTTErrorResponse}},
+)
+def connect(req: MQTTConnectRequest):
+ """Connect via mqtt_runtime.
+
+ On success: HTTP 200 and a deterministic connected=true response.
+ On failure: HTTP 4xx/5xx with a clear JSON error.
+ """
+ payload = req.model_dump() if hasattr(req, "model_dump") else req.dict()
+
+ # Initialize variables for static analysis / safe returns
+ printer = None
+ broker = None
+ port = None
+ protocol = None
+ client_id = None
+ runtime_payload = None
+
+ # Printer mode: use DB as Source of Truth
+ if payload.get("use_printer_config"):
+ pid = payload.get("printer_id")
+ if not pid:
+ return JSONResponse(status_code=400, content={"connected": False, "error": "printer_id required when use_printer_config is true"})
+
+ # Load printer from DB
+ try:
+ with next(get_session()) as session:
+ printer = session.exec(select(Printer).where(Printer.id == pid)).first()
+ except Exception as exc:
+ return JSONResponse(status_code=500, content={"connected": False, "error": f"db error: {str(exc)}"})
+
+ if not printer:
+ return JSONResponse(status_code=400, content={"connected": False, "error": "Printer not found"})
+ if not getattr(printer, "active", True):
+ return JSONResponse(status_code=400, content={"connected": False, "error": "Printer not active"})
+ if getattr(printer, "printer_type", "") != "bambu":
+ return JSONResponse(status_code=400, content={"connected": False, "error": "Unsupported printer type"})
+ if not getattr(printer, "ip_address", None):
+ return JSONResponse(status_code=400, content={"connected": False, "error": "Printer has no ip_address"})
+ if not getattr(printer, "api_key", None):
+ return JSONResponse(status_code=400, content={"connected": False, "error": "Printer has no api_key"})
+
+ # Build runtime payload exclusively from DB
+ broker = printer.ip_address
+ port = int(getattr(printer, "port", 6000) or 6000)
+ protocol = str(getattr(printer, "mqtt_version", "5") or "5")
+ username = "bblp"
+ password = printer.api_key
+ tls = True
+ client_id = f"filamenthub_{printer.name}_{str(printer.id)[:6]}"
+
+ runtime_payload = {
+ "host": broker,
+ "port": port,
+ "client_id": client_id,
+ "username": username,
+ "password": password,
+ "protocol": protocol,
+ "tls": tls,
+ "cloud_serial": getattr(printer, "cloud_serial", None),
+ "printer_id": printer.id,
+ "printer_name": printer.name,
+ "printer_model": printer.model,
+ }
+
+ # cloud_serial must be present for Bambu printers
+ if not runtime_payload.get("cloud_serial"):
+ return JSONResponse(status_code=400, content={"connected": False, "error": "printer has no cloud_serial"})
+
+ result = mqtt_runtime.connect(runtime_payload)
+
+ # Handle runtime result immediately while `printer` is in scope
+ if not isinstance(result, dict):
+ return JSONResponse(status_code=500, content={"connected": False, "error": "invalid runtime response"})
+ if not result.get("success"):
+ error = str(result.get("error") or "connect failed")
+ status_code = 400 if ("missing" in error or "must be" in error) else 500
+ return JSONResponse(status_code=status_code, content={"connected": False, "error": error})
+
+ # Successful printer-mode response (connection established at transport
+ # level). IMPORTANT: application-level `connected` remains False
+ # until a device//report arrives and updates runtime state.
+ return {
+ "connected": False,
+ "mode": "printer",
+ "client_id": result.get("client_id"),
+ "printer_id": printer.id,
+ "printer_name": printer.name,
+ "broker": broker,
+ "port": port,
+ "protocol": protocol,
+ }
+ else:
+ # Manual mode: normalize external API names to the runtime service keys.
+ runtime_payload = {
+ "host": payload.get("broker"),
+ "port": payload.get("port"),
+ "client_id": payload.get("client_id"),
+ "username": payload.get("username"),
+ "password": payload.get("password"),
+ "protocol": payload.get("protocol"),
+ "tls": payload.get("tls"),
+ }
+
+ result = mqtt_runtime.connect(runtime_payload)
+
+ if not isinstance(result, dict):
+ return JSONResponse(status_code=500, content={"connected": False, "error": "invalid runtime response"})
+ if not result.get("success"):
+ error = str(result.get("error") or "connect failed")
+ status_code = 400 if ("missing" in error or "must be" in error) else 500
+ return JSONResponse(status_code=status_code, content={"connected": False, "error": error})
+
+ # fallback: manual mode response. Do NOT claim application-level
+ # connected here — UI must use /status which reflects reports.
+ return {
+ "connected": False,
+ "client_id": str(runtime_payload.get("client_id") or ""),
+ "broker": str(runtime_payload.get("host") or ""),
+ "port": int(runtime_payload.get("port") or 8883),
+ }
+
+
+@router.post(
+ "/disconnect",
+ responses={500: {"model": MQTTErrorResponse}},
+)
+def disconnect() -> Any:
+ """Disconnect via mqtt_runtime."""
+ try:
+ result = mqtt_runtime.disconnect()
+ if isinstance(result, dict):
+ return result
+ return {"success": True, "connected": False}
+ except Exception as exc:
+ return JSONResponse(status_code=500, content={"connected": False, "error": str(exc)})
+
+
+@router.get(
+ "/status",
+ response_model=MQTTStatusResponse,
+ response_model_exclude_none=True,
+ responses={500: {"model": MQTTErrorResponse}},
+)
+def status():
+ """Get status via mqtt_runtime."""
+ try:
+ # Return the runtime state 1:1 — mqtt_runtime.status() returns a dict
+ # with the keys required by the UI. Do not apply heuristics here.
+ result = mqtt_runtime.status()
+ if not isinstance(result, dict):
+ return {"connected": False}
+ return result
+
+ except Exception as exc:
+ return JSONResponse(
+ status_code=500,
+ content={"connected": False, "error": str(exc)}
+ )
+
+
+@router.get(
+ "/topics",
+ response_model_exclude_none=True,
+ responses={500: {"model": MQTTErrorResponse}},
+)
+def topics():
+ """Get subscribed MQTT topics (not message stats)."""
+ try:
+ result = mqtt_runtime.topics()
+ if not isinstance(result, dict):
+ return {"connected": False, "items": [], "count": 0}
+
+ connected = bool(result.get("connected"))
+ items = result.get("items") or []
+ count = int(result.get("count") or 0)
+
+ return {"connected": connected, "items": items, "count": count}
+ except Exception as exc:
+ return JSONResponse(status_code=500, content={"connected": False, "error": str(exc)})
+
+
+@router.get(
+ "/messages",
+ response_model_exclude_none=True,
+ responses={500: {"model": MQTTErrorResponse}},
+)
+def messages(limit: int = 50):
+ """Get last N live messages (newest first)."""
+ try:
+ result = mqtt_runtime.status()
+ connected = bool(result.get("connected")) if isinstance(result, dict) else False
+
+ msgs = mqtt_runtime.get_messages(limit=min(limit, 100))
+
+ return {
+ "connected": connected,
+ "messages": msgs,
+ "count": len(msgs),
+ }
+ except Exception as exc:
+ return JSONResponse(status_code=500, content={"connected": False, "error": str(exc)})
diff --git a/app/routes/notification_routes.py b/app/routes/notification_routes.py
new file mode 100644
index 0000000..a84e32f
--- /dev/null
+++ b/app/routes/notification_routes.py
@@ -0,0 +1,272 @@
+import json
+import asyncio
+from typing import Any, Dict, List, Set
+
+from fastapi import APIRouter, Depends, HTTPException, WebSocket, WebSocketDisconnect, Request
+from sqlmodel import Session, select
+
+from app.database import get_session, engine
+from app.models.settings import Setting
+
+router = APIRouter()
+
+DEFAULT_NOTIFICATIONS: List[Dict[str, Any]] = [
+ {
+ "id": "print_done",
+ "label": "Druck abgeschlossen",
+ "message": "Der Druck wurde erfolgreich abgeschlossen.",
+ "type": "success",
+ "persistent": True,
+ "enabled": True,
+ },
+ {
+ "id": "filament_empty",
+ "label": "Filament leer",
+ "message": "Filamentvorrat ist leer.",
+ "type": "error",
+ "persistent": True,
+ "enabled": True,
+ },
+ {
+ "id": "ams_error",
+ "label": "AMS Fehler",
+ "message": "Es liegt ein Fehler im AMS vor.",
+ "type": "warn",
+ "persistent": True,
+ "enabled": True,
+ },
+ {
+ "id": "job_no_tracking",
+ "label": "Job ohne Filament-Tracking",
+ "message": "Job '{job_name}' auf Drucker '{printer_name}' wurde ohne Filament-Tracking beendet. Bitte Spule zuordnen und Verbrauch nachtragen.",
+ "type": "warn",
+ "persistent": True,
+ "enabled": True,
+ },
+ {
+ "id": "job_failed",
+ "label": "Job fehlgeschlagen",
+ "message": "Job '{job_name}' auf Drucker '{printer_name}' ist fehlgeschlagen (Status: {status}).",
+ "type": "error",
+ "persistent": True,
+ "enabled": True,
+ },
+ {
+ "id": "job_aborted",
+ "label": "Job abgebrochen",
+ "message": "Job '{job_name}' auf Drucker '{printer_name}' wurde abgebrochen (Status: {status}).",
+ "type": "warn",
+ "persistent": True,
+ "enabled": True,
+ },
+ {
+ "id": "ams_tray_error",
+ "label": "AMS Tray Fehler",
+ "message": "Problem mit AMS-Spulenfach auf Drucker '{printer_name}' erkannt.",
+ "type": "error",
+ "persistent": True,
+ "enabled": False,
+ },
+ {
+ "id": "ams_humidity_high",
+ "label": "AMS Luftfeuchtigkeit hoch",
+ "message": "AMS auf Drucker '{printer_name}' hat zu hohe Luftfeuchtigkeit ({humidity}% > 60%).",
+ "type": "warn",
+ "persistent": True,
+ "enabled": False,
+ },
+ {
+ "id": "job_no_spool",
+ "label": "Job ohne Spule gestartet",
+ "message": "Job '{job_name}' auf Drucker '{printer_name}' wurde ohne Spulenzuordnung gestartet.",
+ "type": "warn",
+ "persistent": True,
+ "enabled": False,
+ },
+]
+
+notification_ws_clients: Set[WebSocket] = set()
+
+
+def _persist_config(session: Session, notifications: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+ serialized = json.dumps(notifications, ensure_ascii=False)
+ setting = session.exec(select(Setting).where(Setting.key == "notifications_config")).first()
+ if setting:
+ setting.value = serialized
+ else:
+ setting = Setting(key="notifications_config", value=serialized)
+ session.add(setting)
+ session.commit()
+ return json.loads(serialized)
+
+
+def ensure_notification_config(session: Session) -> List[Dict[str, Any]]:
+ setting = session.exec(select(Setting).where(Setting.key == "notifications_config")).first()
+
+ # Bestehende Config laden
+ existing_notifications = []
+ if setting and setting.value:
+ try:
+ data = json.loads(setting.value)
+ if isinstance(data, list):
+ existing_notifications = data
+ except Exception:
+ pass
+
+ # Merge: Neue Defaults hinzufügen die noch nicht existieren
+ existing_ids = {n.get("id") for n in existing_notifications}
+ merged = list(existing_notifications) # Kopie der bestehenden
+
+ for default_notif in DEFAULT_NOTIFICATIONS:
+ if default_notif.get("id") not in existing_ids:
+ # Neue Notification aus Defaults hinzufügen
+ merged.append(default_notif)
+
+ # Wenn etwas hinzugefügt wurde, speichern
+ if len(merged) > len(existing_notifications):
+ return _persist_config(session, merged)
+
+ return existing_notifications if existing_notifications else _persist_config(session, DEFAULT_NOTIFICATIONS)
+
+
+def _validate_notifications(raw: Any) -> List[Dict[str, Any]]:
+ if isinstance(raw, dict) and "notifications" in raw:
+ raw = raw.get("notifications")
+ if not isinstance(raw, list):
+ raise HTTPException(status_code=400, detail="Ungültiges Format, erwartete Liste von Notifications.")
+
+ validated: List[Dict[str, Any]] = []
+ for item in raw:
+ if not isinstance(item, dict):
+ raise HTTPException(status_code=400, detail="Jede Notification muss ein Objekt sein.")
+ notif_id = str(item.get("id", "")).strip()
+ message = str(item.get("message", "")).strip()
+ if not notif_id or not message:
+ raise HTTPException(status_code=400, detail="Notification benötigt mindestens id und message.")
+ validated.append(
+ {
+ "id": notif_id,
+ "label": str(item.get("label", notif_id)).strip() or notif_id,
+ "message": message,
+ "type": str(item.get("type", "info")).strip() or "info",
+ "persistent": bool(item.get("persistent", False)),
+ "enabled": bool(item.get("enabled", True)),
+ }
+ )
+ return validated
+
+
+async def broadcast_notification(notification: Dict[str, Any]) -> None:
+ dead: Set[WebSocket] = set()
+ payload = {"event": "notification_trigger", "payload": notification}
+ for ws in list(notification_ws_clients):
+ try:
+ await ws.send_json(payload)
+ except Exception:
+ dead.add(ws)
+ for ws in dead:
+ try:
+ await ws.close()
+ except Exception:
+ pass
+ notification_ws_clients.discard(ws)
+
+
+@router.get("/api/notifications-config")
+def get_notifications_config(session: Session = Depends(get_session)):
+ return {"notifications": ensure_notification_config(session)}
+
+
+@router.post("/api/notifications-config")
+async def save_notifications_config(request: Request, session: Session = Depends(get_session)):
+ payload = await request.json()
+ validated = _validate_notifications(payload)
+ persisted = _persist_config(session, validated)
+ return {"notifications": persisted}
+
+
+@router.post("/api/notifications-trigger")
+async def trigger_notification(payload: Dict[str, Any], session: Session = Depends(get_session)):
+ notif_id = str(payload.get("id", "")).strip()
+ if not notif_id:
+ raise HTTPException(status_code=400, detail="Notification id fehlt.")
+ notifications = ensure_notification_config(session)
+ notification = next((n for n in notifications if n.get("id") == notif_id), None)
+ if not notification:
+ raise HTTPException(status_code=404, detail="Notification nicht gefunden.")
+ if not notification.get("enabled", True):
+ raise HTTPException(status_code=400, detail="Notification ist deaktiviert.")
+
+ await broadcast_notification(notification)
+ return {"success": True, "notification": notification}
+
+
+@router.websocket("/api/notifications/ws")
+async def notifications_websocket(websocket: WebSocket):
+ await websocket.accept()
+ notification_ws_clients.add(websocket)
+ try:
+ while True:
+ await websocket.receive_text()
+ except WebSocketDisconnect:
+ pass
+ finally:
+ notification_ws_clients.discard(websocket)
+
+
+# ===== Helper für synchrones Triggern =====
+def trigger_notification_sync(notification_id: str, **context) -> None:
+ """
+ Synchrone Funktion zum Triggern einer Notification.
+ Kann von Services (job_tracking, etc.) aufgerufen werden.
+
+ Args:
+ notification_id: Die ID der Notification (z.B. "job_failed")
+ **context: Zusätzliche Kontext-Daten (job_name, printer_name, etc.)
+ """
+ try:
+ with Session(engine) as session:
+ notifications = ensure_notification_config(session)
+ notification = next((n for n in notifications if n.get("id") == notification_id), None)
+
+ if not notification:
+ # Notification nicht definiert - ignorieren
+ return
+
+ if not notification.get("enabled", True):
+ # Notification ist deaktiviert - ignorieren
+ return
+
+ # Kontext-Daten zur Message hinzufügen falls vorhanden
+ message = notification.get("message", "")
+ if context:
+ # Ersetze Platzhalter in der Message (z.B. {job_name}, {printer_name})
+ try:
+ message = message.format(**context)
+ except (KeyError, ValueError):
+ # Falls Platzhalter fehlen, originale Message verwenden
+ pass
+
+ # Erstelle Notification-Payload
+ notif_payload = {
+ **notification,
+ "message": message,
+ "context": context
+ }
+
+ # Broadcast async (fire and forget)
+ try:
+ loop = asyncio.get_event_loop()
+ if loop.is_running():
+ # Wenn Loop läuft, schedule als Task
+ asyncio.create_task(broadcast_notification(notif_payload))
+ else:
+ # Wenn kein Loop läuft, create new one
+ asyncio.run(broadcast_notification(notif_payload))
+ except RuntimeError:
+ # Kein Event Loop - ignorieren (z.B. in Tests)
+ pass
+ except Exception as e:
+ # Fehler beim Triggern sollte nicht den Hauptprozess stoppen
+ import logging
+ logging.getLogger(__name__).error(f"Fehler beim Triggern von Notification '{notification_id}': {e}")
diff --git a/app/routes/performance_routes.py b/app/routes/performance_routes.py
new file mode 100644
index 0000000..f7ca8ef
--- /dev/null
+++ b/app/routes/performance_routes.py
@@ -0,0 +1,205 @@
+"""
+Performance Monitoring Routes
+Historische System-Performance Daten
+"""
+from fastapi import APIRouter
+from typing import List, Dict
+from datetime import datetime
+import psutil
+from collections import deque
+from fastapi import HTTPException
+
+router = APIRouter(prefix="/api/performance", tags=["Performance"])
+
+# === IN-MEMORY STORAGE ===
+# Speichert die letzten 720 Datenpunkte (1 Stunde bei 5s Intervall)
+MAX_HISTORY = 720
+performance_history: deque = deque(maxlen=MAX_HISTORY)
+recording_start = datetime.now()
+
+# === DATA COLLECTION ===
+def collect_performance_data():
+ """Sammelt aktuelle Performance-Daten"""
+ cpu_percent = psutil.cpu_percent(interval=0.1)
+ memory = psutil.virtual_memory()
+ disk = psutil.disk_usage('/')
+
+ data_point = {
+ "timestamp": datetime.now().isoformat(),
+ "cpu_percent": round(cpu_percent, 1),
+ "ram_percent": round(memory.percent, 1),
+ "ram_used_mb": round(memory.used / 1024 / 1024, 1),
+ "disk_percent": round(disk.percent, 1),
+ "disk_used_gb": round(disk.used / 1024 / 1024 / 1024, 1)
+ }
+
+ performance_history.append(data_point)
+
+ # Check for alerts
+ alerts = []
+ if cpu_percent > 90:
+ alerts.append({
+ "level": "critical",
+ "message": f"CPU Usage kritisch: {cpu_percent}%",
+ "timestamp": data_point["timestamp"]
+ })
+ elif cpu_percent > 75:
+ alerts.append({
+ "level": "warning",
+ "message": f"CPU Usage hoch: {cpu_percent}%",
+ "timestamp": data_point["timestamp"]
+ })
+
+ if memory.percent > 90:
+ alerts.append({
+ "level": "critical",
+ "message": f"RAM Usage kritisch: {memory.percent}%",
+ "timestamp": data_point["timestamp"]
+ })
+ elif memory.percent > 75:
+ alerts.append({
+ "level": "warning",
+ "message": f"RAM Usage hoch: {memory.percent}%",
+ "timestamp": data_point["timestamp"]
+ })
+
+ return data_point, alerts
+
+# === ENDPOINTS ===
+@router.get("/current")
+def get_current_performance():
+ """Gibt aktuelle Performance-Daten zurück"""
+ data_point, alerts = collect_performance_data()
+ return {
+ "current": data_point,
+ "alerts": alerts
+ }
+
+@router.get("/history")
+def get_performance_history(limit: int = 60):
+ """Gibt Performance-Historie zurück (Standard: letzte 60 Punkte = 5 Minuten)"""
+ history_list = list(performance_history)
+
+ # Limit anwenden
+ if limit > 0:
+ history_list = history_list[-limit:]
+
+ # Statistiken berechnen
+ if len(history_list) > 0:
+ cpu_values = [p["cpu_percent"] for p in history_list]
+ ram_values = [p["ram_percent"] for p in history_list]
+
+ stats = {
+ "avg_cpu": round(sum(cpu_values) / len(cpu_values), 1),
+ "max_cpu": round(max(cpu_values), 1),
+ "min_cpu": round(min(cpu_values), 1),
+ "avg_ram": round(sum(ram_values) / len(ram_values), 1),
+ "max_ram": round(max(ram_values), 1),
+ "min_ram": round(min(ram_values), 1),
+ "data_points": len(history_list)
+ }
+ else:
+ stats = {
+ "avg_cpu": 0,
+ "max_cpu": 0,
+ "min_cpu": 0,
+ "avg_ram": 0,
+ "max_ram": 0,
+ "min_ram": 0,
+ "data_points": 0
+ }
+
+ return {
+ "history": history_list,
+ "stats": stats,
+ "recording_since": recording_start.isoformat(),
+ "total_data_points": len(performance_history)
+ }
+
+@router.post("/clear")
+def clear_performance_history():
+ """Löscht die Performance-Historie"""
+ global recording_start
+ performance_history.clear()
+ recording_start = datetime.now()
+
+ return {
+ "success": True,
+ "message": "Performance-Historie gelöscht",
+ "recording_since": recording_start.isoformat()
+ }
+
+@router.get("/export")
+def export_performance_data():
+ """Exportiert alle Performance-Daten als JSON"""
+ return {
+ "recording_since": recording_start.isoformat(),
+ "total_data_points": len(performance_history),
+ "data": list(performance_history)
+ }
+
+
+@router.get("/panel")
+def performance_panel(limit: int = 12):
+ """
+ Liefert einen defensiven Datensatz für das Performance-Panel.
+ Rückgabe ist stabil und abwärtskompatibel; Felder sind optional nutzbar.
+ """
+ try:
+ current, alerts = collect_performance_data()
+ except Exception as exc: # pragma: no cover - defensive fallback
+ # Fallback bei psutil-/IO-Fehlern
+ current = {
+ "timestamp": datetime.now().isoformat(),
+ "cpu_percent": None,
+ "ram_percent": None,
+ "ram_used_mb": None,
+ "disk_percent": None,
+ "disk_used_gb": None,
+ }
+ alerts = [{"level": "error", "message": f"Performance read failed: {exc}", "timestamp": current["timestamp"]}]
+
+ # Historie defensiv aufbereiten
+ history_list = list(performance_history)
+ if limit > 0:
+ history_list = history_list[-limit:]
+
+ if history_list:
+ cpu_values = [p.get("cpu_percent") or 0 for p in history_list]
+ ram_values = [p.get("ram_percent") or 0 for p in history_list]
+ stats = {
+ "avg_cpu": round(sum(cpu_values) / len(cpu_values), 1),
+ "max_cpu": round(max(cpu_values), 1),
+ "min_cpu": round(min(cpu_values), 1),
+ "avg_ram": round(sum(ram_values) / len(ram_values), 1),
+ "max_ram": round(max(ram_values), 1),
+ "min_ram": round(min(ram_values), 1),
+ "data_points": len(history_list),
+ }
+ else:
+ stats = {
+ "avg_cpu": None,
+ "max_cpu": None,
+ "min_cpu": None,
+ "avg_ram": None,
+ "max_ram": None,
+ "min_ram": None,
+ "data_points": 0,
+ }
+
+ return {
+ "schema_version": 1,
+ "timestamp": datetime.now().isoformat(),
+ "current": current,
+ "alerts": alerts,
+ "history": {
+ "items": history_list,
+ "stats": stats,
+ "total": len(performance_history),
+ "recording_since": recording_start.isoformat(),
+ },
+ "meta": {
+ "interval_hint_seconds": 5,
+ "limit": limit,
+ },
+ }
diff --git a/app/routes/printers.py b/app/routes/printers.py
new file mode 100644
index 0000000..1816650
--- /dev/null
+++ b/app/routes/printers.py
@@ -0,0 +1,325 @@
+from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
+from sqlmodel import Session, select
+from typing import List, Dict, Any
+import logging
+import socket
+import httpx
+import os
+from app.database import get_session
+from app.models.printer import Printer, PrinterCreate, PrinterRead
+from app.services import mqtt_runtime
+
+# Hinweis: kleine Kommentar-Änderung, um Dateisystem-Änderung und Reload zu triggern
+
+router = APIRouter(prefix="/api/printers", tags=["printers"])
+logger = logging.getLogger("app.routes.printers")
+
+UPLOAD_DIR = os.path.join("app", "static", "uploads", "printers")
+os.makedirs(UPLOAD_DIR, exist_ok=True)
+
+
+def get_image_url(printer_id: str) -> str | None:
+ for ext in (".jpg", ".jpeg", ".png", ".webp"):
+ candidate = os.path.join(UPLOAD_DIR, f"{printer_id}{ext}")
+ if os.path.exists(candidate):
+ return f"/static/uploads/printers/{printer_id}{ext}"
+ return None
+
+
+@router.get("/", response_model=List[PrinterRead])
+def get_all_printers(live: bool = False, session: Session = Depends(get_session)):
+ """
+ Alle Drucker abrufen.
+ - live=false (default): schnelle Checks mit 0.3s Timeout
+ - live=true: Live-Check mit kurzem Timeout
+ """
+ printers = session.exec(select(Printer)).all()
+ result = []
+ for printer in printers:
+ online: bool = False
+
+ # Kurze Timeout-Checks immer aktivieren (default 0.3s für schnelle Response)
+ timeout_val = 0.8 if live else 0.3
+
+ try:
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ targets = [printer.port] if printer.port else []
+ targets.extend([6000, 8883])
+ for port in targets:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(timeout_val)
+ res = sock.connect_ex((printer.ip_address, port))
+ sock.close()
+ if res == 0:
+ online = True
+ break
+ elif printer.printer_type == "klipper":
+ port = printer.port or 7125
+ url = f"http://{printer.ip_address}:{port}/server/info"
+ r = httpx.get(url, timeout=timeout_val)
+ online = r.status_code == 200
+ else:
+ online = False
+ except Exception:
+ online = False
+
+ p_dict = printer.dict()
+ p_dict["online"] = online
+ p_dict["image_url"] = get_image_url(printer.id)
+ result.append(p_dict)
+ return result
+
+
+@router.get("/{printer_id}", response_model=PrinterRead)
+def get_printer(printer_id: str, session: Session = Depends(get_session)):
+ """Einzelnen Drucker abrufen"""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+ p_dict = printer.dict()
+ p_dict["image_url"] = get_image_url(printer.id)
+ return p_dict
+
+
+@router.get("/{printer_id}/credentials", response_model=Dict[str, Any], summary="Get Printer Credentials")
+def get_printer_credentials(printer_id: str, session: Session = Depends(get_session)):
+ """
+ Lade Drucker Credentials (MQTT-relevant) aus der Datenbank.
+ Liefert nur die Felder, die für MQTT-Connections benötigt werden.
+ """
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail=f"Drucker mit ID {printer_id} nicht gefunden")
+
+ return {
+ "success": True,
+ "printer_id": printer.id,
+ "name": printer.name,
+ "api_key": printer.api_key,
+ "cloud_serial": printer.cloud_serial,
+ "ip_address": printer.ip_address,
+ "port": printer.port,
+ "printer_type": printer.printer_type,
+ "mqtt_version": printer.mqtt_version,
+ "model": printer.model
+ }
+
+
+@router.post("/")
+def create_printer(printer: PrinterCreate, session: Session = Depends(get_session)):
+ """Neuen Drucker anlegen"""
+ # Duplicate-Check per IP + Typ (Lite)
+ if printer.ip_address and printer.printer_type:
+ exists = session.exec(
+ select(Printer).where(
+ Printer.ip_address == printer.ip_address,
+ Printer.printer_type == printer.printer_type
+ )
+ ).first()
+ if exists:
+ existing = exists.dict()
+ existing["status"] = "exists"
+ existing["image_url"] = get_image_url(exists.id)
+ return existing
+ # Für Bambu muss eine Seriennummer und Access Code vorhanden sein
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ if not printer.cloud_serial or not printer.api_key:
+ raise HTTPException(status_code=400, detail="Seriennummer und Access Code sind erforderlich")
+
+ # Setze Standard-MQTT-Port für Bambu auf 8883, falls nicht angegeben
+ if printer.printer_type in ["bambu", "bambu_lab"] and not printer.port:
+ printer.port = 8883
+
+ db_printer = Printer.model_validate(printer)
+ session.add(db_printer)
+ session.commit()
+ session.refresh(db_printer)
+ p_dict = db_printer.dict()
+ p_dict["image_url"] = get_image_url(db_printer.id)
+ p_dict["status"] = "created"
+ return p_dict
+
+
+@router.put("/{printer_id}", response_model=PrinterRead)
+def update_printer(printer_id: str, printer: PrinterCreate, session: Session = Depends(get_session)):
+ """Drucker aktualisieren"""
+ db_printer = session.get(Printer, printer_id)
+ if not db_printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+ old_auto_connect = bool(getattr(db_printer, "auto_connect", False))
+ # Duplicate-Check bei IP/Typ-Änderung
+ if printer.ip_address and printer.printer_type:
+ exists = session.exec(
+ select(Printer).where(
+ Printer.ip_address == printer.ip_address,
+ Printer.printer_type == printer.printer_type,
+ Printer.id != printer_id
+ )
+ ).first()
+ if exists:
+ raise HTTPException(status_code=409, detail="Drucker mit dieser IP/Typ existiert bereits")
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ if not printer.cloud_serial or not printer.api_key:
+ raise HTTPException(status_code=400, detail="Seriennummer und Access Code sind erforderlich")
+ # If Bambu and no port provided, default to 8883
+ # (apply after merging data below to cover updates that switch type)
+ printer_data = printer.model_dump(exclude_unset=True)
+ for key, value in printer_data.items():
+ setattr(db_printer, key, value)
+ if db_printer.printer_type in ["bambu", "bambu_lab"] and not db_printer.port:
+ db_printer.port = 8883
+
+ session.add(db_printer)
+ session.commit()
+ session.refresh(db_printer)
+ new_auto_connect = bool(getattr(db_printer, "auto_connect", False))
+ if old_auto_connect != new_auto_connect:
+ logger.info(
+ "Auto-connect flag changed (%s→%s) for printer %s",
+ old_auto_connect,
+ new_auto_connect,
+ printer_id,
+ )
+ try:
+ mqtt_runtime.apply_auto_connect(db_printer)
+ except Exception as exc:
+ logger.exception("Failed to apply auto-connect change for printer %s: %s", printer_id, exc)
+ p_dict = db_printer.dict()
+ p_dict["image_url"] = get_image_url(db_printer.id)
+ return p_dict
+
+
+@router.post("/{printer_id}/image")
+async def upload_printer_image(
+ printer_id: str,
+ file: UploadFile = File(...),
+ session: Session = Depends(get_session)
+):
+ """Bild für einen Drucker hochladen und Pfad setzen."""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+
+ # Dateityp prüfen
+ content_type = (file.content_type or "").lower()
+ if content_type not in ["image/jpeg", "image/png", "image/webp"]:
+ raise HTTPException(status_code=400, detail="Nur JPG, PNG oder WEBP erlaubt")
+
+ # Größe prüfen (max 1 MB)
+ data = await file.read()
+ if len(data) > 1_000_000:
+ raise HTTPException(status_code=400, detail="Bild zu groß (max 1 MB)")
+
+ # Endung bestimmen
+ ext = ".jpg"
+ if content_type == "image/png":
+ ext = ".png"
+ elif content_type == "image/webp":
+ ext = ".webp"
+
+ # existierende Dateien entfernen
+ for e in (".jpg", ".jpeg", ".png", ".webp"):
+ candidate = os.path.join(UPLOAD_DIR, f"{printer_id}{e}")
+ if os.path.exists(candidate):
+ try:
+ os.remove(candidate)
+ except Exception:
+ pass
+
+ file_path = os.path.join(UPLOAD_DIR, f"{printer_id}{ext}")
+ with open(file_path, "wb") as f:
+ f.write(data)
+
+ image_url = f"/static/uploads/printers/{printer_id}{ext}"
+ return {"success": True, "image_url": image_url}
+
+
+@router.delete("/{printer_id}")
+def delete_printer(printer_id: str, session: Session = Depends(get_session)):
+ """Drucker löschen"""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+
+ session.delete(printer)
+ session.commit()
+ return {"success": True, "message": "Drucker gelöscht"}
+
+
+@router.post("/{printer_id}/test")
+async def test_printer_connection(printer_id: str, session: Session = Depends(get_session)):
+ """Verbindung zum Drucker testen"""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+
+ if printer.printer_type == "manual":
+ return {
+ "status": "info",
+ "message": "Manuelle Drucker haben keine Netzwerk-Verbindung",
+ "online": None
+ }
+
+ try:
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ # Test MQTT port (6000)
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(3)
+ result = sock.connect_ex((printer.ip_address, printer.port or 6000))
+ sock.close()
+
+ if result == 0:
+ return {
+ "status": "success",
+ "message": f"Bambu Lab Drucker erreichbar auf {printer.ip_address}:{printer.port or 6000}",
+ "online": True
+ }
+ else:
+ return {
+ "status": "error",
+ "message": f"Bambu Lab Drucker nicht erreichbar auf {printer.ip_address}:{printer.port or 6000}",
+ "online": False
+ }
+
+ elif printer.printer_type == "klipper":
+ # Test Moonraker API (7125)
+ port = printer.port or 7125
+ url = f"http://{printer.ip_address}:{port}/server/info"
+
+ async with httpx.AsyncClient(timeout=3.0) as client:
+ response = await client.get(url)
+
+ if response.status_code == 200:
+ data = response.json()
+ klippy_state = data.get("result", {}).get("klippy_state", "unknown")
+ return {
+ "status": "success",
+ "message": f"Klipper Drucker erreichbar - Status: {klippy_state}",
+ "online": True,
+ "klippy_state": klippy_state
+ }
+ else:
+ return {
+ "status": "warning",
+ "message": f"Klipper API antwortet mit Status {response.status_code}",
+ "online": False
+ }
+
+ return {
+ "status": "error",
+ "message": "Unbekannter Drucker-Typ",
+ "online": False
+ }
+
+ except socket.timeout:
+ return {
+ "status": "error",
+ "message": "Verbindungs-Timeout - Drucker nicht erreichbar",
+ "online": False
+ }
+ except Exception as e:
+ return {
+ "status": "error",
+ "message": f"Verbindungsfehler: {str(e)}",
+ "online": False
+ }
diff --git a/app/routes/printers.py.tmp b/app/routes/printers.py.tmp
new file mode 100644
index 0000000..dd4b014
--- /dev/null
+++ b/app/routes/printers.py.tmp
@@ -0,0 +1,264 @@
+from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
+from sqlmodel import Session, select
+from typing import List
+import socket
+import httpx
+import os
+from app.database import get_session
+from app.models.printer import Printer, PrinterCreate, PrinterRead
+
+router = APIRouter(prefix="/api/printers", tags=["printers"])
+
+UPLOAD_DIR = os.path.join("app", "static", "uploads", "printers")
+os.makedirs(UPLOAD_DIR, exist_ok=True)
+
+
+def get_image_url(printer_id: str) -> str | None:
+ for ext in (".jpg", ".jpeg", ".png", ".webp"):
+ candidate = os.path.join(UPLOAD_DIR, f"{printer_id}{ext}")
+ if os.path.exists(candidate):
+ return f"/static/uploads/printers/{printer_id}{ext}"
+ return None
+
+
+@router.get("/", response_model=List[PrinterRead])
+def get_all_printers(session: Session = Depends(get_session)):
+ """Alle Drucker abrufen"""
+ printers = session.exec(select(Printer)).all()
+ result = []
+ for printer in printers:
+ online = False
+ # Verbindungstest je nach Typ
+ try:
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(1)
+ res = sock.connect_ex((printer.ip_address, printer.port or 6000))
+ sock.close()
+ online = (res == 0)
+ elif printer.printer_type == "klipper":
+ port = printer.port or 7125
+ url = f"http://{printer.ip_address}:{port}/server/info"
+ try:
+ r = httpx.get(url, timeout=1)
+ online = r.status_code == 200
+ except Exception:
+ online = False
+ elif printer.printer_type == "manual":
+ online = None
+ except Exception:
+ online = False
+ # Dict mit Online-Status zurückgeben
+ p_dict = printer.dict()
+ p_dict["online"] = online
+ p_dict["image_url"] = get_image_url(printer.id)
+ result.append(p_dict)
+ return result
+
+
+@router.get("/{printer_id}", response_model=PrinterRead)
+def get_printer(printer_id: str, session: Session = Depends(get_session)):
+ """Einzelnen Drucker abrufen"""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+ p_dict = printer.dict()
+ p_dict["image_url"] = get_image_url(printer.id)
+ return p_dict
+
+
+@router.post("/", response_model=PrinterRead)
+def create_printer(printer: PrinterCreate, session: Session = Depends(get_session)):
+ """Neuen Drucker anlegen"""
+ # Einfacher Duplicate-Check per IP/Port
+ if printer.ip_address:
+ exists = session.exec(
+ select(Printer).where(
+ Printer.ip_address == printer.ip_address,
+ Printer.port == (printer.port or 6000)
+ )
+ ).first()
+ if exists:
+ raise HTTPException(status_code=409, detail="Drucker mit dieser IP/Port existiert bereits")
+ # Für Bambu muss eine Seriennummer und Access Code vorhanden sein
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ if not printer.cloud_serial or not printer.api_key:
+ raise HTTPException(status_code=400, detail="Seriennummer und Access Code sind erforderlich")
+
+ db_printer = Printer.model_validate(printer)
+ session.add(db_printer)
+ session.commit()
+ session.refresh(db_printer)
+ p_dict = db_printer.dict()
+ p_dict["image_url"] = get_image_url(db_printer.id)
+ return p_dict
+
+
+@router.put("/{printer_id}", response_model=PrinterRead)
+def update_printer(printer_id: str, printer: PrinterCreate, session: Session = Depends(get_session)):
+ """Drucker aktualisieren"""
+ db_printer = session.get(Printer, printer_id)
+ if not db_printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+ # Duplicate-Check bei IP/Port-Änderung
+ if printer.ip_address:
+ exists = session.exec(
+ select(Printer).where(
+ Printer.ip_address == printer.ip_address,
+ Printer.port == (printer.port or 6000),
+ Printer.id != printer_id
+ )
+ ).first()
+ if exists:
+ raise HTTPException(status_code=409, detail="Drucker mit dieser IP/Port existiert bereits")
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ if not printer.cloud_serial or not printer.api_key:
+ raise HTTPException(status_code=400, detail="Seriennummer und Access Code sind erforderlich")
+
+ printer_data = printer.model_dump(exclude_unset=True)
+ for key, value in printer_data.items():
+ setattr(db_printer, key, value)
+
+ session.add(db_printer)
+ session.commit()
+ session.refresh(db_printer)
+ p_dict = db_printer.dict()
+ p_dict["image_url"] = get_image_url(db_printer.id)
+ return p_dict
+
+
+@router.post("/{printer_id}/image")
+async def upload_printer_image(
+ printer_id: str,
+ file: UploadFile = File(...),
+ session: Session = Depends(get_session)
+):
+ """Bild für einen Drucker hochladen und Pfad setzen."""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+
+ # Dateityp prüfen
+ content_type = (file.content_type or "").lower()
+ if content_type not in ["image/jpeg", "image/png", "image/webp"]:
+ raise HTTPException(status_code=400, detail="Nur JPG, PNG oder WEBP erlaubt")
+
+ # Größe prüfen (max 1 MB)
+ data = await file.read()
+ if len(data) > 1_000_000:
+ raise HTTPException(status_code=400, detail="Bild zu groß (max 1 MB)")
+
+ # Endung bestimmen
+ ext = ".jpg"
+ if content_type == "image/png":
+ ext = ".png"
+ elif content_type == "image/webp":
+ ext = ".webp"
+
+ # existierende Dateien entfernen
+ for e in (".jpg", ".jpeg", ".png", ".webp"):
+ candidate = os.path.join(UPLOAD_DIR, f"{printer_id}{e}")
+ if os.path.exists(candidate):
+ try:
+ os.remove(candidate)
+ except Exception:
+ pass
+
+ file_path = os.path.join(UPLOAD_DIR, f"{printer_id}{ext}")
+ with open(file_path, "wb") as f:
+ f.write(data)
+
+ image_url = f"/static/uploads/printers/{printer_id}{ext}"
+ return {"success": True, "image_url": image_url}
+
+
+@router.delete("/{printer_id}")
+def delete_printer(printer_id: str, session: Session = Depends(get_session)):
+ """Drucker löschen"""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+
+ session.delete(printer)
+ session.commit()
+ return {"success": True, "message": "Drucker gelöscht"}
+
+
+@router.post("/{printer_id}/test")
+async def test_printer_connection(printer_id: str, session: Session = Depends(get_session)):
+ """Verbindung zum Drucker testen"""
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(status_code=404, detail="Drucker nicht gefunden")
+
+ if printer.printer_type == "manual":
+ return {
+ "status": "info",
+ "message": "Manuelle Drucker haben keine Netzwerk-Verbindung",
+ "online": None
+ }
+
+ try:
+ if printer.printer_type in ["bambu", "bambu_lab"]:
+ # Test MQTT port (6000)
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(3)
+ result = sock.connect_ex((printer.ip_address, printer.port or 6000))
+ sock.close()
+
+ if result == 0:
+ return {
+ "status": "success",
+ "message": f"Bambu Lab Drucker erreichbar auf {printer.ip_address}:{printer.port or 6000}",
+ "online": True
+ }
+ else:
+ return {
+ "status": "error",
+ "message": f"Bambu Lab Drucker nicht erreichbar auf {printer.ip_address}:{printer.port or 6000}",
+ "online": False
+ }
+
+ elif printer.printer_type == "klipper":
+ # Test Moonraker API (7125)
+ port = printer.port or 7125
+ url = f"http://{printer.ip_address}:{port}/server/info"
+
+ async with httpx.AsyncClient(timeout=3.0) as client:
+ response = await client.get(url)
+
+ if response.status_code == 200:
+ data = response.json()
+ klippy_state = data.get("result", {}).get("klippy_state", "unknown")
+ return {
+ "status": "success",
+ "message": f"Klipper Drucker erreichbar - Status: {klippy_state}",
+ "online": True,
+ "klippy_state": klippy_state
+ }
+ else:
+ return {
+ "status": "warning",
+ "message": f"Klipper API antwortet mit Status {response.status_code}",
+ "online": False
+ }
+
+ return {
+ "status": "error",
+ "message": "Unbekannter Drucker-Typ",
+ "online": False
+ }
+
+ except socket.timeout:
+ return {
+ "status": "error",
+ "message": "Verbindungs-Timeout - Drucker nicht erreichbar",
+ "online": False
+ }
+ except Exception as e:
+ return {
+ "status": "error",
+ "message": f"Verbindungsfehler: {str(e)}",
+ "online": False
+ }
+
diff --git a/app/routes/scanner_routes.py b/app/routes/scanner_routes.py
new file mode 100644
index 0000000..7833190
--- /dev/null
+++ b/app/routes/scanner_routes.py
@@ -0,0 +1,867 @@
+"""
+Printer Scanner & Discovery Routes
+Network Scanner für Bambu Lab und Klipper/Moonraker
+"""
+import socket
+import asyncio
+import time
+import logging
+from typing import List, Dict, Optional
+from fastapi import APIRouter, HTTPException, Depends
+from pydantic import BaseModel
+import ipaddress
+import json
+from sqlmodel import Session, select
+from app.database import get_session
+from app.models.settings import Setting
+
+router = APIRouter(prefix="/api/scanner", tags=["Printer Scanner"])
+debug_printer_router = APIRouter(prefix="/api/debug/printer", tags=["Debug Printer"])
+log = logging.getLogger(__name__)
+DEFAULT_FINGERPRINT_PORTS = [8883, 6000, 7125]
+DEFAULT_FINGERPRINT_TIMEOUT_MS = 1500
+
+
+# -----------------------------
+# MODELS
+# -----------------------------
+class ScanRequest(BaseModel):
+ ip_range: str = "192.168.1.0/24"
+ ports: Optional[List[int]] = None
+ timeout: float = 0.5
+
+
+class PrinterInfo(BaseModel):
+ ip: str
+ hostname: Optional[str] = None
+ type: str # bambu, klipper, unknown
+ port: int
+ accessible: bool
+ response_time: Optional[float] = None
+
+
+class PrinterTestRequest(BaseModel):
+ ip: str
+ port: int = 6000
+ timeout_ms: int = 1500
+
+class ProbeRequest(BaseModel):
+ host: str
+ port: int
+ kind: Optional[str] = None
+
+class FingerprintRequest(BaseModel):
+ host: str
+ port: Optional[int] = None
+ timeout_ms: int = 2000
+
+
+def _validate_ipv4(ip: str) -> str:
+ try:
+ ip_obj = ipaddress.ip_address(ip)
+ if ip_obj.version != 4:
+ raise HTTPException(status_code=400, detail="ip must be ipv4")
+ return ip
+ except ValueError:
+ raise HTTPException(status_code=400, detail="ip must be ipv4")
+
+
+# -----------------------------
+# NETWORK UTILITIES
+# -----------------------------
+async def check_port(ip: str, port: int, timeout: float = 0.3) -> bool:
+ """Prüft ob ein Port offen ist (async)"""
+ try:
+ # Run in executor um nicht zu blockieren
+ loop = asyncio.get_event_loop()
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(timeout)
+ result = await loop.run_in_executor(None, sock.connect_ex, (ip, port))
+ sock.close()
+ return result == 0
+ except Exception as exc:
+ log.debug("check_port failed for %s:%s -> %s", ip, port, exc, exc_info=True)
+ return False
+
+
+def get_hostname(ip: str) -> Optional[str]:
+ """Versucht den Hostname aufzulösen"""
+ try:
+ return socket.gethostbyaddr(ip)[0]
+ except Exception as exc:
+ log.debug("get_hostname failed for %s: %s", ip, exc, exc_info=True)
+ return None
+
+
+async def scan_host(ip: str, ports: List[int], timeout: float = 0.5) -> Optional[PrinterInfo]:
+ """Scannt einen Host auf offene Ports - priorisiert Drucker-Ports"""
+
+ # Priorisiere Drucker-Ports (6000 für Bambu, 7125 für Klipper)
+ priority_ports = [6000, 7125]
+ other_ports = [p for p in ports if p not in priority_ports]
+ scan_order = priority_ports + other_ports
+
+ for port in scan_order:
+ if port not in ports:
+ continue
+
+ if await check_port(ip, port, timeout):
+ # Erkenne Drucker-Typ anhand des Ports
+ printer_type = "unknown"
+
+ if port in [990, 8883, 322, 6000]:
+ printer_type = "bambu"
+ elif port == 7125:
+ printer_type = "klipper"
+ elif port == 80:
+ # Port 80 könnte Klipper oder Router sein - prüfe ob andere Drucker-Ports auch offen
+ if await check_port(ip, 7125, timeout):
+ printer_type = "klipper"
+ port = 7125
+ else:
+ # Wahrscheinlich kein Drucker (Router/FritzBox)
+ continue
+
+ hostname = get_hostname(ip)
+
+ return PrinterInfo(
+ ip=ip,
+ hostname=hostname,
+ type=printer_type,
+ port=port,
+ accessible=True,
+ response_time=timeout
+ )
+
+ return None
+
+
+# -----------------------------
+# SCAN ENDPOINTS
+# -----------------------------
+@router.post("/scan/network")
+async def scan_network(request: ScanRequest):
+ """
+ Scannt ein Netzwerk nach Druckern
+ Standard Ports:
+ - Bambu Lab: 6000 (MQTT)
+ - Klipper/Moonraker: 7125 (API)
+ """
+
+ # Default Ports wenn nicht angegeben
+ ports: List[int] = request.ports or [6000, 7125, 80]
+
+ try:
+ # IP Range parsen
+ network = ipaddress.ip_network(request.ip_range, strict=False)
+
+ # Limit: Max 254 IPs scannen
+ hosts = [str(h) for h in list(network.hosts())]
+ if len(hosts) > 254:
+ raise HTTPException(
+ status_code=400,
+ detail="IP Range zu groß. Max 254 Hosts erlaubt."
+ )
+
+ # Parallel scannen, aber mit Limit um das Event-Loop nicht zu blockieren
+ sem = asyncio.Semaphore(50)
+
+ async def limited_scan(ip: str):
+ async with sem:
+ return await scan_host(ip, ports, request.timeout)
+
+ tasks = [limited_scan(ip) for ip in hosts]
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+
+ found_printers = [r for r in results if isinstance(r, PrinterInfo)]
+
+ return {
+ "success": True,
+ "scanned_hosts": len(hosts),
+ "found_printers": len(found_printers),
+ "printers": found_printers
+ }
+
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=f"Scan Fehler: {str(e)}")
+
+
+@router.get("/scan/quick")
+async def quick_scan():
+ """
+ Schneller Scan des lokalen Netzwerks.
+ Nutzt haeufige IPs, erweitert um einen begrenzten /24-Sweep als Fallback.
+ """
+ common_ips = []
+ subnet_base = None
+
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.connect(("8.8.8.8", 80))
+ local_ip = s.getsockname()[0]
+ s.close()
+
+ subnet_parts = local_ip.split('.')
+ subnet_base = f"{subnet_parts[0]}.{subnet_parts[1]}.{subnet_parts[2]}"
+
+ host_numbers = [1, 2, 10, 20, 30, 40, 41, 42, 50, 100, 110, 120, 150, 200, 250, 254]
+ own_host = int(subnet_parts[3])
+ for offset in range(-5, 6):
+ nearby_host = own_host + offset
+ if 1 <= nearby_host <= 254 and nearby_host not in host_numbers:
+ host_numbers.append(nearby_host)
+
+ for host in sorted(set(host_numbers)):
+ common_ips.append(f"{subnet_base}.{host}")
+ except Exception as exc:
+ log.debug("Failed to determine local IP for detect_bambu_printers: %s", exc, exc_info=True)
+ base = "192.168."
+ for subnet in ["0", "1", "2", "178"]:
+ common_ips.extend([
+ f"{base}{subnet}.1",
+ f"{base}{subnet}.2",
+ f"{base}{subnet}.10",
+ f"{base}{subnet}.100",
+ ])
+
+ ports = [990, 8883, 7125, 322, 6000]
+
+ tasks = [scan_host(ip, ports, timeout=0.3) for ip in common_ips]
+ results = asyncio.gather(*tasks, return_exceptions=True)
+ results = await results
+
+ def to_lite(printer: PrinterInfo):
+ detected_type = printer.type or "generic"
+ if detected_type == "klipper":
+ detected_type = "klipper (Moonraker detected)"
+ return {
+ "ip": printer.ip,
+ "port": printer.port,
+ "type": detected_type,
+ "status": "idle"
+ }
+
+ found_printers = []
+ for result in results:
+ if isinstance(result, PrinterInfo):
+ found_printers.append(to_lite(result))
+
+ if not found_printers and subnet_base:
+ sweep_hosts = [f"{subnet_base}.{i}" for i in range(1, 255)]
+ sweep_hosts = sweep_hosts[:120]
+ tasks = [scan_host(ip, ports, timeout=0.25) for ip in sweep_hosts]
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+ for result in results:
+ if isinstance(result, PrinterInfo):
+ found_printers.append(to_lite(result))
+ return {
+ "success": True,
+ "scanned_hosts": len(common_ips) + len(sweep_hosts),
+ "found_printers": len(found_printers),
+ "printers": found_printers
+ }
+
+ return {
+ "success": True,
+ "scanned_hosts": len(common_ips),
+ "found_printers": len(found_printers),
+ "printers": found_printers
+ }
+
+@router.get("/test/connection")
+async def test_connection(ip: str, port: int = 6000):
+ """Testet die Verbindung zu einem spezifischen Drucker"""
+
+ import time
+ start = time.time()
+ is_open = await check_port(ip, port, timeout=2.0)
+ response_time = time.time() - start
+
+ if not is_open:
+ return {
+ "success": False,
+ "message": f"Port {port} auf {ip} nicht erreichbar",
+ "ip": ip,
+ "port": port,
+ "response_time": response_time
+ }
+
+ # Erkenne Typ
+ printer_type = "unknown"
+ message = f"Port {port} ist erreichbar"
+
+ if port == 6000:
+ printer_type = "bambu"
+ message = "✓ Bambu Lab MQTT Port erreichbar (Port 6000 offen, MQTT Login erforderlich)"
+ elif port == 990:
+ printer_type = "bambu"
+ message = "✓ Bambu Lab FTP Port erreichbar (Port 990)"
+ elif port == 8883:
+ printer_type = "bambu"
+ message = "✓ Bambu Lab MQTT SSL Port erreichbar (Port 8883)"
+ elif port in [7125, 80]:
+ printer_type = "klipper"
+ message = f"✓ Klipper API erreichbar (Port {port})"
+
+ hostname = get_hostname(ip)
+
+ return {
+ "success": True,
+ "message": message,
+ "ip": ip,
+ "port": port,
+ "hostname": hostname,
+ "type": printer_type,
+ "response_time": round(response_time * 1000, 2) # ms
+ }
+
+
+@router.get("/detect/bambu")
+async def detect_bambu_printers():
+ """Schnelle Erkennung von Bambu Lab Druckern im lokalen Netzwerk
+
+ Bambu Lab Ports:
+ - 990: FTP (File Transfer)
+ - 8883: MQTT over SSL
+ - 322: FTP Data
+ - 50000-50100: FTP Passive Mode Range
+ """
+
+ # Lokales Subnetz ermitteln
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.connect(("8.8.8.8", 80))
+ local_ip = s.getsockname()[0]
+ s.close()
+
+ # Subnetz extrahieren
+ subnet_parts = local_ip.split('.')
+ local_subnet = f"{subnet_parts[0]}.{subnet_parts[1]}.{subnet_parts[2]}.0/24"
+ network_ranges = [local_subnet]
+ except Exception as exc:
+ log.debug("Failed to determine local IP for detect_bambu_printers (fallback): %s", exc, exc_info=True)
+ # Fallback
+ network_ranges = ["192.168.0.0/24", "192.168.1.0/24", "192.168.178.0/24"]
+
+ # Bambu Lab Ports (in Priorität)
+ bambu_ports = [990, 8883, 322, 6000]
+ found_printers = []
+
+ async def check_bambu_host(ip_str: str):
+ for port in bambu_ports:
+ if await check_port(ip_str, port, timeout=0.3):
+ return {
+ "ip": ip_str,
+ "port": port,
+ "type": "bambu",
+ "hostname": get_hostname(ip_str)
+ }
+ return None
+
+ for range_str in network_ranges:
+ network = ipaddress.ip_network(range_str, strict=False)
+ hosts = [str(ip) for ip in list(network.hosts())[:50]] # Nur erste 50 IPs
+
+ # Scanne parallel
+ tasks = [check_bambu_host(ip) for ip in hosts]
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+
+ for result in results:
+ if result and not isinstance(result, Exception):
+ found_printers.append(result)
+
+ return {
+ "found": len(found_printers),
+ "printers": found_printers
+ }
+
+
+@router.get("/detect/klipper")
+async def detect_klipper_printers():
+ """Schnelle Erkennung von Klipper/Moonraker (Port 7125) im lokalen Netzwerk"""
+
+ # Lokales Subnetz ermitteln
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.connect(("8.8.8.8", 80))
+ local_ip = s.getsockname()[0]
+ s.close()
+
+ subnet_parts = local_ip.split('.')
+ local_subnet = f"{subnet_parts[0]}.{subnet_parts[1]}.{subnet_parts[2]}.0/24"
+ network_ranges = [local_subnet]
+ except Exception as exc:
+ log.debug("Failed to determine local IP for detect_klipper_printers: %s", exc, exc_info=True)
+ network_ranges = ["192.168.0.0/24", "192.168.1.0/24", "192.168.178.0/24"]
+
+ found_printers = []
+
+ async def check_klipper_host(ip_str: str):
+ if await check_port(ip_str, 7125, timeout=0.3):
+ return {
+ "ip": ip_str,
+ "port": 7125,
+ "type": "klipper",
+ "hostname": get_hostname(ip_str)
+ }
+ return None
+
+ for range_str in network_ranges:
+ network = ipaddress.ip_network(range_str, strict=False)
+ hosts = [str(ip) for ip in list(network.hosts())[:50]] # Erste 50 IPs
+
+ # Scanne parallel
+ tasks = [check_klipper_host(ip) for ip in hosts]
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+
+ for result in results:
+ if result and not isinstance(result, Exception):
+ found_printers.append(result)
+
+ return {
+ "found": len(found_printers),
+ "printers": found_printers
+ }
+
+
+# -----------------------------
+# CONFIG GENERATION
+# -----------------------------
+@router.post("/generate/config")
+async def generate_config(printers: List[Dict]):
+ """
+ Generiert Config-Vorschläge basierend auf gefundenen Druckern
+ """
+
+ config_suggestions = []
+
+ for printer in printers:
+ ip = printer.get("ip")
+ printer_type = printer.get("type")
+ hostname = printer.get("hostname", "unknown")
+
+ if printer_type == "bambu":
+ ip_suffix = ip.split('.')[-1] if ip else "unknown"
+ config_suggestions.append({
+ "type": "bambu",
+ "name": hostname or f"Bambu_{ip_suffix}",
+ "config": {
+ "bambu_lan": {
+ "enabled": True,
+ "ip": ip,
+ "port": 6000,
+ "access_code": "ENTER_YOUR_ACCESS_CODE"
+ }
+ }
+ })
+
+ elif printer_type == "klipper":
+ ip_suffix = ip.split('.')[-1] if ip else "unknown"
+ config_suggestions.append({
+ "type": "klipper",
+ "name": hostname or f"Klipper_{ip_suffix}",
+ "config": {
+ "klipper": {
+ "enabled": True,
+ "moonraker_url": f"http://{ip}:7125",
+ "api_key": "OPTIONAL_API_KEY"
+ }
+ }
+ })
+
+ return {
+ "success": True,
+ "count": len(config_suggestions),
+ "suggestions": config_suggestions
+ }
+
+
+# -----------------------------
+# NETWORK INFO
+# -----------------------------
+@router.get("/network/info")
+def get_network_info():
+ """Gibt Informationen über das lokale Netzwerk zurück"""
+
+ # Eigene IP ermitteln
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.connect(("8.8.8.8", 80))
+ local_ip = s.getsockname()[0]
+ s.close()
+ except Exception as exc:
+ log.debug("Failed to determine local IP in get_network_info: %s", exc, exc_info=True)
+ local_ip = "127.0.0.1"
+
+ # Hostname
+ hostname = socket.gethostname()
+
+ # Geschätztes Netzwerk
+ ip_parts = local_ip.split('.')
+ estimated_network = f"{ip_parts[0]}.{ip_parts[1]}.{ip_parts[2]}.0/24"
+
+ return {
+ "local_ip": local_ip,
+ "hostname": hostname,
+ "estimated_network": estimated_network,
+ "default_scan_range": estimated_network
+ }
+
+
+@debug_printer_router.post("/test")
+async def test_printer_port(payload: PrinterTestRequest):
+ ip = _validate_ipv4(payload.ip)
+ port = payload.port
+ if not (1 <= port <= 65535):
+ raise HTTPException(status_code=400, detail="port must be between 1 and 65535")
+ timeout_ms = payload.timeout_ms
+ if timeout_ms < 200:
+ timeout_ms = 200
+ if timeout_ms > 5000:
+ timeout_ms = 5000
+
+ timeout_s = timeout_ms / 1000.0
+ start = time.perf_counter()
+ try:
+ with socket.create_connection((ip, port), timeout=timeout_s) as sock:
+ sock.settimeout(timeout_s)
+ latency_ms = int((time.perf_counter() - start) * 1000)
+ log.debug("Port test success ip=%s port=%s latency_ms=%s", ip, port, latency_ms)
+ return {
+ "ok": True,
+ "ip": ip,
+ "port": port,
+ "reachable": True,
+ "latency_ms": latency_ms
+ }
+ except socket.timeout:
+ return {
+ "ok": True,
+ "ip": ip,
+ "port": port,
+ "reachable": False,
+ "error": "timeout",
+ "latency_ms": None
+ }
+ except ConnectionRefusedError:
+ return {
+ "ok": True,
+ "ip": ip,
+ "port": port,
+ "reachable": False,
+ "error": "refused",
+ "latency_ms": None
+ }
+ except OSError as exc:
+ err_label = "unreachable" if isinstance(exc, OSError) else "error"
+ return {
+ "ok": True,
+ "ip": ip,
+ "port": port,
+ "reachable": False,
+ "error": err_label,
+ "latency_ms": None
+ }
+ except Exception as exc: # pragma: no cover - defensive
+ detail = str(exc)[:120]
+ log.warning("Port test exception ip=%s port=%s err=%s", ip, port, detail)
+ return {
+ "ok": False,
+ "ip": ip,
+ "port": port,
+ "reachable": False,
+ "error": "exception",
+ "detail": detail,
+ "latency_ms": None
+ }
+
+
+@debug_printer_router.post("/probe")
+async def probe_printer(payload: ProbeRequest):
+ host = (payload.host or "").strip()
+ if not host:
+ raise HTTPException(status_code=400, detail="host required")
+ port = payload.port
+ if not (1 <= port <= 65535):
+ raise HTTPException(status_code=400, detail="port must be between 1 and 65535")
+ timeout_s = 2.0
+ start = time.perf_counter()
+ detected_type = "unknown"
+ if port == 6000:
+ detected_type = "bambu"
+ elif port == 7125:
+ detected_type = "klipper"
+
+ http_status: Optional[int] = None
+
+ try:
+ with socket.create_connection((host, port), timeout=timeout_s):
+ latency_ms = int((time.perf_counter() - start) * 1000)
+ # Optional: einfacher HTTP-Check fuer Moonraker (Port 7125)
+ if port == 7125:
+ try:
+ import http.client
+ conn = http.client.HTTPConnection(host, port=port, timeout=2.0)
+ conn.request("GET", "/")
+ resp = conn.getresponse()
+ http_status = resp.status
+ conn.close()
+ except Exception:
+ http_status = None
+ else:
+ # TCP-Connect erfolgreich -> als OK markieren
+ http_status = 200
+ except socket.timeout:
+ return {
+ "ok": True,
+ "status": "FEHLER",
+ "latency_ms": None,
+ "detected_type": detected_type,
+ "http_status": None,
+ "error_class": "timeout",
+ "message": "Zeitüberschreitung beim Verbindungsaufbau",
+ "details": ["Host oder Port reagiert nicht innerhalb des Zeitlimits."]
+ }
+ except ConnectionRefusedError:
+ return {
+ "ok": True,
+ "status": "FEHLER",
+ "latency_ms": None,
+ "detected_type": detected_type,
+ "http_status": None,
+ "error_class": "refused",
+ "message": "Verbindung wurde abgelehnt",
+ "details": ["Dienst auf dem Zielport lehnt die Verbindung ab."]
+ }
+ except OSError as exc:
+ return {
+ "ok": True,
+ "status": "FEHLER",
+ "latency_ms": None,
+ "detected_type": detected_type,
+ "http_status": None,
+ "error_class": "dns" if isinstance(exc, socket.gaierror) else "unreachable",
+ "message": "Zielhost nicht erreichbar",
+ "details": ["Bitte Host/IP und Port prüfen."]
+ }
+ except Exception as exc:
+ detail = str(exc)[:120]
+ log.warning("Probe exception host=%s port=%s err=%s", host, port, detail)
+ raise HTTPException(status_code=500, detail="Probe fehlgeschlagen")
+
+ status_label = "OK"
+ message = "Verbindung erfolgreich aufgebaut"
+ error_class = "none"
+ details = []
+
+ # HTTP-Status auswerten, falls vorhanden
+ if http_status is not None and http_status != 200:
+ if http_status == 401:
+ status_label = "FEHLER"
+ error_class = "auth"
+ message = "Authentifizierung erforderlich oder fehlerhaft"
+ details.append("HTTP 401 vom Ziel erhalten.")
+ elif http_status == 404:
+ status_label = "FEHLER"
+ error_class = "not_found"
+ message = "Endpunkt existiert nicht (404)"
+ details.append("HTTP 404 vom Ziel erhalten.")
+ elif http_status >= 500:
+ status_label = "FEHLER"
+ error_class = "http_error"
+ message = "Interner Fehler am Drucker (HTTP 5xx)"
+ details.append(f"HTTP {http_status} vom Ziel erhalten.")
+ else:
+ status_label = "WARNUNG"
+ error_class = "http"
+ message = f"HTTP Status {http_status}"
+ details.append(f"HTTP {http_status} vom Ziel erhalten.")
+
+ if latency_ms > 600 and status_label == "OK":
+ status_label = "WARNUNG"
+ message = "Hohe Antwortzeit"
+ error_class = "slow"
+ details.append(f"Antwortzeit {latency_ms} ms")
+
+ return {
+ "ok": True,
+ "status": status_label,
+ "latency_ms": latency_ms,
+ "detected_type": detected_type,
+ "http_status": http_status,
+ "error_class": error_class,
+ "message": message,
+ "details": details
+ }
+
+def _fingerprint_port(host: str, port: int, timeout_s: float):
+ """
+ Versucht einen TCP-Connect und liefert ein kleines Statusobjekt zurueck.
+ Fuer 8883 wird ein Hinweis auf Auth/Cert gegeben, wenn Connect klappt.
+ """
+ start = time.perf_counter()
+ try:
+ with socket.create_connection((host, port), timeout=timeout_s):
+ latency_ms = int((time.perf_counter() - start) * 1000)
+ if port == 8883:
+ return {
+ "reachable": True,
+ "error_class": "auth_required",
+ "message": "SSL/MQTT erreichbar, Login/Zertifikat erforderlich",
+ "latency_ms": latency_ms
+ }
+ elif port == 7125:
+ return {
+ "reachable": True,
+ "error_class": "ok",
+ "message": "Port erreichbar (Klipper/Moonraker)",
+ "latency_ms": latency_ms
+ }
+ return {
+ "reachable": True,
+ "error_class": "ok",
+ "message": "Port erreichbar",
+ "latency_ms": latency_ms
+ }
+ except socket.timeout:
+ return {
+ "reachable": False,
+ "error_class": "timeout",
+ "message": "Zeitueberschreitung",
+ "latency_ms": None
+ }
+ except ConnectionRefusedError:
+ return {
+ "reachable": False,
+ "error_class": "refused",
+ "message": "Verbindung abgelehnt",
+ "latency_ms": None
+ }
+ except OSError:
+ return {
+ "reachable": False,
+ "error_class": "unreachable",
+ "message": "Zielhost nicht erreichbar",
+ "latency_ms": None
+ }
+ except Exception as exc:
+ detail = str(exc)[:120]
+ return {
+ "reachable": False,
+ "error_class": "error",
+ "message": detail,
+ "latency_ms": None
+ }
+
+def _load_settings_map(session: Session) -> Dict[str, str]:
+ data: Dict[str, str] = {}
+ for row in session.exec(select(Setting)).all():
+ data[row.key] = row.value
+ return data
+
+
+def _get_bool(settings: Dict[str, str], key: str, default: bool) -> bool:
+ val = settings.get(key)
+ if val is None:
+ return default
+ return str(val).lower() in {"1", "true", "yes", "on"}
+
+
+def _get_int(settings: Dict[str, str], key: str, default: int) -> int:
+ val = settings.get(key)
+ try:
+ return int(val)
+ except (TypeError, ValueError):
+ return default
+
+
+def _get_ports(settings: Dict[str, str], key: str, default_ports: List[int]) -> List[int]:
+ val = settings.get(key)
+ if not val:
+ return default_ports
+ raw_val = val
+ if isinstance(val, str) and val.strip().startswith("["):
+ try:
+ parsed = json.loads(val)
+ raw_val = parsed
+ except Exception:
+ raw_val = val
+ if isinstance(raw_val, list):
+ parts = raw_val
+ else:
+ parts = [p.strip() for p in str(raw_val).split(",")]
+ ports: List[int] = []
+ for p in parts:
+ try:
+ port_int = int(p)
+ if 1 <= port_int <= 65535:
+ ports.append(port_int)
+ except ValueError:
+ continue
+ return ports or default_ports
+
+
+@debug_printer_router.post("/fingerprint")
+async def fingerprint_printer(payload: FingerprintRequest, session: Session = Depends(get_session)):
+ """
+ Ermittelt Port-Erreichbarkeit fuer Bambu (8883/6000) und Klipper (7125).
+ Keine echten Credentials, nur TCP + Hinweis bei Auth-Anforderung.
+ Immer aktiv sobald ein Drucker gefunden wurde (Teil der 3-Stufen Debug-Logik).
+ """
+ settings_map = _load_settings_map(session)
+ host = (payload.host or "").strip()
+ if not host:
+ raise HTTPException(status_code=400, detail="host required")
+ timeout_ms_setting = _get_int(settings_map, "fingerprint.timeout_ms", DEFAULT_FINGERPRINT_TIMEOUT_MS)
+ timeout_ms = payload.timeout_ms or timeout_ms_setting
+ if timeout_ms < 500:
+ timeout_ms = 500
+ timeout_s = timeout_ms / 1000.0
+
+ ports_to_check: List[int] = []
+ if payload.port:
+ ports_to_check.append(payload.port)
+ else:
+ ports_to_check.extend(_get_ports(settings_map, "fingerprint.ports", DEFAULT_FINGERPRINT_PORTS))
+
+ results = {}
+ results_list = []
+ for p in ports_to_check:
+ res = _fingerprint_port(host, p, timeout_s)
+ results[str(p)] = res
+ results_list.append(
+ {
+ "port": p,
+ "status": res.get("error_class", "unreachable"),
+ "message": res.get("message"),
+ "latency_ms": res.get("latency_ms"),
+ }
+ )
+
+ detected_type = "unknown"
+ confidence = 10
+ if results.get("7125", {}).get("reachable"):
+ detected_type = "klipper"
+ confidence = 95
+ elif results.get("6000", {}).get("reachable"):
+ detected_type = "bambu"
+ confidence = 90
+ elif results.get("8883", {}).get("reachable"):
+ detected_type = "bambu"
+ confidence = 70
+
+ status_label = "ERROR"
+ if any(v.get("reachable") for v in results.values()):
+ status_label = "OK"
+ if any(v.get("reachable") and v.get("error_class") == "auth_required" for v in results.values()):
+ status_label = "WARNUNG"
+
+ return {
+ "ok": True,
+ "status": status_label,
+ "detected_type": detected_type,
+ "confidence": confidence,
+ "ports": results,
+ "results": results_list,
+ "message": "Fingerprint abgeschlossen"
+ }
diff --git a/app/routes/service_routes.py b/app/routes/service_routes.py
new file mode 100644
index 0000000..55a9ba8
--- /dev/null
+++ b/app/routes/service_routes.py
@@ -0,0 +1,673 @@
+"""
+Service Control Routes
+Server Management, Docker, Dependencies, Tests
+"""
+import os
+import sys
+import subprocess
+import logging
+import psutil
+import zipfile
+from datetime import datetime
+import tempfile
+from fastapi import APIRouter, HTTPException
+from pydantic import BaseModel
+from typing import Optional
+
+router = APIRouter(prefix="/api/services", tags=["Service Control"])
+
+logger = logging.getLogger("app.routes.services")
+
+
+# -----------------------------
+# MODELS
+# -----------------------------
+class CommandResult(BaseModel):
+ success: bool
+ message: str
+ output: Optional[str] = None
+ exit_code: Optional[int] = None
+
+
+# -----------------------------
+# HELPER FUNCTIONS
+# -----------------------------
+def get_project_root():
+ """Gibt das Projekt-Root-Verzeichnis zurück"""
+ return os.getcwd()
+
+
+def get_python_executable():
+ """Gibt den Python-Pfad zurück"""
+ return sys.executable
+
+
+def make_test_db_path() -> str:
+ """Erstellt einen eindeutigen Pfad für die Test-DB im System-Temp-Ordner.
+
+ Vermeidet Locks auf gemeinsam genutzten Volumes (z.B. SMB/NFS) indem
+ temporäre DB-Dateien pro Testlauf im OS-Temp-Verzeichnis angelegt werden.
+ """
+ ts = datetime.now().strftime("%Y%m%d_%H%M%S_%f")
+ return os.path.join(tempfile.gettempdir(), f"filamenthub_test_{ts}.db")
+
+
+def run_command(command: str, cwd: Optional[str] = None, shell: bool = True, env: Optional[dict] = None) -> CommandResult:
+ """Führt einen Command aus und gibt das Ergebnis zurück
+
+ Args:
+ command: Der auszuführende Befehl
+ cwd: Working directory
+ shell: Shell-Modus aktivieren
+ env: Optionale Umgebungsvariablen (werden mit os.environ gemerged)
+ """
+ try:
+ # Merge custom env with system env
+ run_env = os.environ.copy()
+ if env:
+ run_env.update(env)
+
+ result = subprocess.run(
+ command,
+ shell=shell,
+ capture_output=True,
+ text=True,
+ cwd=cwd or get_project_root(),
+ env=run_env,
+ timeout=60
+ )
+
+ return CommandResult(
+ success=result.returncode == 0,
+ message="Erfolgreich ausgeführt" if result.returncode == 0 else "Fehler beim Ausführen",
+ output=result.stdout + result.stderr,
+ exit_code=result.returncode
+ )
+ except subprocess.TimeoutExpired:
+ return CommandResult(
+ success=False,
+ message="Command Timeout (>60s)",
+ output="Der Befehl hat zu lange gedauert"
+ )
+ except Exception as e:
+ return CommandResult(
+ success=False,
+ message=f"Fehler: {str(e)}",
+ output=None
+ )
+
+
+def create_test_response(status: str, message: str, details: Optional[str] = None) -> dict:
+ """Erstellt standardisiertes Test-Response-Format
+
+ Args:
+ status: 'ok', 'fail', oder 'blocked'
+ message: Menschlich lesbare Kurzmeldung
+ details: Optionale technische Details
+
+ Returns:
+ Standardisiertes Response-Dict
+ """
+ response = {
+ "status": status,
+ "message": message,
+ "timestamp": datetime.now().isoformat()
+ }
+ if details:
+ response["details"] = details
+ return response
+
+
+# -----------------------------
+# PROCESS INFO
+# -----------------------------
+@router.get("/process/info")
+def get_process_info():
+ """Gibt Informationen über den aktuellen Prozess zurück"""
+ process = psutil.Process()
+
+ return {
+ "pid": process.pid,
+ "name": process.name(),
+ "status": process.status(),
+ "create_time": process.create_time(),
+ "cpu_percent": process.cpu_percent(interval=0.1),
+ "memory_mb": round(process.memory_info().rss / 1024 / 1024, 2),
+ "num_threads": process.num_threads(),
+ "python_executable": get_python_executable(),
+ "python_version": sys.version
+ }
+
+
+@router.get("/process/list")
+def list_python_processes():
+ """Listet alle Python-Prozesse auf"""
+ processes = []
+ for proc in psutil.process_iter(['pid', 'name', 'cmdline', 'memory_info']):
+ try:
+ if 'python' in proc.info['name'].lower():
+ processes.append({
+ "pid": proc.info['pid'],
+ "name": proc.info['name'],
+ "memory_mb": round(proc.info['memory_info'].rss / 1024 / 1024, 2),
+ "cmdline": ' '.join(proc.info['cmdline'][:3]) if proc.info['cmdline'] else ''
+ })
+ except (psutil.NoSuchProcess, psutil.AccessDenied):
+ continue
+
+ return {"processes": processes, "count": len(processes)}
+
+
+# -----------------------------
+# DEPENDENCIES
+# -----------------------------
+@router.post("/dependencies/install")
+async def install_dependencies():
+ """Installiert Dependencies aus requirements.txt"""
+ python = get_python_executable()
+ command = f'"{python}" -m pip install -r requirements.txt'
+
+ result = run_command(command)
+ return result
+
+
+@router.post("/dependencies/update")
+async def update_dependency(package: str):
+ """Aktualisiert ein einzelnes Package"""
+ python = get_python_executable()
+ command = f'"{python}" -m pip install --upgrade {package}'
+
+ result = run_command(command)
+ return result
+
+
+@router.get("/dependencies/list")
+def list_dependencies():
+ """Listet alle installierten Packages auf"""
+ python = get_python_executable()
+ command = f'"{python}" -m pip list --format=json'
+
+ result = run_command(command)
+
+ if result.success and result.output:
+ import json
+ try:
+ packages = json.loads(result.output)
+ return {"packages": packages, "count": len(packages)}
+ except Exception as exc:
+ logger.debug("Failed to parse pip list output: %s", exc, exc_info=True)
+ return {"packages": [], "count": 0}
+
+ return {"packages": [], "count": 0}
+
+
+@router.get("/dependencies/outdated")
+def list_outdated_dependencies():
+ """Listet alle veralteten Packages auf"""
+ python = get_python_executable()
+ command = f'"{python}" -m pip list --outdated --format=json'
+
+ result = run_command(command)
+
+ if result.success and result.output:
+ import json
+ try:
+ packages = json.loads(result.output)
+ return {
+ "packages": packages,
+ "count": len(packages),
+ "has_updates": len(packages) > 0
+ }
+ except Exception as exc:
+ logger.debug("Failed to parse pip outdated output: %s", exc, exc_info=True)
+ return {"packages": [], "count": 0, "has_updates": False}
+
+ return {"packages": [], "count": 0, "has_updates": False}
+
+
+@router.post("/dependencies/update-all")
+async def update_all_dependencies():
+ """Aktualisiert alle Packages auf die neueste Version"""
+ python = get_python_executable()
+
+ # Erst outdated packages abrufen
+ outdated_result = run_command(f'"{python}" -m pip list --outdated --format=json')
+
+ if not outdated_result.success or not outdated_result.output:
+ return CommandResult(
+ success=False,
+ message="Konnte veraltete Packages nicht ermitteln"
+ )
+
+ import json
+ try:
+ outdated = json.loads(outdated_result.output)
+ if len(outdated) == 0:
+ return CommandResult(
+ success=True,
+ message="Alle Packages sind bereits aktuell!"
+ )
+
+ # Alle outdated packages updaten
+ package_names = [pkg['name'] for pkg in outdated]
+ packages_str = ' '.join(package_names)
+ command = f'"{python}" -m pip install --upgrade {packages_str}'
+
+ result = run_command(command)
+ return result
+
+ except Exception as e:
+ return CommandResult(
+ success=False,
+ message=f"Fehler beim Update: {str(e)}"
+ )
+
+
+# -----------------------------
+# TESTS
+# -----------------------------
+# Mini-Test-Status-API
+# Alle Test-Endpunkte liefern ein standardisiertes Response-Format:
+# {
+# "status": "ok" | "fail" | "blocked",
+# "message": "Menschlich lesbare Kurzmeldung",
+# "details": "Optionale technische Details (max 500 Zeichen)",
+# "timestamp": "ISO-8601 Zeitstempel"
+# }
+#
+# HTTP-Status ist IMMER 200 OK
+# Status-Logik erfolgt über das JSON-Feld "status"
+# -----------------------------
+
+@router.post("/tests/run")
+async def run_tests():
+ """Führt pytest aus"""
+ try:
+ python = get_python_executable()
+ command = f'"{python}" -m pytest -v'
+ result = run_command(command)
+
+ if result.success:
+ return create_test_response(
+ status="ok",
+ message="Tests erfolgreich"
+ )
+ else:
+ return create_test_response(
+ status="fail",
+ message="Tests fehlgeschlagen",
+ details=result.output[:500] if result.output else "Keine Details verfügbar"
+ )
+ except Exception as e:
+ return create_test_response(
+ status="blocked",
+ message="Tests konnten nicht ausgeführt werden",
+ details=str(e)
+ )
+
+
+@router.post("/tests/coverage")
+async def run_tests_with_coverage():
+ """Führt pytest mit Coverage aus - Plattformunabhängig"""
+ try:
+ python = get_python_executable()
+ # Use a unique test DB in the system temp directory to avoid locks
+ test_db_path = make_test_db_path()
+ # Umgebungsvariablen für Test-DB
+ test_env = {
+ "FILAMENTHUB_DB_PATH": test_db_path,
+ "PYTHONPATH": os.getcwd()
+ }
+
+ # DB initialisieren
+ init_result = run_command(
+ f'"{python}" -c "from app.database import init_db; init_db()"',
+ env=test_env
+ )
+
+ if not init_result.success:
+ return create_test_response(
+ status="blocked",
+ message="Fehler beim Initialisieren der Test-DB",
+ details=init_result.output[:500] if init_result.output else None
+ )
+
+ # Coverage nur über Smoke-Tests, um gelockte Prod-DB zu vermeiden
+ result = run_command(
+ f'"{python}" -m pytest --cov=app --cov-report=term tests/test_smoke_crud.py',
+ env=test_env
+ )
+
+ if result.success:
+ return create_test_response(
+ status="ok",
+ message="Coverage-Test erfolgreich"
+ )
+ else:
+ return create_test_response(
+ status="fail",
+ message="Coverage-Test fehlgeschlagen",
+ details=result.output[:500] if result.output else "Keine Details verfügbar"
+ )
+ except Exception as e:
+ return create_test_response(
+ status="blocked",
+ message="Coverage-Test konnte nicht ausgeführt werden",
+ details=str(e)
+ )
+
+
+def _test_command(py_args: str) -> CommandResult:
+ """
+ Führt Tests gegen eine eigene Test-DB aus, damit die laufende Prod-DB nicht gelockt wird.
+ Plattformunabhängig: Funktioniert auf Windows, Linux (Unraid), Raspberry Pi.
+ """
+ python = get_python_executable()
+ # Use unique temp DB path to prevent collisions/locks
+ test_db_path = make_test_db_path()
+
+ # Umgebungsvariablen für Test-DB
+ test_env = {
+ "FILAMENTHUB_DB_PATH": test_db_path,
+ "PYTHONPATH": os.getcwd()
+ }
+
+ # DB initialisieren
+ init_result = run_command(
+ f'"{python}" -c "from app.database import init_db; init_db()"',
+ env=test_env
+ )
+
+ if not init_result.success:
+ return CommandResult(
+ success=False,
+ message="Fehler beim Initialisieren der Test-DB",
+ output=init_result.output
+ )
+
+ # Tests ausführen
+ return run_command(
+ f'"{python}" -m pytest {py_args}',
+ env=test_env
+ )
+
+
+@router.post("/tests/smoke")
+async def run_smoke_tests():
+ """Smoke-CRUD-Tests gegen Test-DB - Plattformunabhängig"""
+ try:
+ result = _test_command("tests/test_smoke_crud.py -q")
+
+ if result.success:
+ return create_test_response(
+ status="ok",
+ message="Smoke CRUD Test erfolgreich"
+ )
+ else:
+ return create_test_response(
+ status="fail",
+ message="Smoke CRUD Test fehlgeschlagen",
+ details=result.output[:500] if result.output else "Keine Details verfügbar"
+ )
+ except Exception as e:
+ return create_test_response(
+ status="blocked",
+ message="Test konnte nicht ausgeführt werden",
+ details=str(e)
+ )
+
+
+@router.post("/tests/db")
+async def run_db_tests():
+ """DB-CRUD-Testskript gegen Test-DB (kein pytest-Wrapper) - Plattformunabhängig"""
+ try:
+ python = get_python_executable()
+ test_db_path = make_test_db_path()
+ # Umgebungsvariablen für Test-DB
+ test_env = {
+ "FILAMENTHUB_DB_PATH": test_db_path,
+ "PYTHONPATH": os.getcwd()
+ }
+
+ # DB initialisieren
+ init_result = run_command(
+ f'"{python}" -c "from app.database import init_db; init_db()"',
+ env=test_env
+ )
+
+ if not init_result.success:
+ return create_test_response(
+ status="blocked",
+ message="Fehler beim Initialisieren der Test-DB",
+ details=init_result.output[:500] if init_result.output else None
+ )
+
+ # Test-Skript ausführen
+ result = run_command(
+ f'"{python}" tests/test_db_crud.py',
+ env=test_env
+ )
+
+ if result.success:
+ return create_test_response(
+ status="ok",
+ message="DB CRUD Test erfolgreich"
+ )
+ else:
+ return create_test_response(
+ status="fail",
+ message="DB CRUD Test fehlgeschlagen",
+ details=result.output[:500] if result.output else "Keine Details verfügbar"
+ )
+ except Exception as e:
+ return create_test_response(
+ status="blocked",
+ message="Test konnte nicht ausgeführt werden",
+ details=str(e)
+ )
+
+
+@router.post("/tests/all")
+async def run_all_tests():
+ """Alle Tests gegen Test-DB"""
+ try:
+ result = _test_command("-q")
+
+ if result.success:
+ return create_test_response(
+ status="ok",
+ message="Alle Tests erfolgreich"
+ )
+ else:
+ return create_test_response(
+ status="fail",
+ message="Einige Tests fehlgeschlagen",
+ details=result.output[:500] if result.output else "Keine Details verfügbar"
+ )
+ except Exception as e:
+ return create_test_response(
+ status="blocked",
+ message="Tests konnten nicht ausgeführt werden",
+ details=str(e)
+ )
+
+
+# -----------------------------
+# DOCKER
+# -----------------------------
+@router.get("/docker/status")
+def docker_status():
+ """Prüft ob Docker verfügbar ist"""
+ result = run_command("docker --version")
+
+ if result.success:
+ compose_result = run_command("docker compose version")
+ return {
+ "available": True,
+ "docker_version": result.output.strip() if result.output else "Unknown",
+ "compose_available": compose_result.success,
+ "compose_version": compose_result.output.strip() if compose_result.success and compose_result.output else None
+ }
+
+ return {
+ "available": False,
+ "docker_version": None,
+ "compose_available": False,
+ "compose_version": None
+ }
+
+
+@router.post("/docker/compose/up")
+async def docker_compose_up():
+ """Startet Docker Compose"""
+ result = run_command("docker compose up -d")
+ return result
+
+
+@router.post("/docker/compose/down")
+async def docker_compose_down():
+ """Stoppt Docker Compose"""
+ result = run_command("docker compose down")
+ return result
+
+
+@router.get("/docker/compose/ps")
+def docker_compose_ps():
+ """Zeigt laufende Container"""
+ result = run_command("docker compose ps --format json")
+ return {"output": result.output, "success": result.success}
+
+
+# -----------------------------
+# FILE OPERATIONS
+# -----------------------------
+@router.get("/logs/list")
+def list_log_files():
+ """Listet alle Log-Dateien auf"""
+ return {"deprecated": True, "use": "/api/debug/logs"}
+
+@router.post("/logs/clear/{module}")
+async def clear_module_logs(module: str):
+ """Loescht alle Logs eines Moduls"""
+ return {"deprecated": True, "use": "/api/debug/logs"}
+
+# -----------------------------
+# BACKUP
+# -----------------------------
+@router.post("/backup")
+async def create_backup():
+ """
+ Erstellt ein kombiniertes Backup (SQLite-DB + Logfiles) als ZIP im Verzeichnis data/backups.
+ """
+ backup_root = "data/backups"
+ os.makedirs(backup_root, exist_ok=True)
+
+ db_path = os.environ.get("FILAMENTHUB_DB_PATH", "data/filamenthub.db")
+ logs_root = "logs"
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ zip_path = os.path.join(backup_root, f"filamenthub_backup_{timestamp}.zip")
+
+ files_added = 0
+ with zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zipf:
+ if os.path.exists(db_path):
+ zipf.write(db_path, arcname="database/filamenthub.db")
+ files_added += 1
+
+ if os.path.exists(logs_root):
+ for root, _, files in os.walk(logs_root):
+ for file in files:
+ if file.endswith(".log"):
+ file_path = os.path.join(root, file)
+ arcname = os.path.join("logs", os.path.relpath(file_path, logs_root))
+ zipf.write(file_path, arcname=arcname)
+ files_added += 1
+
+ if files_added == 0:
+ try:
+ os.remove(zip_path)
+ except OSError:
+ pass
+ raise HTTPException(status_code=404, detail="Weder Datenbank noch Logfiles gefunden")
+
+ size_mb = round(os.path.getsize(zip_path) / 1024 / 1024, 3)
+ return {
+ "success": True,
+ "message": "Backup (DB + Logs) erstellt",
+ "backup_path": os.path.abspath(zip_path),
+ "backup_size_mb": size_mb,
+ "files_added": files_added,
+ "timestamp": timestamp
+ }
+
+
+# -----------------------------
+# SERVER CONTROL
+# -----------------------------
+@router.post("/server/restart")
+async def restart_server():
+ """
+ Triggert einen Server-Neustart
+ HINWEIS: Funktioniert nur mit uvicorn reload=True
+ """
+ return {
+ "success": True,
+ "message": "Server-Neustart wird durch File-Änderung getriggert",
+ "note": "Bei reload=True wird automatisch neugestartet"
+ }
+
+
+@router.get("/server/stats")
+def get_server_stats():
+ """Gibt Server-Statistiken zurück"""
+ import time
+ from datetime import datetime
+ from app.routes.system_routes import START_TIME
+
+ uptime = time.time() - START_TIME
+ process = psutil.Process()
+
+ # Network connections (wie viele aktive Verbindungen)
+ try:
+ connections = len(process.connections())
+ except (psutil.AccessDenied, psutil.NoSuchProcess):
+ connections = 0
+
+ # Hostname ermitteln
+ try:
+ import socket
+ hostname = socket.gethostname()
+ except Exception as exc:
+ logger.debug("Failed to determine hostname: %s", exc, exc_info=True)
+ hostname = "Unknown"
+
+ return {
+ "start_time": datetime.fromtimestamp(START_TIME).strftime("%Y-%m-%d %H:%M:%S"),
+ "uptime_seconds": round(uptime, 2),
+ "uptime_formatted": format_uptime(uptime),
+ "python_version": f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}",
+ "platform": sys.platform,
+ "hostname": hostname,
+ "port": 8080, # aus config laden später
+ "active_connections": connections,
+ "threads": process.num_threads(),
+ "memory_mb": round(process.memory_info().rss / 1024 / 1024, 2)
+ }
+
+
+def format_uptime(seconds: float) -> str:
+ """Formatiert Uptime in lesbares Format"""
+ days, remainder = divmod(int(seconds), 86400)
+ hours, remainder = divmod(remainder, 3600)
+ minutes, seconds = divmod(remainder, 60)
+
+ parts = []
+ if days > 0:
+ parts.append(f"{days}d")
+ if hours > 0:
+ parts.append(f"{hours}h")
+ if minutes > 0:
+ parts.append(f"{minutes}m")
+ if seconds > 0 or not parts:
+ parts.append(f"{seconds}s")
+
+ return " ".join(parts)
diff --git a/app/routes/settings_routes.py b/app/routes/settings_routes.py
new file mode 100644
index 0000000..f5a932b
--- /dev/null
+++ b/app/routes/settings_routes.py
@@ -0,0 +1,258 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import Session, select
+
+from app.database import get_session
+from app.models.settings import Setting
+import json
+from pathlib import Path
+
+router = APIRouter()
+
+
+DEFAULTS = {
+ "ams_mode": "single",
+ "debug_ws_logging": "false",
+ "debug_center_mode": "lite",
+ "debug_center_pro_unlocked": "false",
+ "cost.electricity_price_kwh": "0.30",
+}
+PRO_CONFIG_DEFAULTS = {
+ "debug.config.debug_logging_enabled": "false",
+ "debug.config.latency_warning_threshold_ms": "600",
+ "debug.config.scanner_probe_timeout_ms": "800",
+ "debug.config.scanner_allow_duplicates": "false",
+ "debug.config.websocket_debug_level": "basic",
+}
+TRUE_VALUES = {"1", "true", "yes", "on"}
+WEBSOCKET_DEBUG_LEVELS = {"off", "basic", "verbose"}
+MIN_LATENCY_WARNING_MS = 100
+MIN_SCANNER_PROBE_TIMEOUT_MS = 200
+
+
+def get_setting(session: Session, key: str, default: str | None = None) -> str | None:
+ setting = session.exec(select(Setting).where(Setting.key == key)).first()
+ if setting:
+ return setting.value
+ if default is not None:
+ setting = Setting(key=key, value=default)
+ session.add(setting)
+ session.commit()
+ session.refresh(setting)
+ return setting.value
+ return None
+
+
+def set_setting(session: Session, key: str, value: str) -> None:
+ setting = session.exec(select(Setting).where(Setting.key == key)).first()
+ if setting:
+ setting.value = value
+ else:
+ setting = Setting(key=key, value=value)
+ session.add(setting)
+ session.commit()
+
+
+def _normalize_bool(value: str | None, default: bool) -> bool:
+ if value is None:
+ return default
+ return str(value).lower() in TRUE_VALUES
+
+
+def _normalize_int(value: str | None, default: int, minimum: int | None = None) -> int:
+ try:
+ normalized = int(str(value))
+ except (TypeError, ValueError):
+ return default
+ if minimum is not None and normalized < minimum:
+ return default
+ return normalized
+
+
+def _normalize_float(value: str | None, default: float, minimum: float | None = None) -> float:
+ try:
+ normalized = float(str(value))
+ except (TypeError, ValueError):
+ return default
+ if minimum is not None and normalized < minimum:
+ return default
+ return normalized
+
+
+def _normalize_enum(value: str | None, allowed: set[str], default: str) -> str:
+ if value is None:
+ return default
+ normalized = str(value).lower()
+ return normalized if normalized in allowed else default
+
+
+def _load_config_defaults() -> dict:
+ config_path = Path(__file__).resolve().parents[2] / "config.json"
+ if not config_path.exists():
+ return {}
+ try:
+ return json.loads(config_path.read_text(encoding="utf-8"))
+ except Exception:
+ return {}
+
+
+def _ensure_runtime_settings(session: Session) -> None:
+ defaults = _load_config_defaults()
+ debug_defaults = defaults.get("debug", {})
+ runtime_defaults = debug_defaults.get("runtime", {})
+ scanner_defaults = defaults.get("scanner", {}).get("pro", {})
+ fingerprint_defaults = defaults.get("fingerprint", {})
+
+ to_init = {
+ "debug.runtime.enabled": str(runtime_defaults.get("enabled", True)).lower(),
+ "debug.runtime.poll_interval_ms": str(runtime_defaults.get("poll_interval_ms", 2000)),
+ "scanner.pro.deep_probe": str(scanner_defaults.get("deep_probe", False)).lower(),
+ "scanner.pro.fingerprint_enabled": str(scanner_defaults.get("fingerprint_enabled", False)).lower(),
+ "fingerprint.enabled": str(fingerprint_defaults.get("enabled", False)).lower(),
+ "fingerprint.timeout_ms": str(fingerprint_defaults.get("timeout_ms", 1500)),
+ "cost.electricity_price_kwh": DEFAULTS["cost.electricity_price_kwh"],
+ }
+ ports_default = fingerprint_defaults.get("ports", [8883, 6000, 7125])
+ if not isinstance(ports_default, list):
+ ports_default = [8883, 6000, 7125]
+ to_init["fingerprint.ports"] = json.dumps(ports_default)
+
+ for key, value in to_init.items():
+ if session.exec(select(Setting).where(Setting.key == key)).first():
+ continue
+ set_setting(session, key, value)
+
+
+@router.get("/api/settings")
+def get_settings(session: Session = Depends(get_session)):
+ _ensure_runtime_settings(session)
+ ams_mode = get_setting(session, "ams_mode", DEFAULTS["ams_mode"]) or DEFAULTS["ams_mode"]
+ debug_ws_logging = get_setting(session, "debug_ws_logging", DEFAULTS["debug_ws_logging"]) or DEFAULTS["debug_ws_logging"]
+ debug_center_mode = get_setting(session, "debug_center_mode", DEFAULTS["debug_center_mode"]) or DEFAULTS["debug_center_mode"]
+ pro_unlocked = get_setting(session, "debug_center_pro_unlocked", DEFAULTS["debug_center_pro_unlocked"]) or DEFAULTS["debug_center_pro_unlocked"]
+ electricity_price = get_setting(session, "cost.electricity_price_kwh", DEFAULTS["cost.electricity_price_kwh"]) or DEFAULTS["cost.electricity_price_kwh"]
+ debug_logging_enabled = get_setting(session, "debug.config.debug_logging_enabled", PRO_CONFIG_DEFAULTS["debug.config.debug_logging_enabled"]) or PRO_CONFIG_DEFAULTS["debug.config.debug_logging_enabled"]
+ latency_warning_threshold = get_setting(session, "debug.config.latency_warning_threshold_ms", PRO_CONFIG_DEFAULTS["debug.config.latency_warning_threshold_ms"]) or PRO_CONFIG_DEFAULTS["debug.config.latency_warning_threshold_ms"]
+ scanner_probe_timeout = get_setting(session, "debug.config.scanner_probe_timeout_ms", PRO_CONFIG_DEFAULTS["debug.config.scanner_probe_timeout_ms"]) or PRO_CONFIG_DEFAULTS["debug.config.scanner_probe_timeout_ms"]
+ scanner_allow_duplicates = get_setting(session, "debug.config.scanner_allow_duplicates", PRO_CONFIG_DEFAULTS["debug.config.scanner_allow_duplicates"]) or PRO_CONFIG_DEFAULTS["debug.config.scanner_allow_duplicates"]
+ websocket_debug_level = get_setting(session, "debug.config.websocket_debug_level", PRO_CONFIG_DEFAULTS["debug.config.websocket_debug_level"]) or PRO_CONFIG_DEFAULTS["debug.config.websocket_debug_level"]
+ return {
+ "ams_mode": ams_mode if ams_mode in {"single", "multi"} else DEFAULTS["ams_mode"],
+ "debug_ws_logging": _normalize_bool(debug_ws_logging, default=False),
+ "debug_center_mode": debug_center_mode if debug_center_mode in {"lite", "pro"} else DEFAULTS["debug_center_mode"],
+ "debug_center_pro_unlocked": _normalize_bool(pro_unlocked, default=False),
+ "cost.electricity_price_kwh": _normalize_float(electricity_price, default=float(DEFAULTS["cost.electricity_price_kwh"]), minimum=0.0),
+ "debug.config.debug_logging_enabled": _normalize_bool(debug_logging_enabled, default=False),
+ "debug.config.latency_warning_threshold_ms": _normalize_int(
+ latency_warning_threshold,
+ default=int(PRO_CONFIG_DEFAULTS["debug.config.latency_warning_threshold_ms"]),
+ minimum=MIN_LATENCY_WARNING_MS,
+ ),
+ "debug.config.scanner_probe_timeout_ms": _normalize_int(
+ scanner_probe_timeout,
+ default=int(PRO_CONFIG_DEFAULTS["debug.config.scanner_probe_timeout_ms"]),
+ minimum=MIN_SCANNER_PROBE_TIMEOUT_MS,
+ ),
+ "debug.config.scanner_allow_duplicates": _normalize_bool(scanner_allow_duplicates, default=False),
+ "debug.config.websocket_debug_level": _normalize_enum(
+ websocket_debug_level, WEBSOCKET_DEBUG_LEVELS, PRO_CONFIG_DEFAULTS["debug.config.websocket_debug_level"]
+ ),
+ }
+
+
+@router.put("/api/settings")
+async def update_settings(payload: dict, session: Session = Depends(get_session)):
+ _ensure_runtime_settings(session)
+ allowed_keys = {
+ "ams_mode",
+ "debug_ws_logging",
+ "debug_center_mode",
+ "debug_center_pro_unlocked",
+ "cost.electricity_price_kwh",
+ "debug.config.debug_logging_enabled",
+ "debug.config.latency_warning_threshold_ms",
+ "debug.config.scanner_probe_timeout_ms",
+ "debug.config.scanner_allow_duplicates",
+ "debug.config.websocket_debug_level",
+ }
+ if not any(k in payload for k in allowed_keys):
+ raise HTTPException(status_code=400, detail="Keine gueltigen Settings uebergeben.")
+
+ if "ams_mode" in payload:
+ mode = str(payload.get("ams_mode")).lower()
+ if mode not in {"single", "multi"}:
+ raise HTTPException(status_code=400, detail="ams_mode muss single oder multi sein.")
+ set_setting(session, "ams_mode", mode)
+
+ if "debug_ws_logging" in payload:
+ val = payload.get("debug_ws_logging")
+ normalized = "true" if str(val).lower() in TRUE_VALUES else "false"
+ set_setting(session, "debug_ws_logging", normalized)
+
+ if "debug_center_mode" in payload:
+ mode = str(payload.get("debug_center_mode", "")).lower()
+ if mode not in {"lite", "pro"}:
+ raise HTTPException(status_code=400, detail="debug_center_mode muss lite oder pro sein.")
+ set_setting(session, "debug_center_mode", mode)
+
+ if "debug_center_pro_unlocked" in payload:
+ val = payload.get("debug_center_pro_unlocked")
+ normalized = "true" if str(val).lower() in TRUE_VALUES else "false"
+ set_setting(session, "debug_center_pro_unlocked", normalized)
+
+ if "debug.config.debug_logging_enabled" in payload:
+ val = payload.get("debug.config.debug_logging_enabled")
+ normalized = "true" if str(val).lower() in TRUE_VALUES else "false"
+ set_setting(session, "debug.config.debug_logging_enabled", normalized)
+
+ if "cost.electricity_price_kwh" in payload:
+ val = payload.get("cost.electricity_price_kwh")
+ if val is None:
+ raise HTTPException(status_code=400, detail="cost.electricity_price_kwh darf nicht leer sein.")
+ try:
+ price = float(val)
+ except (TypeError, ValueError):
+ raise HTTPException(status_code=400, detail="cost.electricity_price_kwh muss eine Zahl sein.")
+ if price < 0:
+ raise HTTPException(status_code=400, detail="cost.electricity_price_kwh darf nicht negativ sein.")
+ set_setting(session, "cost.electricity_price_kwh", str(price))
+
+ if "debug.config.latency_warning_threshold_ms" in payload:
+ val = payload.get("debug.config.latency_warning_threshold_ms")
+ if val is None:
+ raise HTTPException(status_code=400, detail="latency_warning_threshold_ms darf nicht leer sein.")
+ try:
+ threshold = int(val)
+ except (TypeError, ValueError):
+ raise HTTPException(status_code=400, detail="latency_warning_threshold_ms muss eine Zahl sein.")
+ if threshold < MIN_LATENCY_WARNING_MS:
+ raise HTTPException(
+ status_code=400, detail=f"latency_warning_threshold_ms muss >= {MIN_LATENCY_WARNING_MS} sein."
+ )
+ set_setting(session, "debug.config.latency_warning_threshold_ms", str(threshold))
+
+ if "debug.config.scanner_probe_timeout_ms" in payload:
+ val = payload.get("debug.config.scanner_probe_timeout_ms")
+ if val is None:
+ raise HTTPException(status_code=400, detail="scanner_probe_timeout_ms darf nicht leer sein.")
+ try:
+ timeout = int(val)
+ except (TypeError, ValueError):
+ raise HTTPException(status_code=400, detail="scanner_probe_timeout_ms muss eine Zahl sein.")
+ if timeout < MIN_SCANNER_PROBE_TIMEOUT_MS:
+ raise HTTPException(
+ status_code=400, detail=f"scanner_probe_timeout_ms muss >= {MIN_SCANNER_PROBE_TIMEOUT_MS} sein."
+ )
+ set_setting(session, "debug.config.scanner_probe_timeout_ms", str(timeout))
+
+ if "debug.config.scanner_allow_duplicates" in payload:
+ val = payload.get("debug.config.scanner_allow_duplicates")
+ normalized = "true" if str(val).lower() in TRUE_VALUES else "false"
+ set_setting(session, "debug.config.scanner_allow_duplicates", normalized)
+
+ if "debug.config.websocket_debug_level" in payload:
+ level = str(payload.get("debug.config.websocket_debug_level", "")).lower()
+ if level not in WEBSOCKET_DEBUG_LEVELS:
+ raise HTTPException(status_code=400, detail="websocket_debug_level muss off, basic oder verbose sein.")
+ set_setting(session, "debug.config.websocket_debug_level", level)
+
+ return get_settings(session)
diff --git a/app/routes/spool_numbers.py b/app/routes/spool_numbers.py
new file mode 100644
index 0000000..1111065
--- /dev/null
+++ b/app/routes/spool_numbers.py
@@ -0,0 +1,351 @@
+"""
+Neue API-Endpoints für Spulen-Nummern-System
+
+Diese Routes erweitern die bestehenden Spulen-APIs um:
+- Suche nach Spulen-Nummer
+- Live-Suche für Quick-Assign
+- Manuelle AMS-Slot-Zuweisung
+- Spule von Slot entfernen
+"""
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import select, Session, or_
+from typing import List, Optional
+from datetime import datetime
+
+from app.database import get_session
+from app.models.spool import Spool, SpoolReadSchema
+from app.models.printer import Printer
+from pydantic import BaseModel
+
+
+router = APIRouter(prefix="/api/spools", tags=["Spool Numbers"])
+
+
+# === SUCHE ===
+
+@router.get("/by-number/{spool_number}", response_model=SpoolReadSchema)
+def get_spool_by_number(spool_number: int, session: Session = Depends(get_session)):
+ """
+ Findet Spule nach Spulen-Nummer
+
+ Args:
+ spool_number: Spulen-Nummer (z.B. 3 für "Spule #3")
+
+ Returns:
+ Spool-Objekt
+
+ Raises:
+ 404: Spule nicht gefunden
+ """
+ spool = session.exec(
+ select(Spool).where(Spool.spool_number == spool_number)
+ ).first()
+
+ if not spool:
+ raise HTTPException(
+ status_code=404,
+ detail=f"Spule #{spool_number} nicht gefunden"
+ )
+
+ return SpoolReadSchema.model_validate(spool)
+
+
+@router.get("/search", response_model=List[SpoolReadSchema])
+def search_spools(
+ term: str = "",
+ unassigned: bool = False,
+ printer_id: Optional[str] = None,
+ session: Session = Depends(get_session)
+):
+ """
+ Live-Suche in Spulen (OHNE JOINs - sehr schnell!)
+
+ Sucht in denormalisierten Feldern: spool_number, name, vendor, color
+
+ Query-Parameter:
+ - term: Suchbegriff (optional)
+ - unassigned: true = nur nicht zugewiesene Spulen
+ - printer_id: Nur Spulen eines bestimmten Druckers
+
+ Returns:
+ Liste von Spulen
+
+ Beispiele:
+ GET /api/spools/search?term=6
+ → Findet #6, #16, #26
+
+ GET /api/spools/search?term=PLA&unassigned=true
+ → Findet alle freien PLA-Spulen
+
+ GET /api/spools/search?term=black
+ → Findet alle schwarzen Spulen
+ """
+ stmt = select(Spool)
+
+ # Filter: Nur nicht zugewiesen
+ if unassigned:
+ stmt = stmt.where(
+ Spool.printer_id.is_(None),
+ Spool.ams_slot.is_(None)
+ )
+
+ # Filter: Bestimmter Drucker
+ if printer_id:
+ stmt = stmt.where(Spool.printer_id == printer_id)
+
+ # Suchterm (sucht in Nummer, Name, Vendor, Farbe)
+ if term:
+ search_pattern = f"%{term}%"
+
+ # Prüfe ob term eine Zahl ist (exakte Nummern-Suche)
+ if term.isdigit():
+ term_int = int(term)
+ stmt = stmt.where(
+ or_(
+ Spool.spool_number == term_int,
+ Spool.name.like(search_pattern),
+ Spool.vendor.like(search_pattern),
+ Spool.color.like(search_pattern)
+ )
+ )
+ else:
+ # Text-Suche
+ stmt = stmt.where(
+ or_(
+ Spool.name.like(search_pattern),
+ Spool.vendor.like(search_pattern),
+ Spool.color.like(search_pattern)
+ )
+ )
+
+ # Sortierung nach Nummer
+ stmt = stmt.order_by(Spool.spool_number)
+
+ spools = session.exec(stmt).all()
+ return [SpoolReadSchema.model_validate(s) for s in spools]
+
+
+# === AMS-ZUWEISUNG ===
+
+class AssignRequest(BaseModel):
+ printer_id: str
+ slot_number: int
+
+
+@router.post("/{spool_number}/assign")
+def assign_to_slot(
+ spool_number: int,
+ data: AssignRequest,
+ session: Session = Depends(get_session)
+):
+ """
+ Weist Spule manuell einem AMS-Slot zu
+
+ Body:
+ {
+ "printer_id": "uuid",
+ "slot_number": 1-4
+ }
+
+ Returns:
+ {
+ "spool_number": 7,
+ "printer_id": "uuid",
+ "slot": 2,
+ "assigned": true
+ }
+
+ Raises:
+ 400: Ungültiger Slot (nicht 1-4)
+ 404: Spule nicht gefunden
+ 409: Spule bereits zugewiesen ODER Slot bereits belegt
+ """
+ # Validierung: Slot 1-4
+ if data.slot_number not in [1, 2, 3, 4]:
+ raise HTTPException(
+ status_code=400,
+ detail="Slot muss 1-4 sein"
+ )
+
+ # Finde Spule
+ spool = session.exec(
+ select(Spool).where(Spool.spool_number == spool_number)
+ ).first()
+
+ if not spool:
+ raise HTTPException(
+ status_code=404,
+ detail=f"Spule #{spool_number} nicht gefunden"
+ )
+
+ # Prüfe ob Spule bereits zugewiesen
+ if spool.printer_id:
+ raise HTTPException(
+ status_code=409,
+ detail=(
+ f"Spule #{spool_number} ist bereits Drucker '{spool.printer_id}' "
+ f"Slot {spool.ams_slot} zugewiesen"
+ )
+ )
+
+ # Prüfe ob Slot frei
+ existing = session.exec(
+ select(Spool).where(
+ Spool.printer_id == data.printer_id,
+ Spool.ams_slot == data.slot_number
+ )
+ ).first()
+
+ if existing:
+ raise HTTPException(
+ status_code=409,
+ detail=f"Slot {data.slot_number} ist bereits mit Spule #{existing.spool_number} belegt"
+ )
+
+ # Prüfe ob Drucker existiert
+ printer = session.get(Printer, data.printer_id)
+ if not printer:
+ raise HTTPException(
+ status_code=404,
+ detail=f"Drucker {data.printer_id} nicht gefunden"
+ )
+
+ # Zuweisen
+ spool.printer_id = data.printer_id
+ spool.ams_slot = data.slot_number
+ spool.updated_at = datetime.utcnow().isoformat()
+
+ # Status-Logik: Nur wenn Spule manuell erstellt wurde (hat spool_number)
+ # UND noch nie im AMS war (used_count = 0 oder None)
+ # → Warte auf ersten Druck, dann wird Status auf "Aktiv" gesetzt
+ # (Status-Änderung erfolgt in mqtt_routes.py beim Job-Start)
+
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ return {
+ "spool_number": spool.spool_number,
+ "printer_id": data.printer_id,
+ "printer_name": printer.name if printer else None,
+ "slot": data.slot_number,
+ "assigned": True
+ }
+
+
+@router.post("/{spool_number}/unassign")
+def unassign_from_slot(
+ spool_number: int,
+ session: Session = Depends(get_session)
+):
+ """
+ Entfernt Spule von AMS-Slot
+
+ Returns:
+ {
+ "spool_number": 7,
+ "assigned": false,
+ "previous_slot": 2
+ }
+
+ Raises:
+ 404: Spule nicht gefunden
+ """
+ spool = session.exec(
+ select(Spool).where(Spool.spool_number == spool_number)
+ ).first()
+
+ if not spool:
+ raise HTTPException(
+ status_code=404,
+ detail=f"Spule #{spool_number} nicht gefunden"
+ )
+
+ last_slot = spool.ams_slot
+
+ # Entfernen
+ spool.printer_id = None
+ spool.ams_slot = None
+ spool.last_slot = last_slot # Merke letzten Slot
+ spool.updated_at = datetime.utcnow().isoformat()
+
+ # Status-Logik: Spule zurück ins Lager
+ # Nur für manuell angelegte Spulen mit Nummer
+ if spool.spool_number:
+ # NUR wenn Spule NICHT leer ist
+ if not spool.is_empty and spool.status == "Aktiv":
+ spool.status = "Lager"
+ # is_open bleibt True, da Spule bereits geöffnet wurde
+ # Wenn is_empty = True → Status bleibt unverändert (z.B. "Leer")
+
+ session.add(spool)
+ session.commit()
+
+ return {
+ "spool_number": spool.spool_number,
+ "assigned": False,
+ "previous_slot": last_slot
+ }
+
+
+# === DRUCKER-SPULEN ===
+
+@router.get("/printer/{printer_id}/slots")
+def get_printer_slots(
+ printer_id: str,
+ session: Session = Depends(get_session)
+):
+ """
+ Gibt alle AMS-Slots eines Druckers zurück (1-4)
+
+ Zeigt auch leere Slots an für bessere UI-Darstellung.
+
+ Returns:
+ {
+ "slots": [
+ {
+ "slot": 1,
+ "spool": {...}, # oder null wenn leer
+ "empty": false
+ },
+ ...
+ ]
+ }
+ """
+ # Prüfe ob Drucker existiert
+ printer = session.get(Printer, printer_id)
+ if not printer:
+ raise HTTPException(
+ status_code=404,
+ detail=f"Drucker {printer_id} nicht gefunden"
+ )
+
+ # Hole zugewiesene Spulen
+ spools = session.exec(
+ select(Spool)
+ .where(Spool.printer_id == printer_id)
+ .order_by(Spool.ams_slot)
+ ).all()
+
+ # Erstelle Slot-Array (1-4)
+ slots = []
+ for slot_num in [1, 2, 3, 4]:
+ slot_spool = next(
+ (s for s in spools if s.ams_slot == slot_num),
+ None
+ )
+
+ if slot_spool:
+ slots.append({
+ "slot": slot_num,
+ "spool": SpoolReadSchema.model_validate(slot_spool),
+ "empty": False
+ })
+ else:
+ slots.append({
+ "slot": slot_num,
+ "spool": None,
+ "empty": True
+ })
+
+ return {"slots": slots}
diff --git a/app/routes/spools.py b/app/routes/spools.py
index 3e7adb5..47e6ea1 100644
--- a/app/routes/spools.py
+++ b/app/routes/spools.py
@@ -1,57 +1,209 @@
-from fastapi import APIRouter, Depends, HTTPException
-from sqlmodel import select, Session
+from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi.responses import Response
+from sqlmodel import select, Session, col
from typing import List
from app.database import get_session
-from app.models.spool import Spool, SpoolCreate, SpoolRead
+from app.models.spool import Spool, SpoolCreateSchema, SpoolUpdateSchema, SpoolReadSchema
+from app.services.spool_number_service import assign_spool_number
router = APIRouter(prefix="/api/spools", tags=["Spools"])
+def _normalize_spool_payload(data: SpoolCreateSchema | SpoolUpdateSchema, *, is_update: bool = False) -> dict:
+ payload = data.model_dump(exclude_unset=True)
+ # Color wird jetzt persistiert (Teil des Nummern-Systems)
+ # payload.pop("color", None) - ENTFERNT
+ # alias weight -> weight_current
+ if "weight" in payload:
+ payload["weight_current"] = payload.pop("weight")
+ # normalize printer_slot strings like "AMS-2"
+ slot = payload.get("printer_slot")
+ if isinstance(slot, str):
+ digits = "".join(filter(str.isdigit, slot))
+ payload["printer_slot"] = int(digits) if digits else None
+ ams_slot = payload.get("ams_slot")
+ if isinstance(ams_slot, str):
+ digits = "".join(filter(str.isdigit, ams_slot))
+ payload["ams_slot"] = int(digits) if digits else None
+ if not is_update:
+ payload.setdefault("weight_full", 1000)
+ payload.setdefault("weight_empty", 250)
+ # Falls kein aktuelles Gewicht explizit gesetzt wurde, auf weight_full setzen
+ if payload.get("weight_current") is None:
+ payload["weight_current"] = payload.get("weight_full")
+ # Neue Spulen: remain_percent auf 100% setzen (nicht leer)
+ if payload.get("remain_percent") is None:
+ payload["remain_percent"] = 100.0
+ # Neue Spulen: is_open auf True setzen (geöffnet)
+ if "is_open" not in payload:
+ payload["is_open"] = True
+ return payload
-@router.get("/", response_model=List[SpoolRead])
+
+@router.get("/", response_model=List[SpoolReadSchema])
def list_spools(session: Session = Depends(get_session)):
result = session.exec(select(Spool)).all()
- return result
+ return [SpoolReadSchema.model_validate(s) for s in result]
+
+
+@router.get("/unnumbered", response_model=List[SpoolReadSchema])
+def list_unnumbered_spools(session: Session = Depends(get_session)):
+ """
+ Gibt alle Spulen zurück, die KEINE Nummer haben
+ Nützlich für Benachrichtigungen: "Neue Spule im AMS erkannt - Bitte Nummer vergeben"
+ """
+ stmt = select(Spool).where(col(Spool.spool_number).is_(None))
+ result = session.exec(stmt).all()
+ return [SpoolReadSchema.model_validate(s) for s in result]
-@router.get("/{spool_id}", response_model=SpoolRead)
+
+@router.get("/{spool_id}", response_model=SpoolReadSchema)
def get_spool(spool_id: str, session: Session = Depends(get_session)):
spool = session.get(Spool, spool_id)
if not spool:
raise HTTPException(status_code=404, detail="Spule nicht gefunden")
- return spool
+ return SpoolReadSchema.model_validate(spool)
-@router.post("/", response_model=SpoolRead)
-def create_spool(data: SpoolCreate, session: Session = Depends(get_session)):
- spool = Spool.from_orm(data)
- session.add(spool)
- session.commit()
- session.refresh(spool)
- return spool
+@router.post("/", response_model=SpoolReadSchema, status_code=status.HTTP_201_CREATED)
+def create_spool(data: SpoolCreateSchema, session: Session = Depends(get_session)):
+ # Prüfe auf Duplikate nur wenn label gesetzt ist
+ if data.label:
+ exists = session.exec(select(Spool).where(Spool.label == data.label, Spool.material_id == data.material_id)).first()
+ if exists:
+ raise HTTPException(status_code=409, detail="Spule mit dieser Bezeichnung existiert bereits")
+ try:
+ payload = _normalize_spool_payload(data)
+ spool = Spool(**payload)
+
+ # NEU: Automatisch Spulen-Nummer zuweisen
+ assign_spool_number(spool, session)
+
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+ return SpoolReadSchema.model_validate(spool)
+ except Exception as e:
+ raise HTTPException(status_code=400, detail=f"Fehler bei Validierung: {e}")
-@router.put("/{spool_id}", response_model=SpoolRead)
-def update_spool(spool_id: str, data: SpoolCreate, session: Session = Depends(get_session)):
+@router.put("/{spool_id}", response_model=SpoolReadSchema)
+def update_spool(spool_id: str, data: SpoolUpdateSchema, session: Session = Depends(get_session)):
spool = session.get(Spool, spool_id)
if not spool:
raise HTTPException(status_code=404, detail="Spule nicht gefunden")
-
- update_data = data.dict(exclude_unset=True)
+ update_data = _normalize_spool_payload(data, is_update=True)
+ # Schutz: Nummer darf nur freigegeben werden, wenn Spule leer ist
+ if "spool_number" in update_data and update_data.get("spool_number") is None:
+ next_is_empty = update_data.get("is_empty", spool.is_empty)
+ if not next_is_empty:
+ update_data.pop("spool_number", None)
for key, value in update_data.items():
setattr(spool, key, value)
+ # AUTOMATISCHE NUMMERN-FREIGABE: Wenn Spule leer wird, Nummer entfernen
+ if spool.is_empty and spool.spool_number is not None:
+ spool.spool_number = None
+
+ try:
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+ return SpoolReadSchema.model_validate(spool)
+ except Exception as e:
+ raise HTTPException(status_code=400, detail=f"Fehler bei Validierung: {e}")
+
+
+@router.delete("/{spool_id}", status_code=status.HTTP_204_NO_CONTENT)
+def delete_spool(spool_id: str, session: Session = Depends(get_session)):
+ spool = session.get(Spool, spool_id)
+ if not spool:
+ raise HTTPException(status_code=404, detail="Spule nicht gefunden")
+ session.delete(spool)
+ session.commit()
+ return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+
+@router.post("/{spool_id}/assign", response_model=SpoolReadSchema)
+def assign_spool_to_slot(
+ spool_id: str,
+ printer_id: str,
+ slot_number: int,
+ session: Session = Depends(get_session)
+):
+ """
+ Weist eine Spule einem AMS-Slot zu
+
+ POST /api/spools/{spool_id}/assign?printer_id=xxx&slot_number=1
+ """
+ # Validierung: Slot muss 1-4 sein
+ if slot_number not in [1, 2, 3, 4]:
+ raise HTTPException(status_code=400, detail="Slot muss 1-4 sein")
+
+ # Finde Spule
+ spool = session.get(Spool, spool_id)
+ if not spool:
+ raise HTTPException(status_code=404, detail="Spule nicht gefunden")
+
+ # Prüfe ob Spule bereits zugewiesen
+ if spool.printer_id is not None:
+ raise HTTPException(
+ status_code=409,
+ detail=f"Spule ist bereits Drucker '{spool.printer_id}' Slot {spool.ams_slot} zugewiesen"
+ )
+
+ # Prüfe ob Slot frei
+ stmt = select(Spool).where(
+ Spool.printer_id == printer_id,
+ Spool.ams_slot == slot_number
+ )
+ existing = session.exec(stmt).first()
+
+ if existing:
+ raise HTTPException(
+ status_code=409,
+ detail=f"Slot {slot_number} ist bereits mit Spule belegt"
+ )
+
+ # Zuweisen
+ spool.printer_id = printer_id
+ spool.ams_slot = slot_number
+
session.add(spool)
session.commit()
session.refresh(spool)
- return spool
+ return SpoolReadSchema.model_validate(spool)
-@router.delete("/{spool_id}")
-def delete_spool(spool_id: str, session: Session = Depends(get_session)):
+
+@router.post("/{spool_id}/unassign", response_model=SpoolReadSchema)
+def unassign_spool(spool_id: str, session: Session = Depends(get_session)):
+ """
+ Entfernt eine Spule aus einem AMS-Slot
+
+ POST /api/spools/{spool_id}/unassign
+ """
spool = session.get(Spool, spool_id)
if not spool:
raise HTTPException(status_code=404, detail="Spule nicht gefunden")
- session.delete(spool)
+
+ # Merke letzten Slot
+ if spool.ams_slot is not None:
+ spool.last_slot = spool.ams_slot
+
+ # Entferne Zuweisung
+ spool.printer_id = None
+ spool.ams_slot = None
+
+ # Status-Logik: Spule zurück ins Lager
+ # Wenn Spule nicht leer ist und Status "Aktiv" war, zurück auf "Lager" setzen
+ if not spool.is_empty and spool.status == "Aktiv":
+ spool.status = "Lager"
+ # is_open bleibt True, da Spule bereits geöffnet wurde
+
+ session.add(spool)
session.commit()
- return {"status": "deleted"}
+ session.refresh(spool)
+
+ return SpoolReadSchema.model_validate(spool)
diff --git a/app/routes/statistics_routes.py b/app/routes/statistics_routes.py
new file mode 100644
index 0000000..32842e6
--- /dev/null
+++ b/app/routes/statistics_routes.py
@@ -0,0 +1,273 @@
+from datetime import datetime, timedelta
+from typing import List, Dict, Any
+
+from fastapi import APIRouter, Depends
+from sqlmodel import Session, select
+
+from app.database import get_session
+from app.models.job import Job
+from app.models.printer import Printer
+from app.models.spool import Spool
+from app.models.material import Material
+from app.models.settings import Setting
+
+router = APIRouter(prefix="/api/statistics", tags=["statistics"])
+
+
+DEFAULT_POWER_KW = 0.30 # fallback when printer has no power_consumption_kw
+
+
+def _job_duration_hours(job: Job, now: datetime) -> float:
+ start = _coerce_dt(job.started_at, now)
+ end = _coerce_dt(job.finished_at, now)
+ return max((end - start).total_seconds(), 0) / 3600.0
+
+
+def _energy_for_job(job: Job, printers: Dict[str, Printer], now: datetime) -> tuple[float, float]:
+ """Returns (energy_exact_kwh, energy_est_kwh)."""
+ duration_h = _job_duration_hours(job, now)
+ printer = printers.get(job.printer_id)
+ power = printer.power_consumption_kw if printer else None
+ if power is not None:
+ return power * duration_h, 0.0
+ return 0.0, DEFAULT_POWER_KW * duration_h
+
+
+def _coerce_dt(value: Any, now: datetime) -> datetime:
+ if isinstance(value, datetime):
+ return value
+ if isinstance(value, str):
+ try:
+ return datetime.fromisoformat(value)
+ except ValueError:
+ return now
+ return now
+
+
+@router.get("/timeline")
+def timeline(days: int = 30, session: Session = Depends(get_session)):
+ now = datetime.utcnow()
+ since = now - timedelta(days=days)
+ jobs = session.exec(select(Job).where(Job.started_at >= since)).all()
+ printers = {p.id: p for p in session.exec(select(Printer)).all()}
+
+ buckets: Dict[str, Dict[str, float]] = {}
+ for job in jobs:
+ day = _coerce_dt(job.started_at, now).date().isoformat()
+ b = buckets.setdefault(day, {"jobs": 0, "filament_g": 0.0, "duration_h": 0.0, "energy_kwh": 0.0})
+ b["jobs"] += 1
+ b["filament_g"] += job.filament_used_g or 0.0
+ duration_h = _job_duration_hours(job, now)
+ b["duration_h"] += duration_h
+ exact, est = _energy_for_job(job, printers, now)
+ b["energy_kwh"] += exact + est
+
+ data = [
+ {
+ "date": day,
+ "jobs": int(vals["jobs"]),
+ "filament_g": round(vals["filament_g"], 2),
+ "duration_h": round(vals["duration_h"], 2),
+ "energy_kwh": round(vals["energy_kwh"], 3),
+ }
+ for day, vals in sorted(buckets.items())
+ ]
+ return {"days": days, "data": data}
+
+
+@router.get("/timeline-by-material")
+def timeline_by_material(days: int = 30, session: Session = Depends(get_session)):
+ """Timeline gruppiert nach Material-Typ"""
+ now = datetime.utcnow()
+ since = now - timedelta(days=days)
+ jobs = session.exec(select(Job).where(Job.started_at >= since)).all()
+ spools = {s.id: s for s in session.exec(select(Spool)).all()}
+ materials = {m.id: m for m in session.exec(select(Material)).all()}
+
+ # Structure: buckets[date][material_name] = weight_g
+ buckets: Dict[str, Dict[str, float]] = {}
+ material_names = set()
+
+ for job in jobs:
+ day = _coerce_dt(job.started_at, now).date().isoformat()
+ # Try to get material from spool_id
+ material_name = "Unbekannt"
+ if job.spool_id:
+ spool = spools.get(job.spool_id)
+ if spool and spool.material_id:
+ mat = materials.get(spool.material_id)
+ if mat:
+ material_name = mat.name
+
+ material_names.add(material_name)
+ b = buckets.setdefault(day, {})
+ b[material_name] = b.get(material_name, 0.0) + (job.filament_used_g or 0.0)
+
+ # Build dataset structure for Chart.js
+ dates = sorted(buckets.keys())
+ datasets = {}
+ for mat_name in sorted(material_names):
+ datasets[mat_name] = [buckets[d].get(mat_name, 0.0) for d in dates]
+
+ return {
+ "days": days,
+ "dates": dates,
+ "datasets": [{"material": k, "data": v} for k, v in datasets.items()]
+ }
+
+
+@router.get("/timeline-costs")
+def timeline_costs(days: int = 30, session: Session = Depends(get_session)):
+ """Kosten-Entwicklung über Zeit"""
+ now = datetime.utcnow()
+ since = now - timedelta(days=days)
+ jobs = session.exec(select(Job).where(Job.started_at >= since)).all()
+ printers = {p.id: p for p in session.exec(select(Printer)).all()}
+ price_setting = session.exec(select(Setting).where(Setting.key == "cost.electricity_price_kwh")).first()
+ price_kwh = float(price_setting.value) if price_setting and price_setting.value else 0.30
+
+ buckets: Dict[str, float] = {}
+ cumulative_cost = 0.0
+ daily_cumulative: Dict[str, float] = {}
+
+ for job in sorted(jobs, key=lambda j: _coerce_dt(j.started_at, now)):
+ day = _coerce_dt(job.started_at, now).date().isoformat()
+ exact, est = _energy_for_job(job, printers, now)
+ energy_kwh = exact + est
+ cost = energy_kwh * price_kwh
+ buckets[day] = buckets.get(day, 0.0) + cost
+ cumulative_cost += cost
+ daily_cumulative[day] = cumulative_cost
+
+ dates = sorted(buckets.keys())
+ return {
+ "days": days,
+ "dates": dates,
+ "daily_cost": [round(buckets[d], 2) for d in dates],
+ "cumulative_cost": [round(daily_cumulative[d], 2) for d in dates],
+ }
+
+
+@router.get("/heatmap")
+def heatmap(days: int = 90, session: Session = Depends(get_session)):
+ """Heatmap-Daten für Druckaktivität"""
+ now = datetime.utcnow()
+ since = now - timedelta(days=days)
+ jobs = session.exec(select(Job).where(Job.started_at >= since)).all()
+
+ activity: Dict[str, Dict[str, Any]] = {}
+ for job in jobs:
+ day = _coerce_dt(job.started_at, now).date().isoformat()
+ a = activity.setdefault(day, {"jobs": 0, "filament_g": 0.0, "duration_h": 0.0})
+ a["jobs"] += 1
+ a["filament_g"] += job.filament_used_g or 0.0
+ duration_h = _job_duration_hours(job, now)
+ a["duration_h"] += duration_h
+
+ # Generate all dates in range
+ all_dates = []
+ for i in range(days):
+ date = (now - timedelta(days=days - i - 1)).date()
+ all_dates.append(date.isoformat())
+
+ result = []
+ for date in all_dates:
+ data = activity.get(date, {"jobs": 0, "filament_g": 0.0, "duration_h": 0.0})
+ result.append({
+ "date": date,
+ "jobs": data["jobs"],
+ "filament_g": round(data["filament_g"], 1),
+ "duration_h": round(data["duration_h"], 2),
+ })
+
+ return {"days": days, "data": result}
+
+
+@router.get("/by-printer")
+def by_printer(session: Session = Depends(get_session)):
+ now = datetime.utcnow()
+ jobs = session.exec(select(Job)).all()
+ printers = {p.id: p for p in session.exec(select(Printer)).all()}
+
+ agg: Dict[str, Dict[str, Any]] = {}
+ for job in jobs:
+ pid = job.printer_id
+ if not pid:
+ continue
+ printer = printers.get(pid)
+ b = agg.setdefault(pid, {
+ "printer_id": pid,
+ "printer_name": printer.name if printer else "Unbekannt",
+ "jobs": 0,
+ "filament_g": 0.0,
+ "duration_h": 0.0,
+ "energy_kwh": 0.0,
+ })
+ b["jobs"] += 1
+ b["filament_g"] += job.filament_used_g or 0.0
+ duration_h = _job_duration_hours(job, now)
+ b["duration_h"] += duration_h
+ exact, est = _energy_for_job(job, printers, now)
+ b["energy_kwh"] += exact + est
+
+ return list(agg.values())
+
+
+@router.get("/by-material")
+def by_material(session: Session = Depends(get_session)):
+ materials = {m.id: m for m in session.exec(select(Material)).all()}
+ spools = {s.id: s for s in session.exec(select(Spool)).all()}
+ jobs = session.exec(select(Job)).all()
+
+ agg: Dict[str, Dict[str, Any]] = {}
+
+ # Aggregate from jobs (actual usage)
+ for job in jobs:
+ if not job.spool_id:
+ continue
+ spool = spools.get(job.spool_id)
+ if not spool or not spool.material_id:
+ continue
+ mid = spool.material_id
+ mat = materials.get(mid)
+ b = agg.setdefault(mid, {
+ "material_id": mid,
+ "material_name": mat.name if mat else "Unbekannt",
+ "brand": mat.brand if mat else None,
+ "color": getattr(mat, "color", None),
+ "spools": 0,
+ "total_weight_g": 0.0,
+ })
+ b["total_weight_g"] += job.filament_used_g or 0.0
+
+ # Count unique spools per material
+ spool_counts: Dict[str, set] = {}
+ for spool in spools.values():
+ if spool.material_id:
+ spool_counts.setdefault(spool.material_id, set()).add(spool.id)
+
+ for mid, b in agg.items():
+ b["spools"] = len(spool_counts.get(mid, set()))
+
+ return list(agg.values())
+
+
+@router.get("/costs")
+def costs(session: Session = Depends(get_session)):
+ now = datetime.utcnow()
+ jobs = session.exec(select(Job)).all()
+ printers = {p.id: p for p in session.exec(select(Printer)).all()}
+ price_setting = session.exec(select(Setting).where(Setting.key == "cost.electricity_price_kwh")).first()
+ price_kwh = float(price_setting.value) if price_setting and price_setting.value else None
+
+ energy_total = 0.0
+ for job in jobs:
+ exact, est = _energy_for_job(job, printers, now)
+ energy_total += exact + est
+
+ energy_cost = energy_total * price_kwh if price_kwh is not None else None
+ return {
+ "energy_kwh_total": round(energy_total, 3),
+ "energy_cost_total": round(energy_cost, 2) if energy_cost is not None else None,
+ "energy_price_kwh": price_kwh,
+ }
diff --git a/app/routes/system_routes.py b/app/routes/system_routes.py
new file mode 100644
index 0000000..f76eb25
--- /dev/null
+++ b/app/routes/system_routes.py
@@ -0,0 +1,187 @@
+import time
+import time
+import os
+import psutil
+import yaml
+import logging
+import inspect
+import platform
+import shutil
+from fastapi import APIRouter, HTTPException
+from pydantic import BaseModel
+from app.database import get_session
+from app.routes.config_routes import _load_config
+
+router = APIRouter(prefix="/api/system", tags=["System Status"])
+
+START_TIME = time.time()
+
+
+# -----------------------------
+# CONFIG LADEN
+# -----------------------------
+def load_config():
+ with open("config.yaml", "r", encoding="utf-8") as f:
+ return yaml.safe_load(f)
+
+
+def save_config(config: dict) -> None:
+ try:
+ logger = logging.getLogger('app')
+ caller = None
+ try:
+ fr = inspect.stack()[1]
+ caller = f"{fr.filename}:{fr.lineno} in {fr.function}"
+ except Exception:
+ caller = "unknown"
+ logger.info(f"Writing config.yaml (system_routes.save_config) called from {caller}")
+ except Exception:
+ pass
+ with open("config.yaml", "w", encoding="utf-8") as f:
+ yaml.dump(config, f, default_flow_style=False, allow_unicode=True)
+
+
+class ModeUpdate(BaseModel):
+ mode: str
+
+
+# -----------------------------
+# UPTIME FORMATTIERUNG
+# -----------------------------
+def format_uptime(seconds):
+ m, s = divmod(int(seconds), 60)
+ h, m = divmod(m, 60)
+ return f"{h:02d}:{m:02d}:{s:02d}"
+
+
+# -----------------------------
+# DRUCKER-ERKENNUNG (Dummy)
+# Wird später erweitert
+# -----------------------------
+def detect_printer_mode(config):
+ return config.get("integrations", {}).get("mode", "bambu")
+
+
+# -----------------------------
+# SYSTEMSTATUS API
+# -----------------------------
+@router.get("/status")
+def system_status():
+ cfg_yaml = load_config()
+ cfg_settings = None
+ try:
+ with next(get_session()) as session:
+ cfg_settings = _load_config(session)
+ except Exception:
+ cfg_settings = None
+
+ # APP BLOCK
+ app_info = {
+ "name": cfg_yaml["app"]["name"],
+ "version": cfg_yaml["app"]["version"],
+ "environment": cfg_yaml["app"]["environment"],
+ "uptime": format_uptime(time.time() - START_TIME),
+ }
+
+ # LOGGING BLOCK
+ logging_cfg = (cfg_settings or {}).get("logging") or cfg_yaml.get("logging", {})
+ logging_status = (cfg_settings or {}).get("logging_status", {})
+ logging_info = {
+ "level": logging_cfg.get("level", cfg_yaml.get("logging", {}).get("level", "basic")),
+ "modules": {},
+ }
+ if logging_status:
+ logging_info["modules"] = {name: bool(val) for name, val in logging_status.items()}
+ else:
+ logging_info["modules"] = {
+ name: cfg.get("enabled", False) for name, cfg in cfg_yaml.get("logging", {}).get("modules", {}).items()
+ }
+
+ # SYSTEM BLOCK (CPU/RAM/DISK)
+ vm = psutil.virtual_memory()
+ disk = shutil.disk_usage(".")
+
+ system_info = {
+ "cpu_percent": psutil.cpu_percent(interval=0.2),
+ "cpu_count": psutil.cpu_count(),
+ "ram_percent": vm.percent,
+ "ram_total_gb": round(vm.total / (1024**3), 2),
+ "ram_used_gb": round(vm.used / (1024**3), 2),
+ "ram_free_gb": round(vm.available / (1024**3), 2),
+ "disk_percent": round((disk.used / disk.total) * 100, 1),
+ "disk_total_gb": round(disk.total / (1024**3), 2),
+ "disk_used_gb": round(disk.used / (1024**3), 2),
+ "disk_free_gb": round(disk.free / (1024**3), 2),
+ "platform": platform.system(),
+ "platform_release": platform.release(),
+ "architecture": platform.machine()
+ }
+
+
+ # ECHTER ONLINE-STATUS AUS DB
+ import socket
+ import httpx
+ from app.models.printer import Printer
+ from sqlmodel import select
+ bambu_status = "offline"
+ klipper_status = "offline"
+ bambu_active = 0
+ klipper_active = 0
+ mode = detect_printer_mode(cfg_yaml)
+ try:
+ with next(get_session()) as session:
+ printers = session.exec(select(Printer).where(Printer.active == True)).all() # noqa: E712
+ for p in printers:
+ if p.printer_type in ["bambu", "bambu_lab"]:
+ bambu_active += 1
+ if p.printer_type == "klipper":
+ klipper_active += 1
+ if p.printer_type in ["bambu", "bambu_lab"]:
+ # Live-Test wie in printers.py
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(2)
+ res = sock.connect_ex((p.ip_address, p.port or 6000))
+ sock.close()
+ if res == 0:
+ bambu_status = "online"
+ except Exception:
+ pass
+ if p.printer_type == "klipper":
+ port = p.port or 7125
+ url = f"http://{p.ip_address}:{port}/server/info"
+ try:
+ r = httpx.get(url, timeout=2)
+ if r.status_code == 200:
+ klipper_status = "online"
+ except Exception:
+ pass
+ except Exception:
+ pass
+ printer_info = {
+ "bambu": bambu_status,
+ "klipper": klipper_status,
+ "mode": mode,
+ "bambu_active": bambu_active,
+ "klipper_active": klipper_active,
+ }
+
+ return {
+ "app": app_info,
+ "logging": logging_info,
+ "system": system_info,
+ "printers": printer_info
+ }
+
+
+@router.post("/mode")
+def set_mode(update: ModeUpdate):
+ allowed = {"bambu", "klipper", "dual", "standalone"}
+ mode = update.mode.lower()
+ if mode not in allowed:
+ raise HTTPException(status_code=400, detail=f"Ungültiger Modus. Erlaubt: {', '.join(allowed)}")
+ cfg = load_config()
+ cfg.setdefault("integrations", {})
+ cfg["integrations"]["mode"] = mode
+ save_config(cfg)
+ return {"success": True, "mode": mode}
diff --git a/app/services/__init__.py b/app/services/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/app/services/__init__.py
@@ -0,0 +1 @@
+
diff --git a/app/services/ams_normalizer.py b/app/services/ams_normalizer.py
new file mode 100644
index 0000000..eb35dfd
--- /dev/null
+++ b/app/services/ams_normalizer.py
@@ -0,0 +1,164 @@
+import logging
+from typing import Any, Dict, List, Optional
+
+from app.services.ams_parser import AMSUnit, Tray, parse_ams
+
+logger = logging.getLogger(__name__)
+
+
+def _safe_get(d: Optional[Dict[str, Any]], k: str, default: Any = None) -> Any:
+ if not isinstance(d, dict):
+ return default
+ return d.get(k, default)
+
+
+def _to_int(value: Any) -> Optional[int]:
+ try:
+ if value is None:
+ return None
+ return int(value)
+ except Exception:
+ return None
+
+
+def _to_float(value: Any) -> Optional[float]:
+ try:
+ if value is None:
+ return None
+ return float(value)
+ except Exception:
+ return None
+
+
+def _rssi_to_percent(value: Any) -> Optional[str]:
+ """Convert RSSI strings like '-42dBm' or numeric values to a human-friendly percent string.
+
+ Returns e.g. '83%' or None if parsing failed.
+ Mapping uses range -100..-30 dBm -> 0..100%.
+ """
+ if value is None:
+ return None
+ try:
+ # Accept formats like '-42dBm' or '-42'
+ s = str(value).strip()
+ # strip trailing 'dBm' or similar
+ if s.lower().endswith('dbm'):
+ s = s[:-3]
+ # remove any non-digit/sign characters
+ import re
+ m = re.search(r"-?\d+", s)
+ if not m:
+ return None
+ rssi = int(m.group(0))
+ # clamp
+ rssi = max(-100, min(-30, rssi))
+ percent = int(round((rssi + 100) / 70.0 * 100))
+ return f"{percent}%"
+ except Exception:
+ return None
+
+
+def _normalize_tray(tray: Tray) -> Dict[str, Optional[Any]]:
+ # Normalize numeric fields where possible and keep names consistent for frontend
+ slot = None
+ if isinstance(tray, dict):
+ raw_slot = None
+ if "tray_id" in tray and tray.get("tray_id") is not None:
+ raw_slot = tray.get("tray_id")
+ elif "id" in tray and tray.get("id") is not None:
+ raw_slot = tray.get("id")
+ slot = _to_int(raw_slot)
+
+ remain = None
+ if isinstance(tray, dict):
+ # parser already tries multiple keys; here defensively parse common ones
+ remain = _to_float(tray.get("remain") or tray.get("remain_percent") or tray.get("remain_weight"))
+
+ total_len = None
+ if isinstance(tray, dict):
+ total_len = _to_int(tray.get("total_len"))
+
+ nozzle_min = None
+ nozzle_max = None
+ if isinstance(tray, dict):
+ nozzle_min = _to_float(tray.get("nozzle_temp_min") or tray.get("nozzle_min") or tray.get("nozzle_temp_min"))
+ nozzle_max = _to_float(tray.get("nozzle_temp_max") or tray.get("nozzle_max") or tray.get("nozzle_temp_max"))
+
+ return {
+ "slot": slot,
+ "tray_uuid": tray.get("tray_uuid") if isinstance(tray, dict) else None,
+ "tag_uid": tray.get("tag_uid") if isinstance(tray, dict) else None,
+ "remain_weight": None,
+ "remain_percent": remain,
+ "total_len": total_len,
+ "nozzle_temp_min": nozzle_min,
+ "nozzle_temp_max": nozzle_max,
+ }
+
+
+def _normalize_ams_unit(ams_unit: AMSUnit) -> Dict[str, Any]:
+ return {
+ "ams_id": ams_unit.get("ams_id"),
+ "temp": ams_unit.get("temp"),
+ "humidity": ams_unit.get("humidity"),
+ "active_tray": ams_unit.get("active_tray"),
+ "trays": [_normalize_tray(t) for t in (ams_unit.get("trays") or [])],
+ }
+
+
+def normalize_device(device_entry: Dict[str, Any]) -> Dict[str, Any]:
+ device_serial = _safe_get(device_entry, "device")
+ ts = _safe_get(device_entry, "ts")
+ payload = _safe_get(device_entry, "payload", {}) or {}
+
+ try:
+ ams_units = parse_ams(payload) or []
+ except Exception:
+ logger.exception("parse_ams failed for device %s", device_serial)
+ ams_units = []
+
+ normalized_ams = []
+ for u in ams_units:
+ try:
+ normalized_ams.append(_normalize_ams_unit(u))
+ except Exception:
+ logger.exception("Failed to normalize AMS unit for device %s: %s", device_serial, u)
+
+ return {
+ "device_serial": device_serial,
+ "ts": ts,
+ "online": bool(payload),
+ # Firmware/version info (best-effort)
+ "firmware": (
+ (payload.get("upgrade_state") or {}).get("ota_new_version_number")
+ or payload.get("ver")
+ or payload.get("version")
+ or payload.get("fw")
+ or payload.get("fw_version")
+ or _safe_get(payload, "device", {}).get("ver") if isinstance(_safe_get(payload, "device", {}), dict) else None
+ or _safe_get(payload, "device", {}).get("online", {}).get("version") if isinstance(_safe_get(payload, "device", {}), dict) else None
+ ),
+ # Signal quality (wifi RSSI, etc.) — present as dBm or numeric string in various keys
+ "signal": _rssi_to_percent(
+ payload.get("wifi_signal")
+ or payload.get("signal")
+ or _safe_get(payload, "device", {}).get("wifi_signal") if isinstance(_safe_get(payload, "device", {}), dict) else None
+ ),
+ "ams_units": normalized_ams,
+ }
+
+
+def normalize_live_state(live_state: Optional[Dict[str, Dict[str, Any]]]) -> Dict[str, List[Dict[str, Any]]]:
+ devices: List[Dict[str, Any]] = []
+ for device_id, entry in (live_state or {}).items():
+ try:
+ devices.append(normalize_device(entry))
+ except Exception:
+ logger.exception("Failed to normalize device %s", device_id)
+ devices.append({
+ "device_serial": device_id,
+ "ts": entry.get("ts") if isinstance(entry, dict) else None,
+ "online": False,
+ "ams_units": [],
+ })
+ return {"devices": devices}
diff --git a/app/services/ams_parser.py b/app/services/ams_parser.py
new file mode 100644
index 0000000..d923a63
--- /dev/null
+++ b/app/services/ams_parser.py
@@ -0,0 +1,150 @@
+"""Helpers to extract AMS data from Bambu report payloads."""
+from typing import Any, Dict, List, Optional, TypedDict
+
+__all__ = ["Tray", "AMSUnit", "parse_ams", "parse_active_tray"]
+
+
+class Tray(TypedDict, total=False):
+ tray_id: int
+ name: Optional[str]
+ material: Optional[str]
+ color: Optional[str]
+ remain: Optional[float]
+ humidity: Optional[float]
+ temp: Optional[float]
+ status: Optional[str]
+ tag_uid: Optional[str]
+ tray_uuid: Optional[str]
+ total_len: Optional[int]
+
+
+class AMSUnit(TypedDict, total=False):
+ ams_id: int
+ active_tray: Optional[int]
+ trays: List[Tray]
+ humidity: Optional[float]
+ temp: Optional[float]
+ status: Optional[str]
+
+
+def _first_defined(*values: Any) -> Any:
+ for value in values:
+ if value is not None:
+ return value
+ return None
+
+
+def _to_int(value: Any) -> Optional[int]:
+ try:
+ if value is None:
+ return None
+ return int(value)
+ except Exception:
+ return None
+
+
+def _to_float(value: Any) -> Optional[float]:
+ try:
+ if value is None:
+ return None
+ return float(value)
+ except Exception:
+ return None
+
+
+def _dict_lookup(obj: Dict[str, Any], *keys: str, default: Any = None) -> Any:
+ current: Any = obj
+ for key in keys:
+ if not isinstance(current, dict):
+ return default
+ current = current.get(key)
+ return default if current is None else current
+
+
+def _parse_tray(raw: Dict[str, Any]) -> Tray:
+ return Tray(
+ tray_id=_first_defined(_to_int(raw.get("tray_id")), _to_int(raw.get("id")), 0),
+ name=_first_defined(raw.get("tray_name"), raw.get("name")),
+ material=_first_defined(raw.get("tray_sub_brands"), raw.get("tray_type"), raw.get("material")),
+ color=_first_defined(raw.get("tray_color"), raw.get("color")),
+ remain=_first_defined(_to_float(raw.get("remain")), _to_float(raw.get("remain_percent")), _to_float(raw.get("remain_weight"))),
+ humidity=raw.get("humidity"),
+ temp=_first_defined(_to_float(raw.get("temp")), _to_float(raw.get("temperature"))),
+ status=raw.get("status"),
+ **{
+ "tag_uid": raw.get("tag_uid"),
+ "tray_uuid": raw.get("tray_uuid"),
+ "total_len": _to_int(raw.get("total_len"))
+ }
+ )
+
+
+def parse_ams(report_payload: Dict[str, Any]) -> List[AMSUnit]:
+ """Extract AMS units and trays from a device//report payload."""
+ # Some firmwares nest AMS under "print" -> "ams", others at the root
+ ams_root = _dict_lookup(report_payload, "ams") or _dict_lookup(report_payload, "print", "ams") or {}
+ ams_list = ams_root.get("ams") or ams_root.get("modules") or []
+ if not isinstance(ams_list, list):
+ return []
+
+ result: List[AMSUnit] = []
+ for ams in ams_list:
+ if not isinstance(ams, dict):
+ continue
+ trays_raw = ams.get("tray") or ams.get("trays") or []
+ trays: List[Tray] = []
+ if isinstance(trays_raw, list):
+ for entry in trays_raw:
+ if isinstance(entry, dict):
+ trays.append(_parse_tray(entry))
+
+ result.append(
+ AMSUnit(
+ ams_id=_first_defined(_to_int(ams.get("ams_id")), _to_int(ams.get("id")), 0),
+ active_tray=_first_defined(
+ _to_int(ams.get("active_tray")),
+ _to_int(ams.get("active_slot")),
+ _to_int(ams_root.get("active_tray")),
+ _to_int(ams_root.get("active_slot")),
+ _to_int(ams_root.get("tray_now")),
+ _to_int(ams_root.get("tray_tar")),
+ ),
+ trays=trays,
+ humidity=ams.get("humidity"),
+ temp=_first_defined(_to_float(ams.get("temp")), _to_float(ams.get("temperature"))),
+ status=ams.get("status"),
+ )
+ )
+
+ return result
+
+
+def parse_active_tray(report_payload: Dict[str, Any]) -> Optional[int]:
+ """Convenience helper to grab the active tray from a report payload."""
+ ams_root = report_payload.get("ams") or _dict_lookup(report_payload, "print", "ams") or {}
+ return _first_defined(
+ _to_int(ams_root.get("active_tray")),
+ _to_int(ams_root.get("active_slot")),
+ _to_int(ams_root.get("tray_now")),
+ _to_int(ams_root.get("tray_tar")),
+ )
+
+
+def main() -> None:
+ """Simple CLI: python -m app.services.ams_parser report.json > parsed.json"""
+ import json
+ import sys
+
+ if len(sys.argv) < 2:
+ data = json.load(sys.stdin)
+ else:
+ with open(sys.argv[1], "r", encoding="utf-8") as handle:
+ data = json.load(handle)
+
+ parsed = parse_ams(data)
+ json.dump(parsed, sys.stdout, indent=2, ensure_ascii=True)
+ sys.stdout.write("\n")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/app/services/ams_sync.py b/app/services/ams_sync.py
new file mode 100644
index 0000000..18a281d
--- /dev/null
+++ b/app/services/ams_sync.py
@@ -0,0 +1,205 @@
+"""
+Helper to sync AMS slot data into Spool records.
+Wir können optional neue Spools anlegen, falls kein Match vorhanden ist.
+"""
+from datetime import datetime
+from typing import List, Dict, Any, Optional
+from sqlmodel import Session, select
+
+from app.database import engine
+from app.models.spool import Spool
+from app.models.material import Material
+from app.routes.notification_routes import trigger_notification_sync
+
+
+def _now_iso() -> str:
+ return datetime.utcnow().isoformat()
+
+
+def _to_int(value: Any):
+ try:
+ if value is None:
+ return None
+ return int(value)
+ except Exception:
+ return None
+
+
+def _get_default_material_id(session: Session) -> Optional[str]:
+ mat = session.exec(select(Material)).first()
+ return mat.id if mat else None
+
+
+def _ensure_material(session: Session, tray_type: Optional[str], tray_color: Optional[str]) -> Optional[str]:
+ """Findet oder legt ein Material (Bambu Lab, Name=tray_type) an."""
+ name = tray_type or "Unknown"
+ brand = "Bambu Lab"
+ existing = session.exec(
+ select(Material).where(Material.name == name, Material.brand == brand)
+ ).first()
+ if existing:
+ return existing.id
+ try:
+ mat = Material(
+ name=name,
+ brand=brand,
+ color=f"#{tray_color[:6]}" if tray_color else None,
+ density=1.24,
+ diameter=1.75,
+ )
+ session.add(mat)
+ session.commit()
+ session.refresh(mat)
+ return mat.id
+ except Exception:
+ session.rollback()
+ return None
+
+
+def sync_ams_slots(ams_units: List[Dict[str, Any]], printer_id: Optional[str] = None, auto_create: bool = False, default_material_id: Optional[str] = None) -> int:
+ """
+ Update existing Spool entries based on AMS slot data.
+ Matching priority: tag_uid -> tray_uuid -> ams_slot.
+ Optional: create new Spools if auto_create=True and eine Material-ID verfügbar ist.
+ Returns number of updated records.
+ """
+ updated = 0
+ if not ams_units:
+ return updated
+
+ with Session(engine) as session:
+ material_id = default_material_id if auto_create else None
+ for ams in ams_units:
+ # === NOTIFICATION: AMS Luftfeuchtigkeit prüfen ===
+ humidity = ams.get("humidity")
+ if humidity is not None:
+ try:
+ humidity_val = int(humidity)
+ if humidity_val > 60:
+ # Lade Printer-Name für Kontext
+ printer_name = "Unbekannt"
+ if printer_id:
+ from app.models.printer import Printer
+ printer = session.get(Printer, printer_id)
+ printer_name = printer.name if printer else printer_id
+
+ trigger_notification_sync(
+ "ams_humidity_high",
+ humidity=humidity_val,
+ printer_name=printer_name
+ )
+ except (ValueError, TypeError):
+ pass
+
+ trays = ams.get("trays") or []
+ for tray in trays:
+ tag_uid = tray.get("tag_uid") or tray.get("tag")
+ tray_uuid = tray.get("tray_uuid")
+ # slot robust auslesen, ohne 0 zu verwerfen
+ raw_slot = tray.get("tray_id")
+ if raw_slot is None:
+ raw_slot = tray.get("id")
+ if raw_slot is None:
+ raw_slot = tray.get("slot") or tray.get("tray")
+ ams_slot = _to_int(raw_slot)
+ if ams_slot is None:
+ # Versuche aus tray_id_name wie "A00-K0" die Slot-Nummer (letzte Ziffer) zu ziehen
+ name_hint = tray.get("tray_id_name") or tray.get("name")
+ if name_hint and isinstance(name_hint, str):
+ digits = "".join(filter(str.isdigit, name_hint))
+ if digits:
+ try:
+ ams_slot = int(digits[-1])
+ except Exception:
+ ams_slot = None
+ remain = tray.get("remain") or tray.get("remain_percent")
+ remain_percent = float(remain) if remain is not None else 0.0
+ tray_type = tray.get("tray_type") or tray.get("material")
+ tray_color = tray.get("tray_color") or tray.get("color")
+ weight_current = None
+ # einfache Ableitung des aktuellen Gewichts, falls vorhanden
+ if remain_percent is not None and tray.get("weight_full") and tray.get("weight_empty"):
+ try:
+ wf = float(tray.get("weight_full"))
+ we = float(tray.get("weight_empty"))
+ weight_current = we + (remain_percent / 100.0) * (wf - we)
+ except Exception:
+ weight_current = None
+
+ stmt = select(Spool)
+ if tag_uid:
+ stmt = stmt.where(Spool.tag_uid == tag_uid)
+ elif tray_uuid:
+ stmt = stmt.where(Spool.tray_uuid == tray_uuid)
+ elif ams_slot is not None:
+ stmt = stmt.where(Spool.ams_slot == ams_slot)
+ else:
+ continue
+
+ spool = session.exec(stmt).first()
+ if not spool:
+ if not auto_create:
+ continue
+ mat_id = material_id or _ensure_material(session, tray_type, tray_color)
+ if not mat_id:
+ continue
+ now = _now_iso()
+ spool = Spool(
+ material_id=mat_id,
+ printer_id=printer_id,
+ ams_slot=ams_slot,
+ last_slot=ams_slot,
+ tag_uid=tag_uid,
+ tray_uuid=tray_uuid,
+ tray_color=tray_color,
+ tray_type=tray_type,
+ remain_percent=remain_percent,
+ weight_current=weight_current,
+ last_seen=now,
+ first_seen=now,
+ used_count=0,
+ label=f"AMS Slot {ams_slot}" if ams_slot is not None else None,
+ status="Aktiv", # Neue Spulen im AMS sind "Aktiv"
+ is_open=True, # Spulen im AMS sind geöffnet
+ )
+ session.add(spool)
+ updated += 1
+ continue
+
+ # Update bestehender Spule
+ spool.ams_slot = ams_slot
+ spool.last_slot = ams_slot
+ spool.tag_uid = tag_uid or spool.tag_uid
+ spool.tray_uuid = tray_uuid or spool.tray_uuid
+ spool.tray_color = tray_color or spool.tray_color
+ spool.tray_type = tray_type or spool.tray_type
+
+ # Status aktualisieren: Wenn Spule im AMS ist, auf "Aktiv" setzen
+ # Some test doubles (DummySpool) may not have `is_empty` attribute,
+ # so use getattr with a sensible default.
+ if not getattr(spool, "is_empty", False):
+ spool.status = "Aktiv"
+ spool.is_open = True
+ # Neue Rolle erkennen: Remain steigt deutlich an
+ if remain_percent is not None and spool.remain_percent is not None and remain_percent > spool.remain_percent + 5:
+ spool.used_count = 0
+ spool.first_seen = _now_iso()
+ spool.remain_percent = remain_percent
+ if weight_current is None and remain_percent is not None and spool.weight_full is not None and spool.weight_empty is not None:
+ try:
+ wf = float(spool.weight_full)
+ we = float(spool.weight_empty)
+ weight_current = we + (remain_percent / 100.0) * (wf - we)
+ except Exception:
+ weight_current = None
+ if weight_current is not None:
+ spool.weight_current = weight_current
+ if not spool.first_seen:
+ spool.first_seen = _now_iso()
+ spool.last_seen = _now_iso()
+ if ams_slot is not None and not spool.label:
+ spool.label = f"AMS Slot {ams_slot}"
+ session.add(spool)
+ updated += 1
+ session.commit()
+ return updated
diff --git a/app/services/environment_info.py b/app/services/environment_info.py
new file mode 100644
index 0000000..d83b0a2
--- /dev/null
+++ b/app/services/environment_info.py
@@ -0,0 +1,156 @@
+import logging
+import os
+import platform
+import socket
+import sys
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple
+
+import yaml
+from fastapi import Request
+
+logger = logging.getLogger(__name__)
+
+_CONFIG_CACHE: Optional[Dict[str, Any]] = None
+_CONFIG_PATH = Path(__file__).resolve().parents[2] / "config.yaml"
+_DEFAULT_PORT = 8085
+_DEFAULT_HOST = "0.0.0.0"
+_CGROUP_PATH = Path("/proc/1/cgroup")
+
+
+def _load_server_config() -> Dict[str, Any]:
+ global _CONFIG_CACHE
+ if _CONFIG_CACHE is not None:
+ return _CONFIG_CACHE
+ if not _CONFIG_PATH.exists():
+ _CONFIG_CACHE = {}
+ return {}
+ try:
+ with _CONFIG_PATH.open("r", encoding="utf-8") as f:
+ raw = yaml.safe_load(f)
+ if isinstance(raw, dict):
+ _CONFIG_CACHE = raw
+ return raw
+ except Exception as exc: # pragma: no cover - defensive logging
+ logger.warning("Failed to read server config file: %s", exc)
+ _CONFIG_CACHE = {}
+ return {}
+
+
+def _parse_port(value: Any) -> Optional[int]:
+ if value is None:
+ return None
+ try:
+ return int(value)
+ except (TypeError, ValueError):
+ return None
+
+
+def _resolve_host(
+ request: Optional[Request], server_block: Dict[str, Any]
+) -> Tuple[str, str]:
+ if request is not None:
+ host = request.url.hostname
+ if host:
+ return host, "request.url.hostname"
+ env_host = os.getenv("HOST")
+ if env_host:
+ return env_host, "env.HOST"
+ config_host = server_block.get("host")
+ if isinstance(config_host, str) and config_host:
+ return config_host, "config.yaml"
+ return _DEFAULT_HOST, "fallback"
+
+
+def _resolve_port(
+ request: Optional[Request], server_block: Dict[str, Any]
+) -> Tuple[int, str]:
+ if request is not None and request.url.port:
+ return request.url.port, "request.url.port"
+ env_port = _parse_port(os.getenv("PORT"))
+ if env_port is not None:
+ return env_port, "env.PORT"
+ config_port = _parse_port(server_block.get("port"))
+ if config_port is not None:
+ return config_port, "config.yaml"
+ return _DEFAULT_PORT, "fallback"
+
+
+def _read_cgroup() -> Optional[str]:
+ try:
+ return _CGROUP_PATH.read_text()
+ except Exception:
+ return None
+
+
+def _detect_containerization() -> Tuple[bool, Optional[str], List[str]]:
+ hints: List[str] = []
+ containerized = False
+ runtime: Optional[str] = None
+
+ if Path("/.dockerenv").exists():
+ containerized = True
+ runtime = runtime or "docker"
+ hints.append("found /.dockerenv")
+
+ cgroup_text = _read_cgroup()
+ if cgroup_text:
+ lower = cgroup_text.lower()
+ if "kubepods" in lower:
+ containerized = True
+ runtime = runtime or "kubernetes"
+ hints.append("cgroup contains kubepods")
+ elif "docker" in lower or "moby" in lower:
+ containerized = True
+ runtime = runtime or "docker"
+ hints.append("cgroup contains docker/moby")
+ elif "podman" in lower:
+ containerized = True
+ runtime = runtime or "podman"
+ hints.append("cgroup contains podman")
+ elif "lxc" in lower:
+ containerized = True
+ runtime = runtime or "lxc"
+ hints.append("cgroup contains lxc")
+
+ for env_var in (
+ "KUBERNETES_SERVICE_HOST",
+ "CONTAINER",
+ "CONTAINERIZED",
+ "DOTNET_RUNNING_IN_CONTAINER",
+ "CI_CONTAINER",
+ ):
+ if os.getenv(env_var):
+ containerized = True
+ runtime = runtime or env_var.lower()
+ hints.append(f"env {env_var} is set")
+ break
+
+ return containerized, runtime, hints
+
+
+def build_environment_snapshot(request: Optional[Request] = None) -> Dict[str, Any]:
+ config = _load_server_config()
+ server_block = config.get("server", {}) if isinstance(config, dict) else {}
+ host, host_source = _resolve_host(request, server_block)
+ port, port_source = _resolve_port(request, server_block)
+ containerized, container_runtime, container_hints = _detect_containerization()
+
+ return {
+ "platform": platform.system(),
+ "platform_release": platform.release(),
+ "platform_details": platform.platform(),
+ "architecture": platform.machine(),
+ "hostname": socket.gethostname(),
+ "python_version": sys.version,
+ "python_executable": sys.executable,
+ "server": {
+ "host": host,
+ "host_source": host_source,
+ "port": port,
+ "port_source": port_source,
+ },
+ "containerized": containerized,
+ "container_runtime": container_runtime,
+ "container_hints": container_hints,
+ }
diff --git a/app/services/eta/__init__.py b/app/services/eta/__init__.py
new file mode 100644
index 0000000..4182e29
--- /dev/null
+++ b/app/services/eta/__init__.py
@@ -0,0 +1,13 @@
+"""
+ETA (Estimated Time Remaining) Modul für BambuLab Drucker.
+
+WICHTIG:
+- ETA ist REINE ANZEIGE-INFORMATION
+- Hat KEINE Auswirkung auf Job-Logik
+- Kann None sein (dann UI zeigt "—")
+- Darf niemals negativ sein
+"""
+
+from .eta_selector import calculate_eta
+
+__all__ = ["calculate_eta"]
diff --git a/app/services/eta/bambu_a_series_eta.py b/app/services/eta/bambu_a_series_eta.py
new file mode 100644
index 0000000..5ba7779
--- /dev/null
+++ b/app/services/eta/bambu_a_series_eta.py
@@ -0,0 +1,64 @@
+"""
+Layer-basierte ETA für Bambu A-Serie (A1, A1 Mini).
+
+Grund: Bambu-eigene ETA ist bei A-Serie unzuverlässig.
+Strategie: Berechne ETA basierend auf durchschnittlicher Layer-Zeit.
+"""
+
+from datetime import datetime
+from typing import Optional
+
+
+def estimate_remaining_time_from_layers(
+ started_at: datetime,
+ layer_num: int,
+ total_layer_num: int,
+ now: Optional[datetime] = None,
+ min_layers_for_eta: int = 5,
+) -> Optional[int]:
+ """
+ Layer-basierte ETA-Berechnung.
+
+ Args:
+ started_at: Wann der Job gestartet wurde
+ layer_num: Aktueller Layer
+ total_layer_num: Gesamt-Layer
+ now: Aktueller Zeitpunkt (für Tests)
+ min_layers_for_eta: Minimum Layers bevor ETA berechnet wird
+
+ Returns:
+ Verbleibende Sekunden (int) oder None wenn ETA nicht sinnvoll
+ """
+ # Validierung: Pflichtfelder
+ if not started_at:
+ return None
+ if layer_num is None or total_layer_num is None:
+ return None
+
+ # Zu früh für sinnvolle ETA
+ if layer_num < min_layers_for_eta:
+ return None
+
+ # Job ist fertig oder über 100%
+ if total_layer_num <= layer_num:
+ return 0
+
+ # Berechne verstrichene Zeit
+ current_time = now if now else datetime.utcnow()
+ elapsed = (current_time - started_at).total_seconds()
+
+ # Verhindere Division durch 0
+ if layer_num <= 0:
+ return None
+
+ # Berechne durchschnittliche Zeit pro Layer
+ avg_time_per_layer = elapsed / layer_num
+
+ # Berechne verbleibende Layer
+ remaining_layers = total_layer_num - layer_num
+
+ # Berechne ETA
+ eta_seconds = avg_time_per_layer * remaining_layers
+
+ # Sicherheit: Niemals negativ
+ return max(0, int(eta_seconds))
diff --git a/app/services/eta/bambu_p_series_eta.py b/app/services/eta/bambu_p_series_eta.py
new file mode 100644
index 0000000..af881a0
--- /dev/null
+++ b/app/services/eta/bambu_p_series_eta.py
@@ -0,0 +1,40 @@
+"""
+Hybrid-ETA für Bambu P-Serie (P1P, P1S).
+
+Grund: Bambu-ETA ist brauchbar, aber nicht immer stabil.
+Strategie: Verwende max(Bambu-ETA, Layer-ETA) für konservative Schätzung.
+"""
+
+from typing import Optional
+
+
+def estimate_eta_p_series(
+ bambu_remaining_time: Optional[int],
+ layer_eta: Optional[int],
+) -> Optional[int]:
+ """
+ Hybrid-ETA für P-Serie.
+
+ Args:
+ bambu_remaining_time: Von Bambu gemeldete Restzeit (Sekunden)
+ layer_eta: Layer-basierte ETA (Sekunden)
+
+ Returns:
+ Konservativere (größere) ETA in Sekunden, oder None
+ """
+ # Bereinige negative/0-Werte
+ bambu_valid = bambu_remaining_time if (bambu_remaining_time and bambu_remaining_time > 0) else None
+ layer_valid = layer_eta if (layer_eta and layer_eta > 0) else None
+
+ # Beide None → keine ETA
+ if bambu_valid is None and layer_valid is None:
+ return None
+
+ # Nur eine vorhanden
+ if bambu_valid is None:
+ return layer_valid
+ if layer_valid is None:
+ return bambu_valid
+
+ # Beide vorhanden: Nimm die konservativere (größere)
+ return max(bambu_valid, layer_valid)
diff --git a/app/services/eta/bambu_x_series_eta.py b/app/services/eta/bambu_x_series_eta.py
new file mode 100644
index 0000000..4359ba0
--- /dev/null
+++ b/app/services/eta/bambu_x_series_eta.py
@@ -0,0 +1,27 @@
+"""
+ETA für Bambu X-Serie (X1, X1C).
+
+Grund: Bambu-eigene ETA ist bei X-Serie zuverlässig und präzise.
+Strategie: Verwende Bambu-ETA direkt.
+"""
+
+from typing import Optional
+
+
+def estimate_eta_x_series(
+ bambu_remaining_time: Optional[int],
+) -> Optional[int]:
+ """
+ ETA für X-Serie (direkt von Bambu).
+
+ Args:
+ bambu_remaining_time: Von Bambu gemeldete Restzeit (Sekunden)
+
+ Returns:
+ Bambu-ETA in Sekunden, oder None
+ """
+ # Validierung
+ if bambu_remaining_time is None or bambu_remaining_time <= 0:
+ return None
+
+ return bambu_remaining_time
diff --git a/app/services/eta/eta_selector.py b/app/services/eta/eta_selector.py
new file mode 100644
index 0000000..fcf009d
--- /dev/null
+++ b/app/services/eta/eta_selector.py
@@ -0,0 +1,71 @@
+"""
+Zentrale ETA-Auswahl basierend auf Drucker-Modell.
+"""
+
+from datetime import datetime
+from typing import Optional
+
+from .bambu_a_series_eta import estimate_remaining_time_from_layers
+from .bambu_p_series_eta import estimate_eta_p_series
+from .bambu_x_series_eta import estimate_eta_x_series
+
+
+# Modell-Mapping
+A_SERIES_MODELS = {"A1", "A1MINI", "A1 MINI"}
+P_SERIES_MODELS = {"P1P", "P1S"}
+X_SERIES_MODELS = {"X1", "X1C", "X1E"}
+
+
+def calculate_eta(
+ printer_model: Optional[str],
+ started_at: Optional[datetime],
+ layer_num: Optional[int],
+ total_layer_num: Optional[int],
+ bambu_remaining_time: Optional[int],
+) -> Optional[int]:
+ """
+ Zentrale ETA-Berechnung basierend auf Drucker-Modell.
+
+ Args:
+ printer_model: Modell-Name (z.B. "X1C", "P1S", "A1")
+ started_at: Job-Start-Zeit
+ layer_num: Aktueller Layer
+ total_layer_num: Gesamt-Layer
+ bambu_remaining_time: Von Bambu gemeldete Restzeit (Sekunden)
+
+ Returns:
+ ETA in Sekunden, oder None wenn nicht berechenbar
+ """
+ # Kein Modell → keine spezifische ETA
+ if not printer_model:
+ return None
+
+ # Normalisiere Modell-Name
+ model_upper = printer_model.upper().strip()
+
+ # A-Serie: Layer-basierte ETA
+ if model_upper in A_SERIES_MODELS:
+ return estimate_remaining_time_from_layers(
+ started_at=started_at,
+ layer_num=layer_num,
+ total_layer_num=total_layer_num,
+ )
+
+ # P-Serie: Hybrid-ETA
+ if model_upper in P_SERIES_MODELS:
+ layer_eta = estimate_remaining_time_from_layers(
+ started_at=started_at,
+ layer_num=layer_num,
+ total_layer_num=total_layer_num,
+ )
+ return estimate_eta_p_series(
+ bambu_remaining_time=bambu_remaining_time,
+ layer_eta=layer_eta,
+ )
+
+ # X-Serie: Bambu-ETA direkt
+ if model_upper in X_SERIES_MODELS:
+ return estimate_eta_x_series(bambu_remaining_time)
+
+ # Unbekanntes Modell → keine ETA
+ return None
diff --git a/app/services/job_parser.py b/app/services/job_parser.py
new file mode 100644
index 0000000..9c0c7e9
--- /dev/null
+++ b/app/services/job_parser.py
@@ -0,0 +1,158 @@
+"""
+Helpers to extract current job/print status from Bambu report payloads.
+"""
+from typing import Any, Dict, Optional
+
+__all__ = ["parse_job"]
+
+
+def _first_defined(*values: Any) -> Any:
+ for value in values:
+ if value is not None:
+ return value
+ return None
+
+
+def _to_int(value: Any) -> Optional[int]:
+ try:
+ if value is None:
+ return None
+ return int(value)
+ except Exception:
+ return None
+
+
+def _dict_lookup(obj: Dict[str, Any], *keys: str) -> Any:
+ current: Any = obj
+ for key in keys:
+ if not isinstance(current, dict):
+ return None
+ current = current.get(key)
+ return current
+
+
+def parse_job(report_payload: Dict[str, Any]) -> Dict[str, Any]:
+ """
+ Extract job/print status fields from a device//report payload.
+ Returns a flat dict with common fields for easy consumption.
+ """
+ # Primary: PrinterData (mapped) falls back to raw report
+ printer_mapped = report_payload.get("printer") if isinstance(report_payload, dict) else None
+ if printer_mapped and isinstance(printer_mapped, dict):
+ pd = printer_mapped
+ temps = pd.get("temperature") or {}
+ layer = pd.get("layer") or {}
+ job_pd = pd.get("job") or {}
+ return {
+ "gcode_state": pd.get("state") or pd.get("sub_state"),
+ "progress_percent": _to_int(pd.get("progress")),
+ "remain_time_s": _to_int(job_pd.get("time_remaining")),
+ "gcode_file": job_pd.get("file"),
+ "project_id": None,
+ "task_id": None,
+ "subtask_id": None,
+ "job_id": None,
+ "profile_id": None,
+ "job_attr": None,
+ "mc_stage": pd.get("sub_state"),
+ "mc_print_stage": pd.get("sub_state"),
+ "mc_print_sub_stage": None,
+ "upgrade_state": None,
+ "upgrade_module": None,
+ "upgrade_message": None,
+ "job_stage": None,
+ "tray_target": None,
+ "tray_current": None,
+ "tray_previous": None,
+ "virtual_tray": None,
+ "nozzle_temp": temps.get("nozzle"),
+ "bed_temp": temps.get("bed"),
+ "layer_current": layer.get("current"),
+ "layer_total": layer.get("total"),
+ }
+
+ root = _dict_lookup(report_payload, "print") or report_payload
+
+ job_block = root.get("job") or {}
+ upgrade_block = {}
+ if isinstance(report_payload, dict):
+ upgrade_block = report_payload.get("upgrade_state") or {}
+ upgrade_block = upgrade_block or root.get("upgrade_state") or {}
+
+ file_name = _first_defined(
+ root.get("gcode_file"),
+ root.get("file"),
+ job_block.get("file"),
+ job_block.get("gcode_file"),
+ )
+ percent = _first_defined(
+ _to_int(root.get("percent")),
+ _to_int(root.get("mc_percent")),
+ _to_int(root.get("gcode_file_prepare_percent")),
+ )
+ remain_time = _first_defined(
+ _to_int(root.get("remain_time")),
+ _to_int(root.get("mc_remaining_time")),
+ )
+
+ ams_block = root.get("ams") or {}
+ tray_target = _to_int(ams_block.get("tray_tar"))
+ tray_current = _to_int(ams_block.get("tray_now"))
+ tray_prev = _to_int(ams_block.get("tray_pre"))
+ virtual_tray = _dict_lookup(root, "vt_tray") or _dict_lookup(root, "vir_slot")
+ vt = None
+ if isinstance(virtual_tray, dict):
+ vt = {
+ "id": _to_int(virtual_tray.get("id")),
+ "type": virtual_tray.get("tray_type") or virtual_tray.get("tray_id_name") or virtual_tray.get("tray_name"),
+ "color": virtual_tray.get("tray_color"),
+ "weight": virtual_tray.get("tray_weight"),
+ "remain": _to_int(virtual_tray.get("remain")),
+ }
+
+ return {
+ "gcode_state": _first_defined(root.get("gcode_state"), root.get("print_state"), root.get("state")),
+ "progress_percent": percent,
+ "remain_time_s": remain_time,
+ "gcode_file": file_name,
+ "project_id": root.get("project_id"),
+ "task_id": root.get("task_id"),
+ "subtask_id": root.get("subtask_id"),
+ "job_id": root.get("job_id"),
+ "profile_id": root.get("profile_id"),
+ "job_attr": root.get("job_attr"),
+ "mc_stage": root.get("mc_stage"),
+ "mc_print_stage": root.get("mc_print_stage"),
+ "mc_print_sub_stage": root.get("mc_print_sub_stage"),
+ "upgrade_state": upgrade_block.get("status"),
+ "upgrade_module": upgrade_block.get("module"),
+ "upgrade_message": upgrade_block.get("message"),
+ "job_stage": job_block.get("cur_stage"),
+ "tray_target": tray_target,
+ "tray_current": tray_current,
+ "tray_previous": tray_prev,
+ "virtual_tray": vt,
+ }
+
+
+def main() -> None:
+ """
+ Simple CLI:
+ python -m app.services.job_parser report.json > job.json
+ """
+ import json
+ import sys
+
+ if len(sys.argv) < 2:
+ data = json.load(sys.stdin)
+ else:
+ with open(sys.argv[1], "r", encoding="utf-8") as handle:
+ data = json.load(handle)
+
+ parsed = parse_job(data)
+ json.dump(parsed, sys.stdout, indent=2, ensure_ascii=True)
+ sys.stdout.write("\n")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/app/services/job_tracking_service.py b/app/services/job_tracking_service.py
new file mode 100644
index 0000000..44ea65c
--- /dev/null
+++ b/app/services/job_tracking_service.py
@@ -0,0 +1,1232 @@
+"""
+Job Tracking Service - Zentrale Verwaltung für Job-Tracking und Filament-Verbrauch
+
+Verwaltet:
+- Aktive Jobs pro Drucker (RAM-basiert)
+- Job-Status-Übergänge (IDLE -> RUNNING -> FINISH)
+- Filament-Verbrauch Berechnung
+- Spulen-Gewicht Updates
+- Multi-Spool Job Tracking
+
+Wird genutzt von:
+- mqtt_runtime.py (Live MQTT Messages)
+- mqtt_routes.py (HTTP API Fallback)
+"""
+
+from typing import Dict, Any, Optional, List, cast
+from datetime import datetime
+from sqlmodel import Session, select
+import logging
+import json
+from pathlib import Path
+import tempfile
+import os
+import threading
+from json import JSONDecodeError
+
+from app.models.job import Job
+from app.models.spool import Spool
+from app.models.printer import Printer
+from app.database import engine
+from app.routes.notification_routes import trigger_notification_sync
+
+
+_snapshot_lock = threading.Lock()
+
+
+class JobTrackingService:
+ """Singleton Service für Job-Tracking und Verbrauch-Berechnung"""
+
+ def __init__(self):
+ self.active_jobs: Dict[str, Dict[str, Any]] = {} # cloud_serial -> job_info
+ self.last_gstate: Dict[str, str] = {} # cloud_serial -> last_state
+ self.logger = logging.getLogger("job_tracking")
+ self.snapshots_file = Path("data/job_snapshots.json") # Persistente Job-Fingerprints
+
+
+ def _calc_usage(
+ self,
+ spool: Optional[Spool],
+ start_remain: Optional[float],
+ end_remain: Optional[float],
+ start_total_len: Optional[int]
+ ) -> tuple[float, float]:
+ """
+ Berechnet Verbrauch in mm und g
+
+ Args:
+ spool: Spulen-Objekt (für Gewichtsberechnung)
+ start_remain: Start-Restmenge in %
+ end_remain: End-Restmenge in %
+ start_total_len: Totale Länge in mm
+
+ Returns:
+ (used_mm, used_g)
+ """
+ if start_remain is None or end_remain is None:
+ return 0.0, 0.0
+
+ # FIX: Bambu Lab's "remain" ist EXTREM unzuverlässig und kann sogar steigen!
+ # Wir ignorieren Anstiege (bleiben beim letzten Wert)
+ # Dies ist eine defensive Strategie, um negative Verbräuche zu vermeiden
+ used_percent = max(0.0, float(start_remain) - float(end_remain))
+
+ # Wenn remain GESTIEGEN ist (end_remain > start_remain), ist used_percent = 0
+ # Das ist technisch korrekt, aber wir verlieren Tracking-Genauigkeit
+
+ # Länge in mm
+ used_mm = (used_percent / 100.0) * float(start_total_len) if start_total_len else 0.0
+
+ # Gewicht in g
+ used_g = 0.0
+ if spool and spool.weight_full is not None and spool.weight_empty is not None:
+ used_g = (used_percent / 100.0) * (float(spool.weight_full) - float(spool.weight_empty))
+
+ return used_mm, used_g
+
+ def _get_snapshot_key(self, cloud_serial: str, printer_id: Optional[str]) -> str:
+ """
+ Bestimmt den Snapshot-Key basierend auf Drucker-Typ.
+
+ - Bambu Lab (hat cloud_serial): Verwende cloud_serial (hardware-gebunden)
+ - Klipper (kein cloud_serial): Verwende printer_id (DB-gebunden)
+ """
+ if cloud_serial:
+ return cloud_serial
+ if printer_id:
+ return f"printer_{printer_id}"
+ return "printer_unknown"
+
+ def _save_snapshot(self, cloud_serial: str, printer_id: Optional[str], job_id: str, job_name: str,
+ slot: int, layer_num: int, mc_percent: int, started_at: datetime,
+ filament_start_mm: Optional[float] = None,
+ filament_started: bool = False,
+ using_fallback: bool = False,
+ fallback_warned: bool = False):
+ """Speichert Job-Snapshot in JSON-Datei für Server-Neustart-Recovery"""
+ snapshot_key = self._get_snapshot_key(cloud_serial, printer_id)
+
+ # Neuer Snapshot
+ snapshot_data = {
+ "cloud_serial": cloud_serial,
+ "printer_id": printer_id,
+ "job_id": job_id,
+ "job_name": job_name,
+ "started_at": started_at.isoformat(),
+ "slot": slot,
+ "layer_num": layer_num,
+ "mc_percent": mc_percent,
+ "filament_started": filament_started,
+ "using_fallback": using_fallback,
+ "fallback_warned": fallback_warned,
+ }
+
+ if filament_start_mm is not None:
+ snapshot_data["filament_start_mm"] = filament_start_mm
+
+ # Ensure parent dir exists
+ try:
+ self.snapshots_file.parent.mkdir(parents=True, exist_ok=True)
+ except OSError:
+ self.logger.exception("[SNAPSHOT] Failed to create snapshot directory %s", self.snapshots_file.parent)
+ return
+
+ # Atomarer write: schreibe in tempfile, fsync, replace
+ tmp_path = None
+ try:
+ with _snapshot_lock:
+ snapshots = {}
+ if self.snapshots_file.exists():
+ try:
+ with open(self.snapshots_file, 'r', encoding='utf-8') as f:
+ snapshots = json.load(f)
+ except JSONDecodeError:
+ self.logger.error("[SNAPSHOT] Corrupt snapshot file %s - discarding", self.snapshots_file, exc_info=True)
+ snapshots = {}
+ except OSError:
+ self.logger.exception("[SNAPSHOT] Failed to read snapshot file %s", self.snapshots_file)
+ snapshots = {}
+
+ snapshots[snapshot_key] = snapshot_data
+
+ dirpath = str(self.snapshots_file.parent)
+ tf = tempfile.NamedTemporaryFile(mode='w', dir=dirpath, delete=False, encoding='utf-8')
+ tmp_path = tf.name
+ try:
+ json.dump(snapshots, tf, ensure_ascii=False, indent=2)
+ tf.flush()
+ os.fsync(tf.fileno())
+ finally:
+ tf.close()
+
+ os.replace(tmp_path, str(self.snapshots_file))
+
+ self.logger.debug("[SNAPSHOT] Saved for key=%s job=%s", snapshot_key, job_id)
+
+ except Exception:
+ self.logger.exception("[SNAPSHOT] Failed to save snapshot for key=%s", snapshot_key)
+ if tmp_path and os.path.exists(tmp_path):
+ try:
+ os.remove(tmp_path)
+ except OSError:
+ pass
+
+ def _load_snapshot(self, cloud_serial: str, printer_id: Optional[str]) -> Optional[Dict[str, Any]]:
+ """Lädt Job-Snapshot für einen Drucker"""
+ snapshot_key = self._get_snapshot_key(cloud_serial, printer_id)
+
+ if not self.snapshots_file.exists():
+ return None
+
+ try:
+ with open(self.snapshots_file, 'r', encoding='utf-8') as f:
+ snapshots = json.load(f)
+ except JSONDecodeError:
+ self.logger.error("[SNAPSHOT] Corrupt snapshot file %s - discarding", self.snapshots_file, exc_info=True)
+ return None
+ except OSError:
+ self.logger.exception("[SNAPSHOT] Failed to read snapshot file %s", self.snapshots_file)
+ return None
+
+ return snapshots.get(snapshot_key)
+
+ def _delete_snapshot(self, cloud_serial: str, printer_id: Optional[str]):
+ """Löscht Job-Snapshot nach Job-Ende"""
+ snapshot_key = self._get_snapshot_key(cloud_serial, printer_id)
+
+ if not self.snapshots_file.exists():
+ return
+
+ tmp_path = None
+ try:
+ with _snapshot_lock:
+ try:
+ with open(self.snapshots_file, 'r', encoding='utf-8') as f:
+ snapshots = json.load(f)
+ except JSONDecodeError:
+ self.logger.error("[SNAPSHOT] Corrupt snapshot file %s - discarding", self.snapshots_file, exc_info=True)
+ return
+ except OSError:
+ self.logger.exception("[SNAPSHOT] Failed to read snapshot file %s", self.snapshots_file)
+ return
+
+ if snapshot_key in snapshots:
+ del snapshots[snapshot_key]
+
+ dirpath = str(self.snapshots_file.parent)
+ tf = tempfile.NamedTemporaryFile(mode='w', dir=dirpath, delete=False, encoding='utf-8')
+ tmp_path = tf.name
+ try:
+ json.dump(snapshots, tf, ensure_ascii=False, indent=2)
+ tf.flush()
+ os.fsync(tf.fileno())
+ finally:
+ tf.close()
+
+ os.replace(tmp_path, str(self.snapshots_file))
+ self.logger.debug("[SNAPSHOT] Deleted for key=%s", snapshot_key)
+
+ except Exception:
+ self.logger.exception("[SNAPSHOT] Failed to delete snapshot for key=%s", snapshot_key)
+ if tmp_path and os.path.exists(tmp_path):
+ try:
+ os.remove(tmp_path)
+ except OSError:
+ pass
+
+ def _find_tray(self, ams_units: List[Dict[str, Any]], slot: Optional[int]) -> Optional[Dict[str, Any]]:
+ """Findet Tray-Info für einen bestimmten Slot"""
+ if slot is None:
+ return None
+
+ for unit in ams_units or []:
+ trays = unit.get("trays") or []
+ for tray in trays:
+ tid = tray.get("tray_id") if isinstance(tray, dict) else None
+ tid = tid if tid is not None else (tray.get("id") if isinstance(tray, dict) else None)
+ if tid is not None and int(tid) == int(slot):
+ return tray
+ return None
+
+ def _extract_filament_used_mm(self, parsed_payload: Dict[str, Any]) -> Optional[float]:
+ """
+ Extrahiert print.filament_used_mm aus verschiedenen möglichen Pfaden.
+
+ Args:
+ parsed_payload: Geparste MQTT Payload
+
+ Returns:
+ Filament-verbrauch in mm oder None falls nicht vorhanden
+ """
+ # Primärquelle: print.filament_used_mm
+ print_block = parsed_payload.get("print", {})
+ if isinstance(print_block, dict):
+ filament_used = print_block.get("filament_used_mm")
+ if filament_used is not None:
+ try:
+ value = float(filament_used)
+ self.logger.debug(
+ "[FILAMENT] Extracted filament_used_mm=%s from print.filament_used_mm",
+ value,
+ )
+ return value
+ except (ValueError, TypeError):
+ pass
+
+ # Alternative: print.3D.filament_used_mm
+ three_d = print_block.get("3D", {})
+ if isinstance(three_d, dict):
+ filament_used = three_d.get("filament_used_mm")
+ if filament_used is not None:
+ try:
+ value = float(filament_used)
+ self.logger.debug(
+ "[FILAMENT] Extracted filament_used_mm=%s from print.3D.filament_used_mm",
+ value,
+ )
+ return value
+ except (ValueError, TypeError):
+ pass
+
+ # Fallback: Root-level
+ filament_used = parsed_payload.get("filament_used_mm")
+ if filament_used is not None:
+ try:
+ value = float(filament_used)
+ self.logger.debug(
+ "[FILAMENT] Extracted filament_used_mm=%s from filament_used_mm",
+ value,
+ )
+ return value
+ except (ValueError, TypeError):
+ pass
+
+ return None
+
+ def _calculate_filament_from_remain(self, tray_info: Dict[str, Any]) -> Optional[float]:
+ """
+ Berechnet absoluten Filament-Verbrauch aus remain und total_len (Fallback).
+
+ Args:
+ tray_info: Tray-Info Dict mit remain und total_len
+
+ Returns:
+ Berechneter Filament-verbrauch in mm oder None falls total_len fehlt
+ """
+ if not tray_info:
+ return None
+
+ total_len = tray_info.get("total_len")
+ if total_len is None:
+ self.logger.error("[FILAMENT] Fallback unavailable: missing total_len")
+ return None
+
+ try:
+ total_len_mm = float(total_len)
+ except (ValueError, TypeError):
+ return None
+
+ remain = tray_info.get("remain")
+ if remain is None:
+ return None
+
+ try:
+ remain_percent = float(remain)
+ except (ValueError, TypeError):
+ return None
+
+ # Berechne: total_len_mm * (1 - remain_percent / 100)
+ filament_used_mm = total_len_mm * (1 - remain_percent / 100.0)
+ return max(0.0, filament_used_mm) # Keine negativen Werte
+
+ def _finalize_current(
+ self,
+ session: Session,
+ info: Dict[str, Any]
+ ) -> Optional[Dict[str, Any]]:
+ """
+ Berechnet finalen Verbrauch für aktuellen Slot
+
+ Returns:
+ {"spool_id": str, "slot": int, "used_mm": float, "used_g": float}
+ """
+ if info.get("slot") is None:
+ return None
+
+ spool = session.get(Spool, info.get("spool_id")) if info.get("spool_id") else None
+
+ used_mm, used_g = self._calc_usage(
+ spool,
+ info.get("start_remain"),
+ info.get("last_remain"),
+ info.get("start_total_len")
+ )
+
+ return {
+ "spool_id": info.get("spool_id"),
+ "slot": info.get("slot"),
+ "used_mm": used_mm,
+ "used_g": used_g
+ }
+
+ def process_message(
+ self,
+ cloud_serial: str,
+ parsed_payload: Dict[str, Any],
+ printer_id: Optional[str],
+ ams_data: Optional[List[Dict[str, Any]]] = None
+ ) -> Optional[Dict[str, Any]]:
+ """
+ Hauptfunktion: Verarbeitet eine MQTT Message für Job-Tracking
+
+ Args:
+ cloud_serial: Drucker Serial (eindeutige ID)
+ parsed_payload: Geparste MQTT Payload
+ printer_id: Drucker-ID aus Datenbank
+ ams_data: Geparste AMS-Daten (optional)
+
+ Returns:
+ Job-Info dict oder None
+ """
+ if not cloud_serial or not printer_id:
+ return None
+
+ # Aktuellen Status extrahieren
+ current_gstate = (
+ parsed_payload.get("print", {}).get("gcode_state") or
+ parsed_payload.get("gcode_state") or
+ ""
+ ).upper()
+
+ # Vorherigen Status merken
+ prev_gstate = self.last_gstate.get(cloud_serial)
+ self.last_gstate[cloud_serial] = current_gstate
+
+ # Hat dieser Drucker einen aktiven Job?
+ has_active_job = cloud_serial in self.active_jobs
+
+ # State-Mapping für Bambu Lab Drucker
+ PRINT_STATES = {
+ "PRINTING", "RUNNING",
+ "PURGING", "CHANGING_FILAMENT", "CALIBRATING" # Zählt als aktiver Druck
+ }
+ COMPLETED_STATES = {"FINISH", "FINISHED", "COMPLETED", "COMPLETE"}
+ FAILED_STATES = {"FAILED", "ERROR", "EXCEPTION"}
+ ABORTED_STATES = {"ABORT", "ABORTED", "STOPPED", "CANCELLED", "CANCELED"}
+
+ # ===================================================================
+ # JOB START
+ # ===================================================================
+ if not has_active_job and current_gstate in PRINT_STATES:
+ return self._handle_job_start(
+ cloud_serial,
+ parsed_payload,
+ printer_id,
+ ams_data
+ )
+
+ # ===================================================================
+ # JOB RUNNING (Update)
+ # ===================================================================
+ if has_active_job and current_gstate in PRINT_STATES:
+ return self._handle_job_update(
+ cloud_serial,
+ parsed_payload,
+ ams_data
+ )
+
+ # ===================================================================
+ # JOB FINISH
+ # ===================================================================
+ if has_active_job and (
+ current_gstate in COMPLETED_STATES or
+ current_gstate in FAILED_STATES or
+ current_gstate in ABORTED_STATES
+ ):
+ return self._handle_job_finish(
+ cloud_serial,
+ parsed_payload,
+ ams_data,
+ current_gstate,
+ COMPLETED_STATES,
+ FAILED_STATES,
+ ABORTED_STATES
+ )
+
+ return None
+
+ def _handle_job_start(
+ self,
+ cloud_serial: str,
+ parsed_payload: Dict[str, Any],
+ printer_id: str,
+ ams_data: Optional[List[Dict[str, Any]]]
+ ) -> Optional[Dict[str, Any]]:
+ """Erstellt einen neuen Job oder restored existierenden nach Server-Neustart"""
+ try:
+ with Session(engine) as session:
+ from datetime import datetime, timedelta
+
+ # Extrahiere Job-Informationen aus MQTT-Payload
+ job_name = (
+ parsed_payload.get("print", {}).get("subtask_name") or
+ parsed_payload.get("print", {}).get("gcode_file") or
+ parsed_payload.get("subtask_name") or
+ parsed_payload.get("gcode_file") or
+ parsed_payload.get("file") or
+ "Unnamed Job"
+ )
+
+ # Aktuelle Layer/Fortschritt aus MQTT
+ current_layer = parsed_payload.get("print", {}).get("layer_num") or 0
+ current_percent = parsed_payload.get("print", {}).get("mc_percent") or 0
+
+ # Aktiven Slot finden
+ active_slot = None
+ ams_block = parsed_payload.get("print", {}).get("ams") or {}
+ tray_now = ams_block.get("tray_now")
+ tray_tar = ams_block.get("tray_tar")
+
+ if tray_tar is not None and tray_tar != 255:
+ active_slot = int(tray_tar)
+ elif tray_now is not None and tray_now != 255:
+ active_slot = int(tray_now)
+
+ # === SNAPSHOT-BASIERTE ERKENNUNG: Server-Neustart vs. Neuer Job ===
+ snapshot = self._load_snapshot(cloud_serial, printer_id)
+ existing_jobs = session.exec(
+ select(Job)
+ .where(Job.printer_id == printer_id)
+ .where(Job.status == "running")
+ .order_by(cast(Any, Job.started_at))
+ ).all()
+
+ # CLEANUP: Duplikate löschen (behalte ältesten)
+ if len(existing_jobs) > 1:
+ self.logger.warning(
+ f"[JOB START] Found {len(existing_jobs)} duplicate running jobs. "
+ f"Cleaning up duplicates."
+ )
+ for dup in existing_jobs[1:]:
+ self.logger.info(f"[JOB START] Deleting duplicate job={dup.id}")
+ session.delete(dup)
+ session.commit()
+ existing_jobs = [existing_jobs[0]]
+
+ existing_job = existing_jobs[0] if existing_jobs else None
+
+ # Prüfe ob existing Job wiederhergestellt werden soll
+ should_restore = False
+ if existing_job and snapshot:
+ # Validierung: Ist das der gleiche Druck?
+ job_age = datetime.utcnow() - existing_job.started_at
+
+ # Check 1: Job zu alt (>48h) → stale
+ if job_age > timedelta(hours=48):
+ self.logger.warning(
+ f"[JOB START] Stale job (age={job_age.total_seconds()/3600:.1f}h). "
+ f"Marking as failed."
+ )
+ existing_job.status = "failed"
+ existing_job.finished_at = datetime.utcnow()
+ session.add(existing_job)
+ session.commit()
+ self._delete_snapshot(cloud_serial, printer_id)
+ existing_job = None
+
+ # Check 2: Fortschritt gemacht? (Layer/Percent gestiegen)
+ elif (current_layer >= snapshot.get("layer_num", 0) and
+ current_percent >= snapshot.get("mc_percent", 0)):
+ # Fortschritt passt → gleicher Druck, Server-Neustart!
+ should_restore = True
+ self.logger.info(
+ f"[JOB START] Detected server restart. "
+ f"Restoring job={existing_job.id} "
+ f"(layer: {snapshot.get('layer_num')}→{current_layer}, "
+ f"progress: {snapshot.get('mc_percent')}%→{current_percent}%)"
+ )
+
+ else:
+ # Fortschritt NICHT gestiegen → neuer Druck!
+ self.logger.warning(
+ f"[JOB START] Progress mismatch (new print detected). "
+ f"Marking old job as failed. "
+ f"(layer: {current_layer} vs snapshot {snapshot.get('layer_num')}, "
+ f"percent: {current_percent}% vs snapshot {snapshot.get('mc_percent')}%)"
+ )
+ existing_job.status = "failed"
+ existing_job.finished_at = datetime.utcnow()
+ session.add(existing_job)
+ session.commit()
+ self._delete_snapshot(cloud_serial, printer_id)
+ existing_job = None
+
+ elif existing_job and not snapshot:
+ # Job existiert aber kein Snapshot → vermutlich alter stale Job
+ self.logger.warning(
+ f"[JOB START] Found running job without snapshot. Marking as failed."
+ )
+ existing_job.status = "failed"
+ existing_job.finished_at = datetime.utcnow()
+ session.add(existing_job)
+ session.commit()
+ existing_job = None
+
+ # === JOB RESTORE (nach Server-Neustart) ===
+ if should_restore and existing_job:
+ # Tray-Info für remain-Tracking
+ tray_info = self._find_tray(ams_data or [], active_slot) if active_slot is not None else None
+ start_remain = tray_info.get("remain") if tray_info else None
+ total_len = tray_info.get("total_len") if tray_info else None
+
+ # Job in RAM wiederherstellen
+ # Lade filament_start_mm und filament_started aus Snapshot
+ assert snapshot is not None
+ filament_start_mm = snapshot.get("filament_start_mm")
+ filament_started = snapshot.get("filament_started", False)
+ using_fallback = snapshot.get("using_fallback", False)
+ fallback_warned = snapshot.get("fallback_warned", False)
+
+ # Wenn filament_start_mm vorhanden, setze filament_started = True
+ if filament_start_mm is not None:
+ filament_started = True
+
+ self.active_jobs[cloud_serial] = {
+ "job_id": existing_job.id,
+ "printer_id": printer_id,
+ "slot": active_slot,
+ "spool_id": existing_job.spool_id,
+ "start_remain": start_remain,
+ "last_remain": start_remain,
+ "start_total_len": total_len,
+ "usages": [],
+ "filament_start_mm": filament_start_mm,
+ "filament_started": filament_started,
+ "using_fallback": using_fallback,
+ "fallback_warned": fallback_warned,
+ }
+
+ return {"job_id": existing_job.id, "status": "restored"}
+
+ # === NEUER JOB ERSTELLEN ===
+
+ # Spule finden (via ams_slot)
+ spool = None
+ if active_slot is not None:
+ spool = session.exec(
+ select(Spool)
+ .where(Spool.printer_id == printer_id)
+ .where(Spool.ams_slot == active_slot)
+ ).first()
+
+ # Job erstellen
+ new_job = Job(
+ printer_id=printer_id,
+ spool_id=spool.id if spool else None,
+ name=job_name,
+ started_at=datetime.utcnow(),
+ filament_used_mm=0,
+ filament_used_g=0,
+ status="running"
+ )
+
+ session.add(new_job)
+ session.commit()
+ session.refresh(new_job)
+
+ # Spulen-Status aktualisieren (für ALLE Spulen, nicht nur mit spool_number)
+ if spool and not spool.is_empty:
+ if spool.status != "Aktiv":
+ spool.status = "Aktiv"
+ spool.is_open = True
+ session.add(spool)
+ session.commit()
+
+ # === NOTIFICATION: Job ohne Spule gestartet ===
+ if not spool:
+ printer = session.get(Printer, printer_id)
+ printer_name = printer.name if printer else "Unbekannt"
+ trigger_notification_sync(
+ "job_no_spool",
+ job_name=new_job.name,
+ printer_name=printer_name
+ )
+
+ # Job-Info in RAM speichern
+ tray_info = self._find_tray(ams_data or [], active_slot) if active_slot is not None else None
+ start_remain = tray_info.get("remain") if tray_info else None
+ total_len = tray_info.get("total_len") if tray_info else None
+
+ # WICHTIG: filament_start_mm wird NICHT beim Job-Start gesetzt
+ # Es wird erst bei layer_num >= 1 gesetzt (in _handle_job_update)
+ self.active_jobs[cloud_serial] = {
+ "job_id": new_job.id,
+ "printer_id": printer_id,
+ "slot": active_slot,
+ "spool_id": spool.id if spool else None,
+ "start_remain": start_remain,
+ "last_remain": start_remain,
+ "start_total_len": total_len,
+ "usages": [],
+ "filament_start_mm": None, # Wird bei layer_num >= 1 gesetzt
+ "filament_started": False, # Guard-Flag gegen doppelte Events
+ "using_fallback": False, # Flag für Fallback-Modus
+ "fallback_warned": False # Flag für einmalige Warnung
+ }
+
+ # === SNAPSHOT SPEICHERN (für Server-Neustart-Recovery) ===
+ self._save_snapshot(
+ cloud_serial=cloud_serial,
+ printer_id=printer_id,
+ job_id=new_job.id,
+ job_name=job_name,
+ slot=active_slot or 0,
+ layer_num=current_layer,
+ mc_percent=current_percent,
+ started_at=new_job.started_at
+ )
+
+ self.logger.info(
+ f"[JOB START] printer={printer_id} job={new_job.id} "
+ f"name={new_job.name} slot={active_slot} "
+ f"(snapshot saved: layer={current_layer}, progress={current_percent}%)"
+ )
+
+ return {"job_id": new_job.id, "status": "started"}
+
+ except Exception as e:
+ self.logger.error(f"[JOB START] Failed: {e}", exc_info=True)
+ return None
+
+ def _handle_job_update(
+ self,
+ cloud_serial: str,
+ parsed_payload: Dict[str, Any],
+ ams_data: Optional[List[Dict[str, Any]]]
+ ) -> Optional[Dict[str, Any]]:
+ """Aktualisiert laufenden Job (Slot-Wechsel, Verbrauch)"""
+ job_info = self.active_jobs.get(cloud_serial)
+ if not job_info:
+ return None
+
+ try:
+ with Session(engine) as session:
+ job = session.get(Job, job_info.get("job_id"))
+ if not job:
+ # Job in DB nicht gefunden - cleanup
+ del self.active_jobs[cloud_serial]
+ return None
+
+ # Initialize usage accumulators to avoid UnboundLocalError
+ total_used_mm = 0.0
+ total_used_g = 0.0
+
+ # Aktuelle Layer-Nummer prüfen
+ current_layer = parsed_payload.get("print", {}).get("layer_num") or 0
+
+ # === LAYER-BASIERTER FILAMENT-START ===
+ # Filament-Tracking startet erst bei layer_num >= 1
+ if current_layer >= 1 and not job_info.get("filament_started"):
+ # Guard-Flag setzen (einmalig)
+ job_info["filament_started"] = True
+
+ # Extrahiere Primärquelle
+ current_filament = self._extract_filament_used_mm(parsed_payload)
+
+ if current_filament is not None:
+ # Primärquelle verfügbar
+ job_info["filament_start_mm"] = current_filament
+ job.filament_start_mm = current_filament
+ job_info["using_fallback"] = False
+
+ self.logger.info(
+ f"[FILAMENT START] Tracking started at layer={current_layer}, "
+ f"start_mm={current_filament:.1f} (primary source) for job={job.id}"
+ )
+ else:
+ # Fallback: Berechne aus remain und total_len
+ current_tray = self._find_tray(ams_data or [], job_info.get("slot"))
+ if current_tray and current_tray.get("total_len") is not None:
+ filament_start = self._calculate_filament_from_remain(current_tray)
+ if filament_start is not None:
+ job_info["filament_start_mm"] = filament_start
+ job.filament_start_mm = filament_start
+ job_info["using_fallback"] = True
+
+ # Warnung einmalig
+ if not job_info.get("fallback_warned"):
+ self.logger.warning(
+ "[FILAMENT] Using fallback calculation for job=%s",
+ job.id,
+ )
+ job_info["fallback_warned"] = True
+
+ self.logger.info(
+ f"[FILAMENT START] Tracking started at layer={current_layer}, "
+ f"start_mm={filament_start:.1f} (fallback) for job={job.id}"
+ )
+ else:
+ self.logger.error(
+ f"[FILAMENT START] Failed to calculate fallback for job={job.id}: "
+ f"invalid remain or total_len"
+ )
+ else:
+ # Sauber abbrechen + Fehler loggen
+ self.logger.error(
+ f"[FILAMENT START] Cannot start tracking for job={job.id}: "
+ f"no filament_used_mm and no total_len"
+ )
+ job_info["filament_start_mm"] = None
+
+ # Snapshot + DB synchron
+ if job_info.get("filament_start_mm") is not None:
+ self._save_snapshot(
+ cloud_serial=cloud_serial,
+ printer_id=job_info.get("printer_id"),
+ job_id=job.id,
+ job_name=job.name,
+ slot=job_info.get("slot") or 0,
+ layer_num=current_layer,
+ mc_percent=parsed_payload.get("print", {}).get("mc_percent") or 0,
+ started_at=job.started_at,
+ filament_start_mm=job_info.get("filament_start_mm"),
+ filament_started=True,
+ using_fallback=job_info.get("using_fallback", False),
+ fallback_warned=job_info.get("fallback_warned", False),
+ )
+ session.add(job)
+ session.commit()
+
+ # Aktuellen Slot prüfen
+ ams_block = parsed_payload.get("print", {}).get("ams") or {}
+ current_slot = ams_block.get("tray_tar")
+ if current_slot == 255:
+ current_slot = ams_block.get("tray_now")
+ if current_slot == 255:
+ current_slot = None
+
+ # Slot-Wechsel erkennen
+ if current_slot is not None and job_info.get("slot") != current_slot:
+ # Finalize old slot
+ usage = self._finalize_current(session, job_info)
+ if usage:
+ job_info.setdefault("usages", []).append(usage)
+
+ # Start new slot
+ tray_new = self._find_tray(ams_data or [], current_slot)
+ spool_new = session.exec(
+ select(Spool)
+ .where(Spool.printer_id == job_info.get("printer_id"))
+ .where(Spool.ams_slot == current_slot)
+ ).first()
+
+ job_info.update({
+ "slot": current_slot,
+ "spool_id": spool_new.id if spool_new else None,
+ "start_remain": tray_new.get("remain") if tray_new else None,
+ "last_remain": tray_new.get("remain") if tray_new else None,
+ "start_total_len": tray_new.get("total_len") if tray_new else None
+ })
+
+ if spool_new and not job.spool_id:
+ job.spool_id = spool_new.id
+
+ # Update last_remain + Spulen-Gewicht
+ current_tray = self._find_tray(ams_data or [], job_info.get("slot"))
+ if current_tray:
+ current_remain = current_tray.get("remain")
+
+ # FIX: Bambu Lab's "remain" kann willkürlich steigen/fallen
+ # Wir akzeptieren nur SINKENDE Werte (Filamentverbrauch)
+ # Bei steigenden Werten behalten wir den letzten niedrigen Wert
+ if current_remain is not None:
+ last_remain = job_info.get("last_remain")
+ if last_remain is None or current_remain <= last_remain:
+ # Remain ist gesunken (normaler Verbrauch) oder erster Wert
+ job_info["last_remain"] = current_remain
+ else:
+ # remain ist GESTIEGEN - Bambu Lab Bug!
+ self.logger.warning(
+ f"[JOB UPDATE] Remain INCREASED: {last_remain}% -> {current_remain}% "
+ f"(slot={job_info.get('slot')}, printer={cloud_serial}). "
+ f"Ignoring increase, keeping last value."
+ )
+
+ # Spulen-Gewicht live aktualisieren
+ spool_id = job_info.get("spool_id")
+ if spool_id:
+ spool = session.get(Spool, spool_id)
+ if spool:
+ used_mm, used_g = self._calc_usage(
+ spool,
+ job_info.get("start_remain"),
+ job_info.get("last_remain"),
+ job_info.get("start_total_len")
+ )
+
+ if used_g > 0 and spool.weight_full is not None:
+ spool.weight_current = float(spool.weight_full) - float(used_g)
+ session.add(spool)
+
+ # === FILAMENT-BERECHNUNG (NEUE LOGIK) ===
+ # Verwende Delta-Methode ab layer_num >= 1
+ start_mm_raw = job_info.get("filament_start_mm")
+ start_mm = None
+ if start_mm_raw is not None:
+ try:
+ start_mm = float(start_mm_raw)
+ except (TypeError, ValueError):
+ start_mm = None
+
+ if start_mm is not None:
+ # Filament-Tracking ist aktiv (layer_num >= 1 erreicht)
+ current_filament = self._extract_filament_used_mm(parsed_payload)
+
+ if current_filament is not None:
+ # Primärquelle verfügbar
+ if current_filament < start_mm:
+ self.logger.warning(
+ "[FILAMENT] Current filament_used_mm (%s) is less than start_mm (%s) for job=%s",
+ current_filament,
+ start_mm,
+ job.id,
+ )
+ else:
+ job_filament_used_mm = current_filament - start_mm
+
+ # Wechsel von Fallback zu Primärquelle?
+ if job_info.get("using_fallback"):
+ self.logger.info(
+ f"[FILAMENT] Switched from fallback to primary source for job={job.id}"
+ )
+ job_info["using_fallback"] = False
+
+ # Gewicht berechnen (für Kompatibilität)
+ # Verwende weiterhin remain-Methode für Gewicht, da wir keine absolute Gewichtsquelle haben
+ total_used_g = sum(u.get("used_g", 0) for u in job_info.get("usages", []))
+ if job_info.get("spool_id"):
+ sp = session.get(Spool, job_info.get("spool_id"))
+ if sp:
+ _, current_g = self._calc_usage(
+ sp,
+ job_info.get("start_remain"),
+ job_info.get("last_remain"),
+ job_info.get("start_total_len")
+ )
+ total_used_g += current_g
+
+ job.filament_used_mm = max(0.0, job_filament_used_mm)
+ job.filament_used_g = total_used_g
+ else:
+ # Primärquelle nicht verfügbar, aber Fallback aktiv?
+ if job_info.get("using_fallback"):
+ # Fallback: Berechne weiterhin aus remain-Delta
+ if job_info.get("start_total_len") is None:
+ self.logger.error(
+ "[FILAMENT] Fallback delta unavailable: missing total_len for job=%s",
+ job.id,
+ )
+ # Keine Quelle verfÜgbar, behalte letzten Wert
+ pass
+ else:
+ total_used_mm = sum(u.get("used_mm", 0) for u in job_info.get("usages", []))
+ total_used_g = sum(u.get("used_g", 0) for u in job_info.get("usages", []))
+
+ if job_info.get("spool_id"):
+ sp = session.get(Spool, job_info.get("spool_id"))
+ if sp:
+ current_mm, current_g = self._calc_usage(
+ sp,
+ job_info.get("start_remain"),
+ job_info.get("last_remain"),
+ job_info.get("start_total_len")
+ )
+ total_used_mm += current_mm
+ total_used_g += current_g
+
+ job.filament_used_mm = max(0.0, total_used_mm)
+ job.filament_used_g = total_used_g
+ else:
+ # Keine Quelle verfügbar, behalte letzten Wert
+ pass
+ else:
+ # Filament-Tracking noch nicht gestartet (layer_num < 1)
+ # Setze Verbrauch auf 0
+ job.filament_used_mm = 0.0
+ job.filament_used_g = 0.0
+
+ session.add(job)
+
+ # === SNAPSHOT AKTUALISIEREN (Layer/Progress) ===
+ current_layer = parsed_payload.get("print", {}).get("layer_num") or 0
+ current_percent = parsed_payload.get("print", {}).get("mc_percent") or 0
+ if current_layer > 0 or current_percent > 0:
+ self._save_snapshot(
+ cloud_serial=cloud_serial,
+ printer_id=job_info.get("printer_id"),
+ job_id=job.id,
+ job_name=job.name,
+ slot=job_info.get("slot") or 0,
+ layer_num=current_layer,
+ mc_percent=current_percent,
+ started_at=job.started_at,
+ filament_start_mm=job_info.get("filament_start_mm"),
+ filament_started=job_info.get("filament_started", False),
+ using_fallback=job_info.get("using_fallback", False),
+ fallback_warned=job_info.get("fallback_warned", False),
+ )
+ session.commit()
+
+ return {"job_id": job.id, "status": "updated", "used_g": total_used_g}
+
+ except Exception as e:
+ self.logger.error(f"[JOB UPDATE] Failed: {e}", exc_info=True)
+ return None
+
+ def _handle_job_finish(
+ self,
+ cloud_serial: str,
+ parsed_payload: Dict[str, Any],
+ ams_data: Optional[List[Dict[str, Any]]],
+ current_gstate: str,
+ completed_states: set,
+ failed_states: set,
+ aborted_states: set
+ ) -> Optional[Dict[str, Any]]:
+ """Beendet aktiven Job"""
+ job_info = self.active_jobs.get(cloud_serial)
+ if not job_info:
+ return None
+
+ try:
+ with Session(engine) as session:
+ job = session.get(Job, job_info.get("job_id"))
+ if not job:
+ del self.active_jobs[cloud_serial]
+ return None
+
+ # Initialize accumulators to ensure defined values in all code paths
+ total_used_mm = 0.0
+ total_used_g = 0.0
+ final_used_mm = 0.0
+
+ # Finalize letzten Slot
+ final_tray = self._find_tray(ams_data or [], job_info.get("slot"))
+ if final_tray:
+ job_info["last_remain"] = final_tray.get("remain")
+
+ usage = self._finalize_current(session, job_info)
+ if usage:
+ job_info.setdefault("usages", []).append(usage)
+
+ # === FINALE FILAMENT-BERECHNUNG (NEUE LOGIK) ===
+ if job_info.get("filament_start_mm") is not None:
+ # Filament-Tracking war aktiv
+ final_filament = self._extract_filament_used_mm(parsed_payload)
+
+ if final_filament is not None:
+ # Primärquelle verfügbar
+ try:
+ start_mm = float(job_info["filament_start_mm"])
+ except (TypeError, ValueError):
+ start_mm = None
+ if start_mm is None:
+ self.logger.error(
+ "[FILAMENT] Invalid filament_start_mm for job=%s; cannot finalize from primary source",
+ job.id,
+ )
+ elif final_filament < start_mm:
+ self.logger.warning(
+ "[FILAMENT] Final filament_used_mm (%s) is less than start_mm (%s) for job=%s",
+ final_filament,
+ start_mm,
+ job.id,
+ )
+ else:
+ final_used_mm = final_filament - start_mm
+ else:
+ # Fallback: Berechne aus finalem remain-Delta
+ if job_info.get("start_total_len") is None:
+ self.logger.error(
+ "[FILAMENT] Fallback finalize unavailable: missing total_len for job=%s",
+ job.id,
+ )
+ else:
+ total_used_mm = sum(u.get("used_mm", 0) for u in job_info.get("usages", []))
+ if job_info.get("spool_id"):
+ sp = session.get(Spool, job_info.get("spool_id"))
+ if sp:
+ current_mm, _ = self._calc_usage(
+ sp,
+ job_info.get("start_remain"),
+ job_info.get("last_remain"),
+ job_info.get("start_total_len")
+ )
+ total_used_mm += current_mm
+ final_used_mm = total_used_mm
+ else:
+ # Filament-Tracking nie gestartet (layer_num < 1 während gesamten Jobs)
+ # Berechne Verbrauch aus finalisierten Usages (remain-delta)
+ total_used_mm = sum(u.get("used_mm", 0) for u in job_info.get("usages", []))
+ if job_info.get("spool_id"):
+ sp = session.get(Spool, job_info.get("spool_id"))
+ if sp:
+ # Wenn _finalize_current bereits ein Usage-Entry für den aktuellen
+ # Slot erstellt hat (variable `usage`), dann ist der aktuelle
+ # Verbrauch bereits in `job_info['usages']` enthalten und
+ # darf nicht erneut addiert werden.
+ if not usage:
+ current_mm, _ = self._calc_usage(
+ sp,
+ job_info.get("start_remain"),
+ job_info.get("last_remain"),
+ job_info.get("start_total_len")
+ )
+ total_used_mm += current_mm
+ final_used_mm = total_used_mm
+
+ # Gewicht berechnen (für Kompatibilität)
+ total_used_g = sum(u.get("used_g", 0) for u in job_info.get("usages", []))
+ if job_info.get("spool_id"):
+ sp = session.get(Spool, job_info.get("spool_id"))
+ if sp:
+ # Wenn `usage` existiert, wurde der aktuelle Slot bereits
+ # finalisiert und ist in `usages` enthalten; vermeide Duplikate.
+ if not usage:
+ _, current_g = self._calc_usage(
+ sp,
+ job_info.get("start_remain"),
+ job_info.get("last_remain"),
+ job_info.get("start_total_len")
+ )
+ total_used_g += current_g
+
+ job.filament_used_mm = max(0.0, final_used_mm)
+ job.filament_used_g = total_used_g
+ # Ensure variables used in logging are set
+ total_used_mm = final_used_mm
+ job.finished_at = datetime.utcnow()
+
+ # Status-Mapping: Bambu gcode_state → Job Status
+ if current_gstate in completed_states:
+ job.status = "completed"
+ elif current_gstate in aborted_states:
+ # ABORT, ABORTED, STOPPED, CANCELLED, CANCELED → aborted
+ if current_gstate in {"CANCELLED", "CANCELED"}:
+ job.status = "cancelled"
+ elif current_gstate in {"ABORT", "ABORTED"}:
+ job.status = "aborted"
+ else: # STOPPED
+ job.status = "stopped"
+ elif current_gstate in failed_states:
+ # FAILED, ERROR, EXCEPTION → failed/error/exception
+ if current_gstate == "EXCEPTION":
+ job.status = "exception"
+ elif current_gstate == "ERROR":
+ job.status = "error"
+ else:
+ job.status = "failed"
+ else:
+ # Fallback
+ job.status = "failed"
+
+ # Spool ID setzen (falls noch nicht gesetzt)
+ if not job.spool_id and job_info.get("usages"):
+ first_spool = next((u.get("spool_id") for u in job_info["usages"] if u.get("spool_id")), None)
+ if first_spool:
+ job.spool_id = first_spool
+
+ # WICHTIG: Finales Spulen-Gewicht aktualisieren!
+ # Bei abgebrochenen/fehlgeschlagenen Jobs wird das Gewicht sonst nicht gesetzt
+
+ # Alle verwendeten Spulen aktualisieren
+ updated_spools = set()
+
+ # 1. Multi-Spool: Finalisierte Slots
+ for usage in job_info.get("usages", []):
+ spool_id = usage.get("spool_id")
+ if spool_id and spool_id not in updated_spools:
+ spool = session.get(Spool, spool_id)
+ if spool and spool.weight_current is not None:
+ used_g = usage.get("used_g", 0)
+ # Vom aktuellen Gewicht abziehen, nicht von weight_full!
+ new_weight = max(0, float(spool.weight_current) - used_g)
+ spool.weight_current = new_weight
+ session.add(spool)
+ updated_spools.add(spool_id)
+
+ # 2. Aktueller Slot (falls nicht bereits in usages)
+ current_spool_id = job_info.get("spool_id")
+ if current_spool_id and current_spool_id not in updated_spools:
+ spool = session.get(Spool, current_spool_id)
+ if spool and spool.weight_current is not None:
+ # Berechne Verbrauch für aktuellen Slot
+ used_mm, used_g = self._calc_usage(
+ spool,
+ job_info.get("start_remain"),
+ job_info.get("last_remain"),
+ job_info.get("start_total_len")
+ )
+ if used_g > 0:
+ new_weight = max(0, float(spool.weight_current) - used_g)
+ spool.weight_current = new_weight
+ session.add(spool)
+
+ session.add(job)
+ session.commit()
+ session.refresh(job)
+
+ self.logger.info(
+ f"[JOB FINISH] job={job.id} status={job.status} "
+ f"used_mm={total_used_mm:.1f} used_g={total_used_g:.1f}"
+ )
+
+ # Lade Printer für Notification-Kontext
+ printer = session.get(Printer, job.printer_id)
+ printer_name = printer.name if printer else "Unbekannt"
+
+ # === NOTIFICATIONS TRIGGERN ===
+ # 1. Job failed (FAILED/ERROR/EXCEPTION)
+ if job.status in ["failed", "error", "exception"]:
+ trigger_notification_sync(
+ "job_failed",
+ job_name=job.name,
+ printer_name=printer_name,
+ status=job.status.upper()
+ )
+
+ # 2. Job aborted (ABORTED/STOPPED/CANCELLED)
+ if job.status in ["aborted", "stopped", "cancelled"]:
+ trigger_notification_sync(
+ "job_aborted",
+ job_name=job.name,
+ printer_name=printer_name,
+ status=job.status.upper()
+ )
+
+ # 3. Job ohne Tracking (kein Verbrauch oder keine Spule)
+ if not job.spool_id or total_used_g == 0:
+ trigger_notification_sync(
+ "job_no_tracking",
+ job_name=job.name,
+ printer_name=printer_name
+ )
+
+ # Cleanup RAM
+ del self.active_jobs[cloud_serial]
+
+ # === SNAPSHOT LÖSCHEN (Job ist fertig) ===
+ self._delete_snapshot(cloud_serial, job_info.get("printer_id"))
+
+ return {"job_id": job.id, "status": job.status, "used_g": total_used_g}
+
+ except Exception as e:
+ self.logger.error(f"[JOB FINISH] Failed: {e}", exc_info=True)
+ # Cleanup auch bei Fehler
+ if cloud_serial in self.active_jobs:
+ del self.active_jobs[cloud_serial]
+ return None
+
+
+# Singleton Instance
+job_tracking_service = JobTrackingService()
diff --git a/app/services/live_state.py b/app/services/live_state.py
new file mode 100644
index 0000000..b3467fe
--- /dev/null
+++ b/app/services/live_state.py
@@ -0,0 +1,23 @@
+from datetime import datetime
+from typing import Any, Dict, Optional
+
+# Simple in-memory live state store for printers
+# key: device_id (cloud_serial), value: { device, ts, payload }
+live_state: Dict[str, Dict[str, Any]] = {}
+
+
+def set_live_state(device_id: str, payload: Any) -> None:
+ live_state[device_id] = {
+ "device": device_id,
+ "ts": datetime.utcnow().isoformat(),
+ "payload": payload,
+ }
+ print(f"[live_state] Updated: device={device_id}, keys={len(live_state)}")
+
+
+def get_live_state(device_id: str) -> Optional[Dict[str, Any]]:
+ return live_state.get(device_id)
+
+
+def get_all_live_state() -> Dict[str, Dict[str, Any]]:
+ return live_state
diff --git a/app/services/log_reader.py b/app/services/log_reader.py
new file mode 100644
index 0000000..91ab232
--- /dev/null
+++ b/app/services/log_reader.py
@@ -0,0 +1,74 @@
+import os
+from pathlib import Path
+from typing import List, Dict, Optional
+
+
+MODULE_WHITELIST = {"app", "bambu", "klipper", "mqtt", "scanner", "admin"}
+LOG_ROOT = Path("logs")
+MAX_LIMIT = 1000
+
+
+class LogAccessError(Exception):
+ pass
+
+
+def resolve_log_path(module: str) -> Path:
+ if module not in MODULE_WHITELIST:
+ raise ValueError("Invalid module")
+ # Prevent any path tricks
+ if "/" in module or "\\" in module or ".." in module:
+ raise ValueError("Invalid module")
+ filename = f"{module}.log"
+ return LOG_ROOT / module / filename
+
+
+def list_modules() -> List[str]:
+ return sorted(MODULE_WHITELIST)
+
+
+def _line_matches(line: str, level: Optional[str], search: Optional[str]) -> bool:
+ if level:
+ lvl = level.upper()
+ if lvl not in line.upper():
+ return False
+ if search:
+ if search.lower() not in line.lower():
+ return False
+ return True
+
+
+def read_logs(
+ module: str,
+ limit: int = 200,
+ offset: int = 0,
+ level: Optional[str] = None,
+ search: Optional[str] = None,
+ allow_admin: bool = False,
+) -> Dict[str, object]:
+ if limit > MAX_LIMIT:
+ limit = MAX_LIMIT
+ if offset < 0:
+ offset = 0
+ if module == "admin" and not allow_admin:
+ raise LogAccessError("Admin logs require elevated access")
+ path = resolve_log_path(module)
+ if not path.exists():
+ return {"module": module, "items": [], "count": 0}
+
+ matched: List[str] = []
+ try:
+ with path.open("r", encoding="utf-8") as f:
+ for line in f:
+ line = line.rstrip("\n")
+ if not _line_matches(line, level, search):
+ continue
+ matched.append(line)
+ except FileNotFoundError:
+ return {"module": module, "items": [], "count": 0}
+ except Exception:
+ # Fail safe: do not raise to API level
+ return {"module": module, "items": [], "count": 0}
+
+ total = len(matched)
+ sliced = matched[offset : offset + limit] if offset < total else []
+ return {"module": module, "items": sliced, "count": total}
diff --git a/app/services/mqtt_payload_processor.py b/app/services/mqtt_payload_processor.py
new file mode 100644
index 0000000..990dd21
--- /dev/null
+++ b/app/services/mqtt_payload_processor.py
@@ -0,0 +1,91 @@
+import json
+from typing import Optional, Any, Dict, List
+
+from app.services.ams_parser import parse_ams
+from app.services.job_parser import parse_job
+from app.services.universal_mapper import UniversalMapper
+from app.services.printer_auto_detector import PrinterAutoDetector
+
+
+def process_mqtt_payload(topic: str, payload: str, printer_service_ref: Optional[Any] = None) -> Dict[str, Any]:
+ """Process an MQTT payload and return parsed/derived pieces.
+
+ This function intentionally avoids DB access, websockets, or changing
+ global state. It is defensive and will never raise — errors are
+ swallowed and sensible fallbacks returned.
+
+ Returns a dict with keys: raw, ams, job, mapped, mapped_dict, serial, capabilities
+ """
+ result: Dict[str, Any] = {
+ "raw": None,
+ "ams": [],
+ "job": {},
+ "mapped": None,
+ "mapped_dict": None,
+ "serial": None,
+ "capabilities": None,
+ }
+
+ try:
+ # serial from topic if available (device//...)
+ try:
+ parts = topic.split("/")
+ if len(parts) >= 2 and parts[0] == "device":
+ result["serial"] = parts[1]
+ except Exception:
+ result["serial"] = None
+
+ # parse JSON
+ try:
+ parsed = json.loads(payload)
+ result["raw"] = parsed
+ except Exception:
+ parsed = None
+ result["raw"] = None
+
+ # parse AMS / job only for report topics (keeps parity with callers)
+ try:
+ if parsed is not None and topic.endswith("/report"):
+ try:
+ result["ams"] = parse_ams(parsed) or []
+ except Exception:
+ result["ams"] = []
+ try:
+ result["job"] = parse_job(parsed) or {}
+ except Exception:
+ result["job"] = {}
+ except Exception:
+ # defensive catch; keep defaults
+ pass
+
+ # mapping + capability detection (no DB, no side effects)
+ if parsed is not None:
+ try:
+ detected_model = PrinterAutoDetector.detect_model_from_payload(parsed) or PrinterAutoDetector.detect_model_from_serial(result.get("serial"))
+ except Exception:
+ detected_model = None
+ try:
+ caps = PrinterAutoDetector.detect_capabilities(parsed)
+ except Exception:
+ caps = None
+
+ try:
+ model_for_mapper = detected_model or "UNKNOWN"
+ mapper = UniversalMapper(model_for_mapper)
+ mapped_obj = mapper.map(parsed)
+ result["mapped"] = mapped_obj
+ try:
+ result["mapped_dict"] = mapped_obj.to_dict() if hasattr(mapped_obj, "to_dict") else None
+ except Exception:
+ result["mapped_dict"] = None
+ except Exception:
+ result["mapped"] = None
+ result["mapped_dict"] = None
+
+ result["capabilities"] = caps
+
+ except Exception:
+ # Top-level safety — never raise
+ return result
+
+ return result
diff --git a/app/services/mqtt_runtime.py b/app/services/mqtt_runtime.py
new file mode 100644
index 0000000..4e87ba5
--- /dev/null
+++ b/app/services/mqtt_runtime.py
@@ -0,0 +1,1147 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+import logging
+from datetime import datetime, timezone
+import time
+from threading import Lock
+from typing import Any, Dict, Optional
+import yaml
+import re
+import json
+from pathlib import Path
+
+from app.services.printer_mqtt_client import PrinterMQTTClient
+from uuid import uuid4
+from services.printer_service import PrinterService
+from app.models.printer import Printer
+
+
+@dataclass(frozen=True)
+class _RuntimeConfig:
+ host: str
+ port: int
+ username: str
+ password: Optional[str]
+ client_id: str
+ protocol: str
+ tls: bool
+ model: str
+ cloud_serial: Optional[str] = None
+ printer_id: Optional[str] = None
+ printer_name: Optional[str] = None
+
+
+def _load_mqtt_logging_config() -> dict:
+ """Load mqtt_logging config from config.yaml"""
+ try:
+ config_path = Path(__file__).resolve().parents[2] / "config.yaml"
+ with open(config_path, "r", encoding="utf-8") as f:
+ config = yaml.safe_load(f)
+ return config.get("mqtt_logging", {})
+ except Exception as e:
+ # Fallback defaults if config missing
+ return {
+ "enabled": True,
+ "smart_logging": {
+ "enabled": False,
+ "trigger_type": "command",
+ "trigger_value": "RUNNING",
+ "max_duration_hours": 4,
+ "buffer_minutes": 5
+ },
+ "limits": {
+ "max_size_mb": 100,
+ "max_payload_chars": 1000,
+ "full_payload_enabled": False,
+ "full_payload_file": "logs/mqtt/full_payloads.jsonl"
+ },
+ "ams_climate": {
+ "enabled": True,
+ "log_file": "logs/mqtt/ams_climate.jsonl",
+ "max_size_mb": 50
+ }
+ }
+
+
+_client_instance: Optional[PrinterMQTTClient] = None
+_client_config: Optional[_RuntimeConfig] = None
+_connected_since: Optional[datetime] = None
+
+# Global runtime state exposed to status endpoint. Updated on connect and on_message.
+_runtime_state: Dict[str, Optional[Any]] = {
+ "connected": False,
+ "cloud_serial": None,
+ "connected_since": None,
+ "last_seen": None,
+ "broker": None,
+ "port": None,
+ "client_id": None,
+ "protocol": None,
+ "qos": 1,
+}
+
+_topic_stats_lock = Lock()
+_topic_stats: Dict[str, Dict[str, Any]] = {}
+_transport_connected_since: Optional[datetime] = None
+
+# Explicit runtime subscriptions (independent of received messages)
+_subscribed_topics_lock = Lock()
+_subscribed_topics: set[str] = set()
+
+# Live message buffer (last 50 messages for UI display)
+_messages_lock = Lock()
+_messages_buffer: list[Dict[str, Any]] = []
+_messages_max_size = 50
+
+# MQTT Logging Config (loaded once at import)
+_mqtt_logging_config = _load_mqtt_logging_config()
+
+# Smart Logging State
+_smart_logging_active = False # Ist Smart Logging gerade aktiv?
+_smart_logging_start_time: Optional[datetime] = None # Wann gestartet?
+_smart_logging_buffer_timer: Optional[float] = None # Buffer-Ende Timestamp
+_smart_logging_lock = Lock() # Thread-Safety
+
+# AMS Climate Whitelist System
+_ams_climate_logger: Optional[Any] = None
+_ams_climate_lock = Lock()
+
+# Full Payload Logger (optional)
+_full_payload_logger: Optional[Any] = None
+_full_payload_lock = Lock()
+
+
+# ===================================================================
+# SMART LOGGING FUNCTIONS
+# ===================================================================
+
+def _should_log_message(topic: str, payload: str) -> bool:
+ """Check if message should be logged based on smart logging config"""
+ config = _mqtt_logging_config
+
+ # Basic logging disabled?
+ if not config.get("enabled", True):
+ return False
+
+ # Smart logging disabled? -> Always log
+ smart_config = config.get("smart_logging", {})
+ if not smart_config.get("enabled", False):
+ return True
+
+ # Smart logging enabled -> Check if active
+ with _smart_logging_lock:
+ # If in buffer period, keep logging
+ if _smart_logging_buffer_timer:
+ if time.time() < _smart_logging_buffer_timer:
+ return True
+ else:
+ # Buffer expired, stop logging
+ return False
+
+ # If actively logging, check max duration
+ if _smart_logging_active and _smart_logging_start_time:
+ max_hours = smart_config.get("max_duration_hours", 4)
+ elapsed = (datetime.now(timezone.utc) - _smart_logging_start_time).total_seconds() / 3600
+ if elapsed > max_hours:
+ _stop_smart_logging("Max duration reached")
+ return False
+ return True
+
+ # Not active
+ return False
+
+
+def _check_start_trigger(payload: str) -> bool:
+ """Check if payload contains start trigger"""
+ smart_config = _mqtt_logging_config.get("smart_logging", {})
+ trigger_type = smart_config.get("trigger_type", "command")
+ trigger_value = smart_config.get("trigger_value", "RUNNING")
+
+ if trigger_type == "command":
+ # Case-insensitive search for command string
+ return trigger_value.lower() in str(payload).lower()
+
+ elif trigger_type == "temperature":
+ # Search for any temperature >= trigger_value
+ # Pattern matches both "nozzle_temp" and "nozzle_temper" variants
+ temp_pattern = r'"(?:nozzle_temp(?:er)?|bed_temp(?:er)?|temp)"\s*:\s*"?(\d+\.?\d*)"?'
+ matches = re.findall(temp_pattern, str(payload), re.IGNORECASE)
+
+ try:
+ trigger_temp = float(trigger_value)
+ for match in matches:
+ if float(match) >= trigger_temp:
+ return True
+ except (ValueError, TypeError):
+ pass
+
+ elif trigger_type == "humidity":
+ # Search for any humidity >= trigger_value
+ # Pattern matches "humidity" field
+ humidity_pattern = r'"humidity"\s*:\s*"?(\d+\.?\d*)"?'
+ matches = re.findall(humidity_pattern, str(payload), re.IGNORECASE)
+
+ try:
+ trigger_humidity = float(trigger_value)
+ for match in matches:
+ if float(match) >= trigger_humidity:
+ return True
+ except (ValueError, TypeError):
+ pass
+
+ return False
+
+
+def _check_stop_trigger(payload: str) -> bool:
+ """Check if payload contains finish/error/complete indicator"""
+ payload_lower = str(payload).lower()
+
+ # Common stop keywords (erweiterbar)
+ stop_keywords = [
+ "finish", "complete", "done", "error",
+ "failed", "cancelled", "aborted", "stopped"
+ ]
+
+ return any(kw in payload_lower for kw in stop_keywords)
+
+
+def _start_smart_logging(reason: str = "trigger detected"):
+ """Start smart logging session"""
+ global _smart_logging_active, _smart_logging_start_time, _smart_logging_buffer_timer
+
+ from datetime import datetime, timezone
+ with _smart_logging_lock:
+ if not _smart_logging_active:
+ _smart_logging_active = True
+ _smart_logging_start_time = datetime.now(timezone.utc)
+ _smart_logging_buffer_timer = None
+
+ logger = logging.getLogger("3D_drucker")
+ logger.info(f"Smart Logging STARTED: {reason}")
+ print(f"[MQTT Smart Logging] STARTED: {reason}")
+
+
+def _stop_smart_logging(reason: str = "trigger detected"):
+ """Stop smart logging with buffer period"""
+ global _smart_logging_active, _smart_logging_buffer_timer
+
+ smart_config = _mqtt_logging_config.get("smart_logging", {})
+ buffer_minutes = smart_config.get("buffer_minutes", 5)
+
+ from datetime import datetime, timezone
+ with _smart_logging_lock:
+ if _smart_logging_active:
+ _smart_logging_active = False
+ # Set buffer timer (keep logging for N more minutes)
+ _smart_logging_buffer_timer = time.time() + (buffer_minutes * 60)
+
+ logger = logging.getLogger("3D_drucker")
+ logger.info(f"Smart Logging STOPPED: {reason} (buffer: {buffer_minutes}min)")
+ print(f"[MQTT Smart Logging] STOPPED: {reason} (buffer: {buffer_minutes}min)")
+
+
+# ===================================================================
+# AMS CLIMATE WHITELIST SYSTEM
+# ===================================================================
+
+def _is_ams_climate_data(topic: str, payload: str) -> bool:
+ """
+ Check if this message contains AMS climate data.
+ AMS data is always logged, regardless of smart logging state.
+ """
+ # Quick check: Must be device report
+ if "/report" not in topic:
+ return False
+
+ # Check if payload contains AMS structure
+ if '"ams"' in payload and ('"temp"' in payload or '"humidity"' in payload):
+ return True
+
+ return False
+
+
+def _extract_ams_climate(payload: str) -> Optional[Dict[str, Any]]:
+ """
+ Extract AMS climate data from payload.
+
+ Returns dict with:
+ - temperature: float
+ - humidity: int (processed value)
+ - humidity_raw: int (raw sensor value)
+
+ Returns None if extraction fails.
+ """
+ try:
+ data = json.loads(payload)
+
+ # Navigate to AMS data: print.ams.ams[0]
+ ams_list = data.get("print", {}).get("ams", {}).get("ams", [])
+
+ if not ams_list or len(ams_list) == 0:
+ return None
+
+ # Get first AMS unit
+ ams_unit = ams_list[0]
+
+ temp_str = ams_unit.get("temp")
+ humidity_str = ams_unit.get("humidity")
+ humidity_raw_str = ams_unit.get("humidity_raw")
+
+ # Convert to numbers
+ result = {}
+
+ if temp_str:
+ result["temperature"] = float(temp_str)
+
+ if humidity_str:
+ result["humidity"] = int(humidity_str)
+
+ if humidity_raw_str:
+ result["humidity_raw"] = int(humidity_raw_str)
+
+ # Only return if we got at least temp or humidity
+ if result:
+ return result
+
+ return None
+
+ except (json.JSONDecodeError, ValueError, KeyError, TypeError):
+ return None
+
+
+def _get_ams_climate_logger():
+ """Get or create logger for AMS climate data"""
+ global _ams_climate_logger
+
+ with _ams_climate_lock:
+ if _ams_climate_logger:
+ return _ams_climate_logger
+
+ ams_config = _mqtt_logging_config.get("ams_climate", {})
+
+ if not ams_config.get("enabled", True):
+ return None
+
+ log_file = ams_config.get("log_file", "logs/mqtt/ams_climate.jsonl")
+
+ # Ensure directory exists
+ log_dir = Path(log_file).parent
+ log_dir.mkdir(parents=True, exist_ok=True)
+
+ # Create logger
+ from logging.handlers import RotatingFileHandler
+
+ logger = logging.getLogger("AMS_Climate")
+ logger.setLevel(logging.INFO)
+ logger.propagate = False # Don't propagate to root
+
+ # RotatingFileHandler
+ max_size_mb = ams_config.get("max_size_mb", 50)
+ handler = RotatingFileHandler(
+ log_file,
+ maxBytes=max_size_mb * 1024 * 1024,
+ backupCount=3,
+ encoding="utf-8"
+ )
+ handler.setFormatter(logging.Formatter("%(message)s")) # Just the message (JSONL)
+ logger.addHandler(handler)
+
+ _ams_climate_logger = logger
+ return logger
+
+
+def _log_ams_climate(payload: str):
+ """
+ Log AMS climate data to separate file.
+ This always logs, regardless of smart logging state.
+ """
+ from datetime import datetime, timezone
+ try:
+ climate_data = _extract_ams_climate(payload)
+
+ if not climate_data:
+ return # No valid data
+
+ logger = _get_ams_climate_logger()
+
+ if not logger:
+ return # AMS logging disabled
+
+ # Create log entry
+ log_entry = {
+ "ts": datetime.now(timezone.utc).isoformat(),
+ **climate_data
+ }
+
+ logger.info(json.dumps(log_entry, ensure_ascii=False))
+
+ except Exception:
+ # Silent fail - don't break main logging
+ pass
+
+
+# ===================================================================
+# FULL PAYLOAD LOGGER (OPTIONAL)
+# ===================================================================
+
+def _get_full_payload_logger():
+ """Get or create logger for full payloads"""
+ global _full_payload_logger
+
+ with _full_payload_lock:
+ if _full_payload_logger:
+ return _full_payload_logger
+
+ limits = _mqtt_logging_config.get("limits", {})
+
+ if not limits.get("full_payload_enabled", False):
+ return None
+
+ log_file = limits.get("full_payload_file", "logs/mqtt/full_payloads.jsonl")
+
+ # Ensure directory exists
+ log_dir = Path(log_file).parent
+ log_dir.mkdir(parents=True, exist_ok=True)
+
+ # Create logger
+ from logging.handlers import RotatingFileHandler
+
+ logger = logging.getLogger("MQTT_FullPayload")
+ logger.setLevel(logging.INFO)
+ logger.propagate = False
+
+ max_size_mb = limits.get("max_size_mb", 100)
+ handler = RotatingFileHandler(
+ log_file,
+ maxBytes=max_size_mb * 1024 * 1024,
+ backupCount=3,
+ encoding="utf-8"
+ )
+ handler.setFormatter(logging.Formatter("%(message)s"))
+ logger.addHandler(handler)
+
+ _full_payload_logger = logger
+ return logger
+
+
+def _write_full_payload(topic: str, payload: str):
+ """Write full (untruncated) payload to separate file"""
+ try:
+ logger = _get_full_payload_logger()
+
+ if not logger:
+ return
+
+ log_entry = {
+ "ts": datetime.now(timezone.utc).isoformat(),
+ "topic": topic,
+ "payload": payload # FULL payload, no truncation
+ }
+ logger.info(json.dumps(log_entry, ensure_ascii=False))
+ except Exception:
+ pass
+
+
+# ===================================================================
+# STATUS HELPER
+# ===================================================================
+
+def get_smart_logging_status() -> Dict[str, Any]:
+ """
+ Get current smart logging status.
+ Useful for displaying in UI or API.
+ """
+ with _smart_logging_lock:
+ status = {
+ "enabled": _mqtt_logging_config.get("smart_logging", {}).get("enabled", False),
+ "active": _smart_logging_active,
+ "start_time": _smart_logging_start_time.isoformat() if _smart_logging_start_time else None,
+ "in_buffer": _smart_logging_buffer_timer is not None and time.time() < _smart_logging_buffer_timer,
+ }
+
+ if _smart_logging_active and _smart_logging_start_time:
+ elapsed_seconds = (datetime.now(timezone.utc) - _smart_logging_start_time).total_seconds()
+ status["elapsed_hours"] = round(elapsed_seconds / 3600, 2)
+
+ return status
+
+
+def _build_printer_connect_payload(printer: Printer) -> Optional[Dict[str, Any]]:
+ if not printer:
+ return None
+ if getattr(printer, "printer_type", "") not in ("bambu", "bambu_lab"):
+ return None
+ ip = getattr(printer, "ip_address", None)
+ api_key = getattr(printer, "api_key", None)
+ cloud_serial = getattr(printer, "cloud_serial", None)
+ if not ip or not api_key or not cloud_serial:
+ return None
+ # WICHTIG: PrinterMQTTClient verwendet IMMER Port 8883 (TLS)
+ # Auch wenn in der DB ein anderer Port gespeichert ist (z.B. 6000 für Tests)
+ port = 8883
+
+ # MQTT-Protokoll bestimmen: Modell-basiert mit Fallback
+ protocol = str(getattr(printer, "mqtt_version", None) or "")
+
+ # Falls nicht gesetzt: Modell-basierte Auto-Erkennung
+ if not protocol:
+ from app.services.printer_auto_detector import PrinterAutoDetector
+ model = getattr(printer, "model", None)
+ if model and model.upper() in PrinterAutoDetector.MODEL_MQTT_PROTOCOL:
+ protocol = PrinterAutoDetector.MODEL_MQTT_PROTOCOL[model.upper()]
+ logger = logging.getLogger("mqtt_runtime")
+ logger.info(f"[MQTT] Modell '{model}' → Auto-Protokoll {protocol}")
+ else:
+ # Letzter Fallback: MQTT v5
+ protocol = "5"
+
+ client_id = f"filamenthub_{getattr(printer, 'name', 'printer')}_{str(getattr(printer, 'id', ''))[:6]}"
+ return {
+ "host": ip,
+ "port": port,
+ "client_id": client_id,
+ "username": "bblp",
+ "password": api_key,
+ "protocol": protocol,
+ "tls": True,
+ "cloud_serial": cloud_serial,
+ "printer_id": getattr(printer, "id", ""),
+ "printer_name": getattr(printer, "name", None) or client_id,
+ "printer_model": getattr(printer, "model", "X1C"),
+ }
+
+
+def apply_auto_connect(printer: Optional[Printer]) -> Dict[str, Any]:
+ """Apply the auto_connect flag for a printer by connecting or disconnecting."""
+ logger = logging.getLogger("mqtt_runtime")
+ if not printer:
+ logger.warning("apply_auto_connect called without printer data")
+ return {"success": False, "message": "printer missing"}
+
+ if getattr(printer, "auto_connect", False):
+ payload = _build_printer_connect_payload(printer)
+ if not payload:
+ logger.warning("Printer %s missing data, cannot auto-connect", getattr(printer, "id", ""))
+ return {"success": False, "message": "printer missing required data"}
+ logger.info("Auto-connect enabled → connecting printer %s (%s)", getattr(printer, "name", printer.id), printer.id)
+ result = connect(payload)
+ if not isinstance(result, dict):
+ logger.error("Auto-connect failed: unexpected result type %s", type(result).__name__)
+ return {"success": False, "message": "invalid runtime response"}
+ if not result.get("success"):
+ logger.error(
+ "Auto-connect failed for printer %s: %s",
+ getattr(printer, "id", ""),
+ result.get("error") or "connect returned false",
+ )
+ return result
+
+ logger.info("Auto-connect disabled → disconnecting runtime (printer %s)", getattr(printer, "id", ""))
+ return disconnect()
+
+
+def _add_message(topic: str, payload: str, timestamp: datetime) -> None:
+ """Add message to ring buffer (FIFO, max 50)."""
+ t = _normalize_topic(topic)
+ if not t:
+ return
+
+ msg_entry = {
+ "topic": t,
+ "payload": payload[:200] if payload else "", # Truncate long payloads
+ "timestamp": _iso_utc(timestamp),
+ }
+
+ with _messages_lock:
+ _messages_buffer.append(msg_entry)
+ # Keep only last 50 messages
+ if len(_messages_buffer) > _messages_max_size:
+ _messages_buffer.pop(0)
+
+
+def get_messages(limit: int = 50) -> list[Dict[str, Any]]:
+ """Get last N messages (most recent first)."""
+ with _messages_lock:
+ # Return in reverse order (newest first)
+ return list(reversed(_messages_buffer[-limit:]))
+
+
+def _iso_utc(dt: datetime) -> str:
+ if dt.tzinfo is None:
+ dt = dt.replace(tzinfo=timezone.utc)
+ return dt.astimezone(timezone.utc).isoformat().replace("+00:00", "Z")
+
+
+def _reset_topic_stats() -> None:
+ with _topic_stats_lock:
+ _topic_stats.clear()
+
+
+def _reset_subscribed_topics() -> None:
+ with _subscribed_topics_lock:
+ _subscribed_topics.clear()
+
+
+def _normalize_topic(topic: str) -> str:
+ return (topic or "").strip()
+
+
+def register_subscription(topic: str) -> None:
+ t = _normalize_topic(topic)
+ if not t:
+ return
+ with _subscribed_topics_lock:
+ _subscribed_topics.add(t)
+
+
+def unregister_subscription(topic: str) -> None:
+ t = _normalize_topic(topic)
+ if not t:
+ return
+ with _subscribed_topics_lock:
+ _subscribed_topics.discard(t)
+
+
+def clear_subscriptions() -> None:
+ _reset_subscribed_topics()
+
+
+def _record_topic(topic: str) -> None:
+ t = _normalize_topic(topic)
+ if not t:
+ return
+ now = datetime.now(timezone.utc)
+ with _topic_stats_lock:
+ entry = _topic_stats.get(t)
+ if entry is None:
+ _topic_stats[t] = {"count": 1, "last_seen": now}
+ return
+ entry["count"] = int(entry.get("count", 0)) + 1
+ entry["last_seen"] = now
+
+
+def _aggregate_topic_stats() -> Dict[str, Any]:
+ """Summarize topic stats for status response."""
+ with _topic_stats_lock:
+ total_topics = len(_topic_stats)
+ total_messages = sum(int(v.get("count", 0)) for v in _topic_stats.values())
+ last_dt = None
+ for v in _topic_stats.values():
+ dt = v.get("last_seen")
+ if isinstance(dt, datetime):
+ if last_dt is None or dt > last_dt:
+ last_dt = dt
+
+ return {
+ "subscriptions_count": len(_subscribed_topics),
+ "topics_count": len(_subscribed_topics),
+ "message_count": total_messages,
+ "last_message_time": _iso_utc(last_dt) if last_dt else None,
+ }
+
+
+def _format_uptime(connected_since: Optional[str]) -> Optional[str]:
+ if not connected_since:
+ return None
+ try:
+ # Accept ISO with trailing Z or without
+ dt = datetime.fromisoformat(str(connected_since).replace("Z", "+00:00"))
+ delta = datetime.now(timezone.utc) - dt.astimezone(timezone.utc)
+ total_seconds = int(delta.total_seconds())
+ hours, remainder = divmod(total_seconds, 3600)
+ minutes, seconds = divmod(remainder, 60)
+ return f"{hours:02d}:{minutes:02d}:{seconds:02d}"
+ except Exception:
+ return None
+
+
+def topics() -> Dict[str, Any]:
+ """Return the list of subscribed topics (not message stats)."""
+ connected_flag = bool(_runtime_state.get("connected"))
+
+ with _subscribed_topics_lock:
+ items = sorted(list(_subscribed_topics))
+ count = len(items)
+
+ return {
+ "connected": connected_flag,
+ "items": items,
+ "count": count,
+ }
+
+
+def _is_connected(client: PrinterMQTTClient) -> bool:
+ try:
+ inner = getattr(client, "client", None)
+ if inner is not None and hasattr(inner, "is_connected"):
+ return bool(inner.is_connected())
+ except Exception:
+ pass
+
+ connected_flag = getattr(client, "connected", None)
+ if isinstance(connected_flag, bool):
+ return connected_flag
+
+ return False
+
+
+def connect(config: Dict[str, Any]) -> Dict[str, Any]:
+ """Create exactly one runtime PrinterMQTTClient instance and connect.
+
+ Expected config keys (minimal):
+ - host (str)
+ Optional:
+ - port (int, default 8883) (PrinterMQTTClient currently connects to 8883 internally)
+ - username (str, default 'bblp')
+ - password (str | None)
+ - client_id (str, default 'filamenthub_runtime')
+ - protocol (str, default '311') # '5' | '311' | '31'
+ - tls (bool, default True)
+ - model (str, default 'X1C')
+ """
+ global _client_instance, _client_config, _connected_since
+
+ try:
+ _reset_topic_stats()
+
+ host = str(config.get("host") or config.get("broker") or config.get("ip") or "").strip()
+ if not host:
+ return {"success": False, "error": "missing host"}
+
+ port_raw = config.get("port", 8883)
+ try:
+ port = int(port_raw)
+ except Exception:
+ port = 8883
+
+ tls = bool(config.get("tls", True))
+ if not tls:
+ return {"success": False, "error": "tls must be enabled for PrinterMQTTClient"}
+
+ if port != 8883:
+ return {"success": False, "error": "port must be 8883 for PrinterMQTTClient"}
+
+ username = str(config.get("username") or "bblp")
+ password = config.get("password")
+ password = None if password in ("", None) else str(password)
+
+ client_id = str(config.get("client_id") or "filamenthub_runtime")
+ protocol = str(config.get("protocol") or "311")
+ model = str(config.get("model") or "X1C")
+
+ # 1) If an instance exists and is connected -> disconnect first
+ if _client_instance is not None and _is_connected(_client_instance):
+ disconnect()
+
+ # Initialize runtime state for UI status (do NOT mark connected True here).
+ try:
+ _runtime_state.update({
+ "connected": False,
+ "cloud_serial": config.get("cloud_serial"),
+ "connected_since": None,
+ "last_seen": None,
+ "broker": host,
+ "port": port,
+ "client_id": client_id,
+ "protocol": protocol,
+ })
+ except Exception:
+ pass
+
+ # 2) Create new instance (Source of Truth)
+ printer_service = PrinterService()
+
+ # Register printer in PrinterService by cloud_serial if provided
+ cloud_serial = config.get("cloud_serial")
+ try:
+ printer_name = config.get("printer_name") or client_id
+ printer_model = config.get("printer_model") or model
+ printer_id_cfg = config.get("printer_id") or ""
+ if cloud_serial:
+ try:
+ printer_service.register_printer(key=cloud_serial, name=printer_name, model=printer_model, printer_id=printer_id_cfg, source="mqtt_connect")
+ except Exception:
+ pass
+ except Exception:
+ pass
+
+ # Track default subscription (device//report) as soon as config is known.
+ if cloud_serial:
+ try:
+ register_subscription(f"device/{cloud_serial}/report")
+ except Exception:
+ pass
+
+ # Log client initialization
+ print(f"[MQTT] INIT client_id={client_id} model={model}")
+
+ client = PrinterMQTTClient(
+ ip=host,
+ model=model,
+ name=client_id,
+ mqtt_version=protocol,
+ printer_service=printer_service,
+ username=username,
+ password=password,
+ debug=False,
+ )
+
+ # Wrap on_message to record topics without changing existing behavior.
+ try:
+ inner = getattr(client, "client", None)
+ if inner is not None:
+ prev_on_message = getattr(inner, "on_message", None)
+
+ def _runtime_on_message(c, u, msg):
+ try:
+ topic = getattr(msg, "topic", "")
+ _record_topic(topic)
+
+ # Get payload
+ payload = getattr(msg, "payload", b"")
+ if isinstance(payload, bytes):
+ try:
+ payload = payload.decode("utf-8", errors="replace")
+ except Exception:
+ payload = str(payload)
+
+ # Add message to live buffer for UI
+ from datetime import datetime, timezone
+ _add_message(topic, payload, datetime.now(timezone.utc))
+
+ # ===================================================================
+ # AMS CLIMATE WHITELIST - Always log, regardless of smart logging
+ # ===================================================================
+ if _is_ams_climate_data(topic, payload):
+ _log_ams_climate(payload)
+
+ # ===================================================================
+ # SMART LOGGING: Check triggers
+ # ===================================================================
+ if _mqtt_logging_config.get("smart_logging", {}).get("enabled", False):
+ if _check_start_trigger(payload):
+ _start_smart_logging("Start trigger found")
+ if _check_stop_trigger(payload):
+ _stop_smart_logging("Stop trigger found")
+
+ # ===================================================================
+ # MAIN LOGGING: Check if we should log this message
+ # ===================================================================
+ if _should_log_message(topic, payload):
+ # Get truncation limits from config
+ limits = _mqtt_logging_config.get("limits", {})
+ max_payload_chars = limits.get("max_payload_chars", 1000)
+ full_payload_enabled = limits.get("full_payload_enabled", False)
+
+ # Truncate payload for main log
+ payload_short = payload
+ if len(payload) > max_payload_chars:
+ payload_short = payload[:max_payload_chars] + "...[truncated]"
+
+ # Write to standard log (truncated)
+ try:
+ mqtt_logger = logging.getLogger("3D_drucker")
+ log_entry = {
+ "ts": datetime.now(timezone.utc).isoformat(),
+ "topic": topic,
+ "payload": payload_short
+ }
+ mqtt_logger.info(json.dumps(log_entry, ensure_ascii=False))
+ except Exception:
+ pass
+
+ # Write to full payload file if enabled
+ if full_payload_enabled:
+ _write_full_payload(topic, payload)
+ except Exception:
+ pass
+ # Update runtime state immediately when a message for a device arrives
+ try:
+ topic = getattr(msg, "topic", "") or ""
+ parts = topic.split("/")
+ cloud_serial = None
+ if len(parts) > 1 and parts[0] == "device":
+ cloud_serial = parts[1]
+ if cloud_serial:
+ try:
+ from datetime import datetime, timezone
+ now = datetime.now(timezone.utc)
+ ts = _iso_utc(now)
+ # Unconditionally mark runtime connected when any report arrives
+ _runtime_state["connected"] = True
+ _runtime_state["last_seen"] = ts
+ _runtime_state["cloud_serial"] = cloud_serial
+ if not _runtime_state.get("connected_since"):
+ _runtime_state["connected_since"] = ts
+ # keep broker/client_id/protocol as previously set
+
+ # Update live_state for JSON Inspector
+ if topic.endswith("/report"):
+ try:
+ from app.services.live_state import set_live_state
+ payload_str = getattr(msg, "payload", b"")
+ if isinstance(payload_str, bytes):
+ payload_str = payload_str.decode("utf-8", errors="replace")
+ parsed = json.loads(payload_str)
+ set_live_state(cloud_serial, parsed)
+
+ # === AMS SYNC: Auto-create/update spools ===
+ ams_data_parsed = None
+ printer_id_for_tracking = None
+
+ try:
+ from app.services.ams_parser import parse_ams
+ from app.services.ams_sync import sync_ams_slots
+ from app.database import engine
+ from sqlmodel import Session, select
+ from app.models.printer import Printer
+
+ ams_data_parsed = parse_ams(parsed)
+ if ams_data_parsed:
+ # Find printer by cloud_serial
+ with Session(engine) as session:
+ printer = session.exec(
+ select(Printer).where(Printer.cloud_serial == cloud_serial)
+ ).first()
+
+ printer_id_for_tracking = printer.id if printer else None
+
+ updated_count = sync_ams_slots(
+ [dict(unit) for unit in ams_data_parsed],
+ printer_id=printer_id_for_tracking,
+ auto_create=True
+ )
+ if updated_count > 0:
+ logger = logging.getLogger("mqtt_runtime")
+ logger.info(f"[AMS SYNC] Updated {updated_count} spools for printer {printer_id_for_tracking or cloud_serial}")
+ except Exception as e:
+ logger = logging.getLogger("mqtt_runtime")
+ logger.error(f"[AMS SYNC] Failed: {e}")
+
+ # === JOB TRACKING (NEW: Centralized Service) ===
+ try:
+ from app.services.job_tracking_service import job_tracking_service
+ from app.database import engine
+ from sqlmodel import Session, select
+ from app.models.printer import Printer
+
+ # Printer ID ermitteln (falls nicht schon von AMS Sync)
+ if not printer_id_for_tracking:
+ with Session(engine) as session:
+ printer = session.exec(
+ select(Printer).where(Printer.cloud_serial == cloud_serial)
+ ).first()
+ printer_id_for_tracking = printer.id if printer else None
+
+ if printer_id_for_tracking:
+ # Zentraler Service verarbeitet Job-Tracking
+ result = job_tracking_service.process_message(
+ cloud_serial=cloud_serial,
+ parsed_payload=parsed,
+ printer_id=printer_id_for_tracking,
+ ams_data=[dict(unit) for unit in ams_data_parsed] if ams_data_parsed else None
+ )
+
+ if result and result.get("status") == "started":
+ logger = logging.getLogger("mqtt_runtime")
+ logger.info(f"[JOB TRACKING] Job started: {result.get('job_id')}")
+ elif result and result.get("status") in ["completed", "failed", "cancelled", "aborted"]:
+ logger = logging.getLogger("mqtt_runtime")
+ logger.info(f"[JOB TRACKING] Job finished: {result.get('job_id')} status={result.get('status')} used_g={result.get('used_g', 0):.1f}g")
+
+ except Exception as job_err:
+ logger = logging.getLogger("mqtt_runtime")
+ logger.error(f"[JOB TRACKING] Failed: {job_err}", exc_info=True)
+ except Exception as e:
+ print(f"[mqtt_runtime] ERROR set_live_state: {e}")
+ except Exception:
+ pass
+ except Exception:
+ pass
+ if callable(prev_on_message):
+ return prev_on_message(c, u, msg)
+
+ inner.on_message = _runtime_on_message
+ # Set userdata for the underlying paho client so on_connect can
+ # subscribe to the specific cloud_serial topic.
+ try:
+ connection_id = str(uuid4())
+ cloud_serial = config.get("cloud_serial")
+ inner.user_data_set({
+ "connection_id": connection_id,
+ "client_id": client_id,
+ "cloud_serial": cloud_serial,
+ })
+ except Exception:
+ # Non-fatal: don't break connect if userdata cannot be set
+ pass
+ except Exception:
+ # Never fail connect due to stats wiring
+ pass
+
+ # 3) Connect
+ print(f"[MQTT] CONNECT host={host} port={port} tls={tls} user={username}")
+ try:
+ logging.getLogger("3D_drucker").info(f"connect host={host} port={port} tls={tls} client_id={client_id}")
+ except Exception:
+ pass
+ client.connect()
+
+ # 4) Set instance immediately so disconnect() can clean up on timeout
+ _client_instance = client
+ _client_config = _RuntimeConfig(
+ host=host,
+ port=port,
+ username=username,
+ password=password,
+ client_id=client_id,
+ protocol=protocol,
+ tls=tls,
+ model=model,
+ cloud_serial=config.get("cloud_serial"),
+ printer_id=config.get("printer_id"),
+ printer_name=config.get("printer_name"),
+ )
+
+ # 5) Wait (max 5s) until the underlying paho client is actually connected.
+ # NOTE: Do NOT consider 'connected' == application-level connected here.
+ # The UI should rely solely on _runtime_state which will be updated
+ # when a device//report arrives.
+ deadline = time.monotonic() + 5.0
+ while time.monotonic() < deadline:
+ if _is_connected(client):
+ # Underlying MQTT transport connected — report connect success
+ # to caller. Mark runtime_state as connected so UI sees state
+ # immediately, even bevor device//report eintrifft.
+ now = datetime.now(timezone.utc)
+ ts = _iso_utc(now)
+ _transport_connected_since = now
+ try:
+ _runtime_state.update({
+ "connected": True,
+ "connected_since": ts,
+ "last_seen": ts if not _runtime_state.get("last_seen") else _runtime_state.get("last_seen"),
+ })
+ except Exception:
+ pass
+ try:
+ logging.getLogger("3D_drucker").info(f"connected host={host} client_id={client_id}")
+ except Exception:
+ pass
+ return {
+ "success": True,
+ "connected": True,
+ "host": host,
+ "port": port,
+ "client_id": client_id,
+ "protocol": protocol,
+ }
+ time.sleep(0.1)
+
+ # If we get here, connect did not complete deterministically.
+ try:
+ logging.getLogger("MQTT").error(f"connect timeout host={host} client_id={client_id}")
+ except Exception:
+ pass
+ disconnect()
+ # Also ensure runtime state reflects disconnected
+ try:
+ _runtime_state.update({"connected": False, "connected_since": None})
+ except Exception:
+ pass
+ return {
+ "success": False,
+ "error": "connect timeout (5s) - broker unreachable or auth failed",
+ }
+
+ except Exception as exc:
+ # 4) Exception: reset instance
+ _client_instance = None
+ _client_config = None
+ _connected_since = None
+ try:
+ logging.getLogger("3D_drucker").error(f"connect error: {exc}")
+ except Exception:
+ pass
+ return {"success": False, "error": str(exc)}
+
+
+def disconnect() -> Dict[str, Any]:
+ """Disconnect the single runtime instance, if any."""
+ global _client_instance, _client_config, _connected_since, _transport_connected_since
+
+ if _client_instance is None:
+ _reset_topic_stats()
+ return {"success": True, "connected": False, "note": "already disconnected"}
+
+ try:
+ if hasattr(_client_instance, "disconnect"):
+ # If the class ever grows a proper API, prefer it.
+ _client_instance.disconnect() # type: ignore[attr-defined]
+ else:
+ inner = getattr(_client_instance, "client", None)
+ if inner is not None:
+ try:
+ if hasattr(inner, "loop_stop"):
+ inner.loop_stop()
+ finally:
+ if hasattr(inner, "disconnect"):
+ inner.disconnect()
+ finally:
+ _client_instance = None
+ _client_config = None
+ _connected_since = None
+ _transport_connected_since = None
+ _reset_topic_stats()
+ _reset_subscribed_topics()
+ try:
+ # mark runtime as disconnected; keep broker/cloud_serial metadata
+ _runtime_state.update({"connected": False, "connected_since": None})
+ except Exception:
+ pass
+ try:
+ logging.getLogger("3D_drucker").info("disconnect")
+ except Exception:
+ pass
+
+ return {"success": True, "connected": False}
+
+
+def status() -> Dict[str, Any]:
+ """Return minimal runtime status for the single instance."""
+ # Return the explicit runtime state only (no guessing, no heuristics).
+ try:
+ # Shallow copy as base
+ base = dict(_runtime_state)
+
+ # Wenn Transport verbunden ist, aber runtime_state.connected False, setze True
+ transport_connected = _client_instance is not None and _is_connected(_client_instance)
+ if transport_connected and not base.get("connected"):
+ base["connected"] = True
+
+ # connected_since ableiten
+ if not base.get("connected_since"):
+ if _transport_connected_since:
+ base["connected_since"] = _iso_utc(_transport_connected_since)
+ elif _connected_since:
+ base["connected_since"] = _iso_utc(_connected_since)
+
+ # Aggregate topic/message statistics
+ stats = _aggregate_topic_stats()
+ base.update(stats)
+
+ # QoS fallback
+ if base.get("qos") is None:
+ base["qos"] = 1
+
+ # Uptime derived from connected_since
+ base["uptime"] = _format_uptime(base.get("connected_since"))
+
+ # last_message_time fallback to last_seen if missing
+ if not base.get("last_message_time") and base.get("last_seen"):
+ base["last_message_time"] = base.get("last_seen")
+
+ return base
+ except Exception as exc:
+ return {"connected": False, "error": str(exc)}
diff --git a/app/services/printer_auto_detector.py b/app/services/printer_auto_detector.py
new file mode 100644
index 0000000..59ec434
--- /dev/null
+++ b/app/services/printer_auto_detector.py
@@ -0,0 +1,103 @@
+from typing import Optional, Dict, Any
+
+
+class PrinterAutoDetector:
+ """
+ Erkennt automatisch:
+ - Modell (X1C, P1S, P1P, A1, A1MINI, X1E, H2D)
+ - MQTT-Protokoll (5 / 311 / 31)
+ - Capability-Map (AMS vorhanden? LiDAR? Chamber sensor?)
+ """
+
+ MODEL_MAP_PREFIX = {
+ "00M09A": "X1C",
+ "0309DA": "A1MINI",
+ "0309DB": "A1",
+ "01P1S": "P1S",
+ "01P1P": "P1P",
+ "01X1E": "X1E",
+ "H2D": "H2D",
+ }
+
+ # MQTT Protokoll-Zuordnung nach Modell
+ # Premium-Modelle (X1C, X1E, P1P, P1S, H2D) verwenden MQTT v5 für vollständige Daten
+ # Budget-Modelle (A1, A1 Mini) verwenden MQTT v3.1.1
+ MODEL_MQTT_PROTOCOL = {
+ "X1C": "5",
+ "X1E": "5",
+ "P1S": "5",
+ "P1P": "5",
+ "H2D": "5", # H2D ist neuestes Premium-Modell → MQTT v5
+ "A1": "311",
+ "A1MINI": "311",
+ }
+
+ @staticmethod
+ def detect_model_from_serial(serial: Optional[str]) -> Optional[str]:
+ if not serial:
+ return None
+ for prefix, model in PrinterAutoDetector.MODEL_MAP_PREFIX.items():
+ if serial.startswith(prefix):
+ return model
+ return None
+
+ @staticmethod
+ def detect_model_from_payload(data: Dict[str, Any]) -> Optional[str]:
+ dev = data.get("device", {}) if isinstance(data, dict) else {}
+ model = dev.get("model") or dev.get("machine", {}).get("model") if isinstance(dev, dict) else None
+ if model:
+ return str(model).upper()
+ return None
+
+ @staticmethod
+ def detect_capabilities(data: Dict[str, Any]) -> Dict[str, bool]:
+ caps = {
+ "has_ams": False,
+ "has_lidar": False,
+ "has_chamber_temp": False,
+ "has_aux_fan": False,
+ }
+ if isinstance(data, dict):
+ if any(k in data for k in ("ams", "filament", "material_system")):
+ caps["has_ams"] = True
+ if "lidar" in str(data).lower():
+ caps["has_lidar"] = True
+ temp_block = data.get("temperature", {}) if isinstance(data.get("temperature"), dict) else {}
+ if any("chamber" in str(v).lower() for v in temp_block.values()) or "chamber" in temp_block:
+ caps["has_chamber_temp"] = True
+ cooling = data.get("cooling") if isinstance(data.get("cooling"), dict) else {}
+ if cooling.get("fan_2_speed") is not None:
+ caps["has_aux_fan"] = True
+ return caps
+
+ @staticmethod
+ def detect_mqtt_version(protocol_detector, printer) -> Optional[str]:
+ """
+ Erkennt MQTT-Protokoll mit Modell-basierter Priorisierung.
+
+ 1. Wenn mqtt_version bereits gesetzt: verwende diese
+ 2. Wenn Modell bekannt: verwende modell-spezifisches Protokoll (harte Regel)
+ 3. Fallback: Auto-Detection über protocol_detector
+ """
+ # Falls bereits gesetzt
+ if getattr(printer, "mqtt_version", None):
+ return printer.mqtt_version
+
+ # Modell-basierte Zuordnung (höchste Priorität)
+ model = getattr(printer, "model", None)
+ if model and model.upper() in PrinterAutoDetector.MODEL_MQTT_PROTOCOL:
+ protocol = PrinterAutoDetector.MODEL_MQTT_PROTOCOL[model.upper()]
+ print(f"[MQTT] Modell '{model}' → Protokoll {protocol} (harte Regel)")
+ return protocol
+
+ # Fallback: Auto-Detection
+ try:
+ res = protocol_detector.detect(printer.ip_address, printer.api_key, port=printer.port or 8883)
+ if isinstance(res, dict) and res.get("detected"):
+ detected_protocol = str(res.get("protocol"))
+ print(f"[MQTT] Auto-Detection → Protokoll {detected_protocol}")
+ return detected_protocol
+ except Exception as e:
+ print(f"[MQTT] Auto-Detection fehlgeschlagen: {e}")
+ return None
+ return None
diff --git a/app/services/printer_data.py b/app/services/printer_data.py
new file mode 100644
index 0000000..b5e3c8a
--- /dev/null
+++ b/app/services/printer_data.py
@@ -0,0 +1,70 @@
+from typing import Any, Dict, Optional
+from datetime import datetime, timezone
+
+
+class PrinterData:
+ """Einheitliches Ausgabeformat für alle Bambu Lab Drucker."""
+
+ def __init__(self) -> None:
+ self.model: Optional[str] = None
+ self.state: Optional[str] = None
+ self.progress: Optional[float] = None
+ self.sub_state: Optional[str] = None
+
+ self.temperature: Dict[str, Optional[float]] = {
+ "nozzle": None,
+ "bed": None,
+ "chamber": None,
+ }
+
+ self.fan: Dict[str, Optional[float]] = {
+ "part_cooling": None,
+ "aux": None,
+ "chamber": None,
+ }
+
+ self.layer: Dict[str, Optional[int]] = {
+ "current": None,
+ "total": None,
+ }
+
+ self.speed_mode: Optional[str] = None
+
+ self.light: Dict[str, Optional[Any]] = {
+ "state": None,
+ "brightness": None,
+ }
+
+ self.ams: Optional[Any] = None
+ # Liste der geparsten AMS-Units (Multi-AMS fähig)
+ self.ams_units: list = []
+
+ self.job: Dict[str, Optional[Any]] = {
+ "file": None,
+ "time_elapsed": None,
+ "time_remaining": None,
+ }
+
+ self.error: Optional[Any] = None
+ self.extra: Dict[str, Any] = {}
+ self.timestamp: str = datetime.now(timezone.utc).isoformat()
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Serialisierbare Repräsentation für WebSocket/API (ohne geteilte Referenzen)."""
+ return {
+ "model": self.model,
+ "state": self.state,
+ "progress": self.progress,
+ "sub_state": self.sub_state,
+ "temperature": dict(self.temperature),
+ "fan": dict(self.fan),
+ "layer": dict(self.layer),
+ "speed_mode": self.speed_mode,
+ "light": dict(self.light),
+ "ams": self.ams,
+ "ams_units": list(self.ams_units),
+ "job": dict(self.job),
+ "error": self.error,
+ "extra": dict(self.extra),
+ "timestamp": self.timestamp,
+ }
diff --git a/app/services/printer_mqtt_client.py b/app/services/printer_mqtt_client.py
new file mode 100644
index 0000000..3831eed
--- /dev/null
+++ b/app/services/printer_mqtt_client.py
@@ -0,0 +1,9 @@
+"""Re-export wrapper for the runtime PrinterMQTTClient.
+
+This keeps `app.services.printer_mqtt_client` as the stable import path while
+reusing the existing implementation in `services.printer_mqtt_client`.
+"""
+
+from services.printer_mqtt_client import PrinterMQTTClient
+
+__all__ = ["PrinterMQTTClient"]
diff --git a/app/services/spool_number_service.py b/app/services/spool_number_service.py
new file mode 100644
index 0000000..95bb70f
--- /dev/null
+++ b/app/services/spool_number_service.py
@@ -0,0 +1,207 @@
+"""
+Service für Spulen-Nummern-Verwaltung
+
+Implementiert das Spulen-Nummern-System gemäß Spezifikation v4:
+- Automatische Nummernvergabe mit Recycling
+- Denormalisierung von Material-Daten
+- Snapshot-Erstellung für Job-Historie
+"""
+from typing import Optional
+from sqlmodel import Session, select, func
+from datetime import datetime
+
+from app.models.spool import Spool
+from app.models.material import Material
+
+
+def get_next_spool_number(session: Session) -> int:
+ """
+ Findet die niedrigste freie Spulen-Nummer (Recycling-System)
+
+ Beispiel:
+ - Spulen #1, #2, #4 existieren
+ - #3 wurde gelöscht
+ - Ergebnis: 3 (recycelt die gelöschte Nummer)
+
+ Args:
+ session: SQLModel Session
+
+ Returns:
+ int: Nächste freie Spulen-Nummer
+ """
+ # Hole alle verwendeten Nummern
+ stmt = select(Spool.spool_number).where(Spool.spool_number.is_not(None))
+ used_numbers = set(num for num in session.exec(stmt).all() if num is not None)
+
+ # Finde erste Lücke (Recycling)
+ for i in range(1, 10000):
+ if i not in used_numbers:
+ return i
+
+ # Fallback: MAX + 1 (sollte nie erreicht werden)
+ max_num = session.exec(
+ select(func.max(Spool.spool_number))
+ ).one_or_none()
+ return (max_num or 0) + 1
+
+
+def assign_spool_number(spool: Spool, session: Session) -> Optional[int]:
+ """
+ Denormalisiert Material-Daten und behält Spulen-Nummern bei
+
+ INTELLIGENTES LAGER-SYSTEM:
+ - Spulen-Nummern sind OPTIONAL und werden NUR manuell vom Benutzer vergeben
+ - Wenn eine Nummer gesetzt ist, bleibt sie DAUERHAFT (auch bei RFID-Erkennung)
+ - RFID (tray_uuid) und Nummer werden VERKNÜPFT (nicht entfernt!)
+ - Neue RFID-Spulen bekommen KEINE automatische Nummer → Benutzer entscheidet
+
+ Diese Funktion:
+ 1. Behält vorhandene spool_number (wird NIE überschrieben)
+ 2. Vergibt KEINE automatischen Nummern mehr
+ 3. Kopiert name, vendor aus material-Tabelle (Denormalisierung)
+ 4. Extrahiert Farbe aus tray_color (falls vorhanden)
+
+ Args:
+ spool: Spool-Objekt (wird modifiziert)
+ session: SQLModel Session
+
+ Returns:
+ Optional[int]: Vorhandene Spulen-Nummer oder None
+ """
+ # KEINE automatische Nummern-Vergabe mehr!
+ # spool.spool_number bleibt wie es ist (None oder vom Benutzer gesetzt)
+
+ # Denormalisierung durchführen
+ _denormalize_spool_data(spool, session)
+
+ return spool.spool_number
+
+
+def _denormalize_spool_data(spool: Spool, session: Session) -> None:
+ """
+ Denormalisiert Material-Daten und Farbe für schnelle Suche
+
+ Interne Hilfsfunktion für assign_spool_number()
+
+ Args:
+ spool: Spool-Objekt (wird modifiziert)
+ session: SQLModel Session
+ """
+ # 1. Material-Daten kopieren (denormalisieren für schnelle Suche)
+ if spool.material_id:
+ material = session.get(Material, spool.material_id)
+ if material:
+ spool.name = material.name
+ spool.vendor = material.brand
+
+ # 2. Farbe extrahieren oder setzen
+ if not spool.color and spool.tray_color:
+ # Bambu-Spule: Extrahiere Farbe aus Hex-Code
+ spool.color = extract_color_from_hex(spool.tray_color)
+ elif not spool.color:
+ # Fallback: unknown
+ spool.color = "unknown"
+
+
+def extract_color_from_hex(hex_color: str) -> str:
+ """
+ Konvertiert Bambu Hex-Farbe (z.B. "000000FF") zu lesbarem Namen
+
+ Vereinfachte Farb-Erkennung basierend auf RGB-Werten.
+
+ Args:
+ hex_color: Hex-String (z.B. "FF0000FF" für rot)
+
+ Returns:
+ str: Farb-Name (black, white, red, green, blue, yellow, mixed, unknown)
+
+ Beispiele:
+ "000000FF" → "black"
+ "FFFFFFFF" → "white"
+ "FF0000FF" → "red"
+ "00FF00FF" → "green"
+ """
+ if not hex_color or len(hex_color) < 6:
+ return "unknown"
+
+ # Erste 6 Zeichen = RGB (letzte 2 = Alpha-Channel)
+ rgb_hex = hex_color[:6]
+
+ try:
+ # Konvertiere zu RGB-Werten (0-255)
+ r = int(rgb_hex[0:2], 16)
+ g = int(rgb_hex[2:4], 16)
+ b = int(rgb_hex[4:6], 16)
+
+ # Einfache Farb-Erkennung
+ if r < 50 and g < 50 and b < 50:
+ return "black"
+ elif r > 200 and g > 200 and b > 200:
+ return "white"
+ elif r > 150 and g < 100 and b < 100:
+ return "red"
+ elif r < 100 and g > 150 and b < 100:
+ return "green"
+ elif r < 100 and g < 100 and b > 150:
+ return "blue"
+ elif r > 150 and g > 150 and b < 100:
+ return "yellow"
+ elif r > 150 and g < 100 and b > 150:
+ return "purple"
+ elif r > 150 and g > 100 and b < 100:
+ return "orange"
+ else:
+ return "mixed"
+ except (ValueError, IndexError):
+ return "unknown"
+
+
+def create_job_snapshot(spool: Spool) -> dict:
+ """
+ Erstellt Snapshot-Daten für Job-Historie
+
+ Dieser Snapshot wird in der job-Tabelle gespeichert und bewahrt
+ die Spulen-Daten zum Zeitpunkt des Job-Starts auf.
+
+ Auch wenn die Spule später gelöscht oder die Nummer recycelt wird,
+ bleibt die Historie korrekt.
+
+ Args:
+ spool: Spool-Objekt
+
+ Returns:
+ dict: Snapshot-Daten für job-Tabelle
+
+ Beispiel:
+ {
+ "spool_number": 3,
+ "spool_name": "PLA Basic",
+ "spool_vendor": "Bambu Lab",
+ "spool_color": "black",
+ "spool_created_at": "2024-11-01T10:00:00"
+ }
+ """
+ return {
+ "spool_number": spool.spool_number,
+ "spool_name": spool.name,
+ "spool_vendor": spool.vendor,
+ "spool_color": spool.color,
+ "spool_created_at": spool.created_at,
+ }
+
+
+def update_spool_denormalized_fields(spool: Spool, session: Session) -> None:
+ """
+ Aktualisiert denormalisierte Felder einer Spule
+
+ Nützlich wenn sich Material-Daten ändern und Spule aktualisiert werden soll.
+
+ Args:
+ spool: Spool-Objekt (wird modifiziert)
+ session: SQLModel Session
+ """
+ if spool.material_id:
+ material = session.get(Material, spool.material_id)
+ if material:
+ spool.name = material.name
+ spool.vendor = material.brand
diff --git a/app/services/universal_mapper.py b/app/services/universal_mapper.py
new file mode 100644
index 0000000..3bf52ec
--- /dev/null
+++ b/app/services/universal_mapper.py
@@ -0,0 +1,492 @@
+from datetime import datetime, timezone
+from typing import Any, Dict, Optional
+
+from sqlmodel import Session, select
+from app.database import engine
+from app.services.ams_parser import parse_ams
+from app.services.printer_data import PrinterData
+from app.models.settings import Setting
+
+
+class UniversalMapper:
+ """
+ Universeller Mapper für verschiedene Druckerplattformen.
+
+ Unterstützt:
+ - BambuLab (X1C/X1E/P1/P1P/P1S/A1/A1MINI, H2D-ähnliche Payloads)
+ - Klipper / Moonraker
+ - Fallback auf generische Strukturen
+ """
+
+ def __init__(self, model: Optional[str] = None) -> None:
+ self.model_hint = (model or "").upper().strip() or None
+
+ # ------------------------------------------------------------------ #
+ # Public Entry
+ # ------------------------------------------------------------------ #
+ def map(self, data: Dict[str, Any]) -> PrinterData:
+ pd = PrinterData()
+ try:
+ detected = self._detect_model(data)
+ pd.model = self.model_hint or detected or "UNKNOWN"
+ pd.timestamp = datetime.now(timezone.utc).isoformat()
+
+ model = (pd.model or "").upper()
+ if model in {"X1C", "X1E", "P1", "P1P", "P1S", "A1", "A1MINI"}:
+ self._map_bambu(data, pd)
+ elif model == "H2D":
+ self._map_bambu_h2d(data, pd)
+ elif model == "KLIPPER":
+ self._map_klipper(data, pd)
+ else:
+ if self._looks_like_bambu(data):
+ self._map_bambu(data, pd)
+ elif self._looks_like_klipper(data):
+ self._map_klipper(data, pd)
+ else:
+ self._map_generic(data, pd)
+ except Exception:
+ # Niemals Exceptions werfen – best effort
+ pass
+ return pd
+
+ # ------------------------------------------------------------------ #
+ # Modell-Erkennung
+ # ------------------------------------------------------------------ #
+ def _detect_model(self, data: Dict[str, Any]) -> Optional[str]:
+ if "gcode_state" in data or "mc_percent" in data or "lights_report" in data:
+ return "X1C"
+ if "mc_print" in data or "ams" in data or "vt_tray" in data:
+ return "X1C"
+ if "temperature" in data and "cooling_fan" in data and "material_system" in data:
+ return "H2D"
+ if self._looks_like_klipper(data):
+ return "KLIPPER"
+ return None
+
+ def _looks_like_bambu(self, data: Dict[str, Any]) -> bool:
+ keys = set(data.keys())
+ markers = {
+ "gcode_state",
+ "mc_percent",
+ "mc_remaining_time",
+ "print",
+ "ams",
+ "lights_report",
+ "nozzle_temper",
+ "bed_temper",
+ }
+ return bool(keys & markers)
+
+ def _looks_like_klipper(self, data: Dict[str, Any]) -> bool:
+ if isinstance(data.get("status"), dict):
+ return True
+ status_keys = {"print_stats", "heater_bed", "extruder", "display_status"}
+ if any(k in data for k in status_keys):
+ return True
+ return False
+
+ # ------------------------------------------------------------------ #
+ # Bambu – Hauptmapper (X1C, P1, A1, A1 Mini)
+ # ------------------------------------------------------------------ #
+ def _map_bambu(self, data: Dict[str, Any], out: PrinterData) -> None:
+ # STATE
+ pr = data.get("print") or data.get("print_status") or {}
+ state = (
+ data.get("gcode_state")
+ or self._translate_mc_stage(data.get("mc_print_stage"))
+ or pr.get("gcode_state")
+ or pr.get("state")
+ or data.get("state")
+ )
+ out.state = state
+
+ # PROGRESS
+ progress_candidates = [
+ data.get("mc_percent"),
+ data.get("percent"),
+ pr.get("progress") if isinstance(pr, dict) else None,
+ pr.get("percent") if isinstance(pr, dict) else None,
+ pr.get("mc_percent") if isinstance(pr, dict) else None,
+ pr.get("gcode_file_prepare_percent") if isinstance(pr, dict) else None,
+ data.get("gcode_file_prepare_percent"),
+ ]
+ out.progress = self._first_defined_float(progress_candidates)
+
+ # TEMPERATURE
+ nozzle = (
+ data.get("nozzle_temper")
+ or data.get("nozzle_temp")
+ or data.get("extruder_temp")
+ or self._deep_get(data, ["extruder", "temp"])
+ or self._deep_get(data, ["device", "extruder", "info", 0, "temp"])
+ or self._deep_get(data, ["temperature", "nozzle"])
+ )
+ bed = (
+ data.get("bed_temper")
+ or self._deep_get(data, ["device", "bed", "info", "temp"])
+ )
+ chamber = self._deep_get(data, ["device", "ctc", "info", "temp"])
+
+ heater = data.get("heater") or {}
+ if nozzle is None:
+ nozzle = heater.get("nozzle_temper") or heater.get("nozzle_temp")
+ if bed is None:
+ bed = heater.get("bed_temper") or heater.get("bed_temp")
+ if chamber is None:
+ chamber = heater.get("chamber_temper") or heater.get("chamber_temp")
+
+ temp_block = data.get("temperature") or {}
+ nozzle = nozzle if nozzle is not None else temp_block.get("nozzle")
+ bed = bed if bed is not None else temp_block.get("bed")
+ chamber = chamber if chamber is not None else temp_block.get("chamber")
+
+ self._set(out.temperature, "nozzle", self._safe_float(nozzle))
+ self._set(out.temperature, "bed", self._safe_float(bed))
+ self._set(out.temperature, "chamber", self._safe_float(chamber))
+
+ # LAYER
+ current_layer = (
+ data.get("layer_num")
+ or self._deep_get(pr, ["layer_num"])
+ or self._deep_get(data, ["print", "3D", "layer_num"])
+ )
+ total_layer = (
+ data.get("total_layer_num")
+ or (pr.get("total_layer") if isinstance(pr, dict) else None)
+ or self._deep_get(data, ["print", "3D", "total_layer_num"])
+ )
+ self._set(out.layer, "current", self._safe_int(current_layer))
+ self._set(out.layer, "total", self._safe_int(total_layer))
+ if (
+ out.layer["current"] is not None
+ and out.layer["total"] is not None
+ and out.layer["current"] > out.layer["total"]
+ ):
+ out.layer["current"] = out.layer["total"]
+
+ # FANS
+ cooling = data.get("cooling") or {}
+ cooling_fan = data.get("cooling_fan") or {}
+ self._set(
+ out.fan,
+ "part_cooling",
+ self._first_defined_float(
+ [
+ data.get("cooling_fan_speed"),
+ data.get("heatbreak_fan_speed"),
+ cooling.get("fan_1_speed"),
+ cooling_fan.get("toolhead_fan"),
+ data.get("fan_speed"),
+ ]
+ ),
+ )
+ self._set(
+ out.fan,
+ "aux",
+ self._first_defined_float(
+ [
+ cooling.get("fan_2_speed"),
+ cooling_fan.get("heatbreak_fan"),
+ data.get("heatbreak_fan_speed"),
+ ]
+ ),
+ )
+ self._set(
+ out.fan,
+ "chamber",
+ self._first_defined_float(
+ [
+ cooling.get("fan_3_speed"),
+ cooling_fan.get("chamber_fan"),
+ data.get("big_fan1_speed"),
+ data.get("big_fan2_speed"),
+ ]
+ ),
+ )
+
+ # LIGHTS
+ lights = data.get("lights_report") or data.get("light") or []
+ if isinstance(lights, list):
+ for l in lights:
+ if l.get("node") == "chamber_light":
+ self._set(out.light, "state", l.get("mode"))
+ if l.get("strength") is not None:
+ self._set(out.light, "brightness", self._safe_float(l.get("strength")))
+ elif isinstance(lights, dict):
+ self._set(out.light, "state", lights.get("light_state") or lights.get("on"))
+ self._set(out.light, "brightness", self._safe_float(lights.get("light_strength") or lights.get("strength")))
+
+ # AMS
+ try:
+ ams_units = parse_ams(data)
+ except Exception:
+ ams_units = []
+
+ mode = self._get_setting_value("ams_mode", default="single")
+ if mode == "single":
+ out.ams_units = [ams_units[0]] if ams_units else []
+ elif mode == "multi":
+ out.ams_units = ams_units
+ else:
+ out.ams_units = ams_units
+
+ # JOB
+ job_block = data.get("job") or pr.get("job") if isinstance(pr, dict) else {}
+ self._set(out.job, "file", data.get("gcode_file") or data.get("file") or pr.get("file") if isinstance(pr, dict) else None)
+ self._set(out.job, "time_remaining", self._first_defined_float([
+ data.get("mc_remaining_time"),
+ data.get("remain_time"),
+ pr.get("time_remaining") if isinstance(pr, dict) else None,
+ job_block.get("remaining") if isinstance(job_block, dict) else None,
+ ]))
+ self._set(out.job, "time_elapsed", self._first_defined_float([
+ job_block.get("elapsed") if isinstance(job_block, dict) else None,
+ pr.get("time_elapsed") if isinstance(pr, dict) else None,
+ ]))
+
+ # SPEED MODE
+ self._set_attr(out, "speed_mode", data.get("print_speed_mode") or pr.get("speed_level") if isinstance(pr, dict) else None)
+
+ # ERROR
+ self._set_attr(out, "error", data.get("err") or data.get("mc_err") or (pr.get("error_code") if isinstance(pr, dict) else None))
+
+ # EXTRA
+ known = {
+ "gcode_state",
+ "mc_print_stage",
+ "state",
+ "mc_percent",
+ "percent",
+ "print",
+ "print_status",
+ "device",
+ "heater",
+ "cooling",
+ "cooling_fan",
+ "lights_report",
+ "light",
+ "ams",
+ "filament",
+ "material_system",
+ "vt_tray",
+ "vir_slot",
+ "gcode_file",
+ "file",
+ "remain_time",
+ "mc_remaining_time",
+ "job",
+ "layer_num",
+ "total_layer_num",
+ "big_fan1_speed",
+ "big_fan2_speed",
+ "heatbreak_fan_speed",
+ "cooling_fan_speed",
+ "nozzle_temper",
+ "bed_temper",
+ "spd_lvl",
+ "mc_err",
+ "err",
+ "temperature",
+ }
+ for k, v in data.items():
+ if k not in known:
+ out.extra[k] = v
+
+ # ------------------------------------------------------------------ #
+ # Bambu H2D
+ # ------------------------------------------------------------------ #
+ def _map_bambu_h2d(self, data: Dict[str, Any], out: PrinterData) -> None:
+ temp = data.get("temperature", {}) if isinstance(data, dict) else {}
+ self._set(out.temperature, "nozzle", self._safe_float(temp.get("nozzle")))
+ self._set(out.temperature, "bed", self._safe_float(temp.get("bed")))
+ self._set(out.temperature, "chamber", self._safe_float(temp.get("chamber")))
+
+ fan = data.get("cooling_fan", {}) if isinstance(data, dict) else {}
+ self._set(out.fan, "part_cooling", self._safe_float(fan.get("toolhead_fan")))
+ self._set(out.fan, "chamber", self._safe_float(fan.get("chamber_fan")))
+
+ job = data.get("job", {}) if isinstance(data, dict) else {}
+ self._set(out.job, "file", job.get("name"))
+ self._set(out.job, "time_elapsed", job.get("elapsed"))
+ self._set(out.job, "time_remaining", job.get("remaining"))
+
+ out.state = data.get("state") or job.get("state")
+ out.progress = self._safe_float(job.get("progress") if isinstance(job, dict) else None)
+
+ out.ams = data.get("material_system")
+ out.error = data.get("error")
+
+ known = {"temperature", "cooling_fan", "job", "material_system", "error", "state", "progress"}
+ for k, v in data.items():
+ if k not in known:
+ out.extra[k] = v
+
+ # ------------------------------------------------------------------ #
+ # Klipper / Moonraker
+ # ------------------------------------------------------------------ #
+ def _map_klipper(self, data: Dict[str, Any], out: PrinterData) -> None:
+ status = data.get("status") or data
+
+ print_stats = status.get("print_stats", {})
+ heater_bed = status.get("heater_bed", {})
+ extruder = status.get("extruder", {})
+ fan = status.get("fan") or status.get("part_fan") or {}
+ display_status = status.get("display_status", {})
+
+ out.state = print_stats.get("state")
+ prog = print_stats.get("progress")
+ if prog is not None:
+ out.progress = float(prog) * 100.0 if prog <= 1 else float(prog)
+
+ self._set(out.temperature, "nozzle", self._safe_float(extruder.get("temperature")))
+ self._set(out.temperature, "bed", self._safe_float(heater_bed.get("temperature")))
+ chamber = None
+ temp_sensors = status.get("temperature_sensor") or {}
+ if isinstance(temp_sensors, dict):
+ chamber_sensor = temp_sensors.get("chamber") or temp_sensors.get("Chamber")
+ if isinstance(chamber_sensor, dict):
+ chamber = chamber_sensor.get("temperature")
+ self._set(out.temperature, "chamber", self._safe_float(chamber))
+
+ fan_speed = fan.get("speed")
+ if fan_speed is not None:
+ try:
+ out.fan["part_cooling"] = float(fan_speed) * 100.0 if float(fan_speed) <= 1 else float(fan_speed)
+ except Exception:
+ out.fan["part_cooling"] = None
+
+ cur_layer = display_status.get("layer")
+ total_layer = display_status.get("total_layer")
+ self._set(out.layer, "current", self._safe_int(cur_layer))
+ self._set(out.layer, "total", self._safe_int(total_layer))
+ if (
+ out.layer["current"] is not None
+ and out.layer["total"] is not None
+ and out.layer["current"] > out.layer["total"]
+ ):
+ out.layer["current"] = out.layer["total"]
+
+ out.job["file"] = print_stats.get("filename")
+ out.job["time_elapsed"] = print_stats.get("print_duration") or print_stats.get("total_duration")
+ out.job["time_remaining"] = print_stats.get("time_remaining") or display_status.get("estimated_time_remaining")
+
+ if out.state and str(out.state).lower() == "error":
+ out.error = "print_error"
+
+ known_top = {"status"}
+ for k, v in data.items():
+ if k not in known_top:
+ out.extra[k] = v
+
+ known_status = {
+ "print_stats",
+ "heater_bed",
+ "extruder",
+ "fan",
+ "part_fan",
+ "display_status",
+ "temperature_sensor",
+ }
+ for k, v in status.items():
+ if k not in known_status:
+ out.extra[f"status.{k}"] = v
+
+ # ------------------------------------------------------------------ #
+ # Fallback – alles in extra
+ # ------------------------------------------------------------------ #
+ def _map_generic(self, data: Dict[str, Any], out: PrinterData) -> None:
+ try:
+ out.extra = dict(data)
+ except Exception:
+ out.extra = {}
+
+ # ------------------------------------------------------------------ #
+ # Helper
+ # ------------------------------------------------------------------ #
+ def _translate_mc_stage(self, stage: Any) -> Optional[str]:
+ mapping = {
+ "0": None,
+ "1": "IDLE",
+ "2": "HEATING",
+ "3": "PRINTING",
+ "4": "PAUSED",
+ "5": "FINISHED",
+ }
+ if stage is None:
+ return None
+ return mapping.get(str(stage).strip(), None)
+
+ def _safe_int(self, val: Any) -> Optional[int]:
+ try:
+ if val is None:
+ return None
+ return int(val)
+ except Exception:
+ return None
+
+ def _safe_float(self, val: Any) -> Optional[float]:
+ try:
+ if val is None:
+ return None
+ return float(val)
+ except Exception:
+ return None
+
+ def _deep_get(self, data: Any, path: list, default=None):
+ cur = data
+ try:
+ for p in path:
+ if isinstance(cur, list) and isinstance(p, int):
+ cur = cur[p]
+ elif isinstance(cur, dict):
+ cur = cur[p]
+ else:
+ return default
+ return cur
+ except Exception:
+ return default
+
+ def _first_defined_float(self, values) -> Optional[float]:
+ for v in values:
+ f = self._safe_float(v)
+ if f is not None:
+ return f
+ return None
+
+ def _set(self, target: Dict[str, Any], key: str, value: Any) -> None:
+ if value is not None:
+ target[key] = value
+
+ def _set_attr(self, obj: Any, attr: str, value: Any) -> None:
+ if value is not None:
+ setattr(obj, attr, value)
+
+ def _get_setting_value(self, key: str, default: Optional[str] = None) -> Optional[str]:
+ """
+ Minimaler Settings-Reader fǬr Mapper-Kontext ohne Request/Dependency.
+ Öffnet kurzzeitig eine Session, liest den Wert oder liefert Default.
+ """
+ try:
+ with Session(engine) as session:
+ setting = session.exec(select(Setting).where(Setting.key == key)).first()
+ return setting.value if setting and setting.value is not None else default
+ except Exception:
+ return default
+
+ def map_ams_block(self, data: Dict[str, Any], out: PrinterData) -> None:
+ """
+ Separater AMS-Mapping-Helper (für Tests), nutzt parse_ams und beachtet ams_mode.
+ """
+ try:
+ ams_units = parse_ams(data)
+ except Exception:
+ ams_units = []
+
+ mode = self._get_setting_value("ams_mode", default="single")
+ if mode == "single":
+ out.ams_units = [ams_units[0]] if ams_units else []
+ elif mode == "multi":
+ out.ams_units = ams_units
+ else:
+ out.ams_units = ams_units
diff --git a/app/static/css/debug-theme.css b/app/static/css/debug-theme.css
new file mode 100644
index 0000000..8b9a560
--- /dev/null
+++ b/app/static/css/debug-theme.css
@@ -0,0 +1,224 @@
+:root {
+ --bg-dark: #0a0e1a;
+ --bg-card: #141824;
+ --bg-card-hover: #1a1f35;
+ --border: rgba(255, 255, 255, 0.08);
+ --border-hover: rgba(102, 126, 234, 0.5);
+ --shadow-soft: 0 12px 35px rgba(6, 8, 18, 0.6);
+ --text: #e0e4ea;
+ --text-dim: #9ca3af;
+ --accent: #4fc3f7;
+ --accent-gradient: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+ --success: #10b981;
+ --error: #ef4444;
+ --warning: #f59e0b;
+ --card-gradient: linear-gradient(135deg, rgba(26, 31, 53, 0.8) 0%, #141824 100%);
+ --radius-lg: 16px;
+ --radius-md: 12px;
+ --radius-sm: 8px;
+ --spacing-sm: 0.65rem;
+ --spacing-md: 1.25rem;
+ --spacing-lg: 1.75rem;
+ --font-main: 'Inter', 'Segoe UI', system-ui, sans-serif;
+ --font-mono: 'JetBrains Mono', Consolas, monospace;
+}
+
+body {
+ background-color: var(--bg-dark);
+ color: var(--text);
+ font-family: var(--font-main);
+ line-height: 1.5;
+}
+
+a {
+ color: var(--accent);
+}
+
+.panel,
+.stat-card,
+.large-stat-card {
+ background: var(--card-gradient);
+ border: 1px solid var(--border);
+ border-radius: var(--radius-lg);
+ padding: var(--spacing-md);
+ position: relative;
+ overflow: hidden;
+ transition: transform 0.3s ease, border-color 0.3s ease, box-shadow 0.3s ease;
+ box-shadow: var(--shadow-soft);
+}
+
+.panel:hover,
+.stat-card:hover,
+.large-stat-card:hover {
+ border-color: var(--border-hover);
+ transform: translateY(-4px);
+}
+
+.panel::before,
+.stat-card::before,
+.large-stat-card::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ right: 0;
+ width: 140px;
+ height: 140px;
+ background: radial-gradient(circle, rgba(102, 126, 234, 0.15) 0%, transparent 80%);
+ pointer-events: none;
+}
+
+.system-grid,
+.mqtt-overview-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
+ gap: var(--spacing-lg);
+}
+
+.info-grid {
+ display: grid;
+ grid-template-columns: repeat(2, minmax(0, 1fr));
+ gap: 1rem;
+ margin-top: 1rem;
+}
+
+.card-header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ margin-bottom: 1rem;
+}
+
+.eyebrow {
+ font-size: 0.75rem;
+ letter-spacing: 0.18em;
+ text-transform: uppercase;
+ color: var(--text-dim);
+ font-weight: 600;
+}
+
+.info-label {
+ font-size: 0.75rem;
+ color: var(--text-dim);
+ text-transform: uppercase;
+}
+
+.info-value {
+ font-size: 0.95rem;
+ font-weight: 600;
+ color: var(--text);
+ font-family: var(--font-mono);
+}
+
+.status-badge {
+ display: inline-flex;
+ align-items: center;
+ gap: 0.35rem;
+ padding: 0.35rem 0.9rem;
+ border-radius: var(--radius-sm);
+ font-size: 0.75rem;
+ font-weight: 700;
+ text-transform: uppercase;
+ letter-spacing: 0.08em;
+}
+
+.status-badge.status-ok {
+ background: rgba(16, 185, 129, 0.12);
+ color: var(--success);
+}
+
+.status-badge.status-error {
+ background: rgba(239, 68, 68, 0.12);
+ color: var(--error);
+}
+
+.status-badge.status-warn {
+ background: rgba(245, 158, 11, 0.12);
+ color: var(--warning);
+}
+
+.pro-badge {
+ padding: 0.35rem 0.9rem;
+ border-radius: var(--radius-sm);
+ font-size: 0.75rem;
+ font-weight: 700;
+ text-transform: uppercase;
+ letter-spacing: 0.08em;
+ background: linear-gradient(135deg, #10b981 0%, #059669 100%);
+ color: white;
+ box-shadow: 0 3px 12px rgba(16, 185, 129, 0.4);
+}
+
+.btn {
+ border: 1px solid var(--border);
+ border-radius: var(--radius-md);
+ padding: 0.55rem 1.25rem;
+ font-weight: 600;
+ background: rgba(255, 255, 255, 0.04);
+ color: var(--text);
+ transition: background 0.3s ease, transform 0.3s ease, border-color 0.3s ease;
+}
+
+.btn:hover {
+ border-color: var(--border-hover);
+ background: rgba(255, 255, 255, 0.08);
+ transform: translateY(-2px);
+}
+
+.btn-primary {
+ background: var(--accent-gradient);
+ border-color: transparent;
+ color: white;
+ box-shadow: 0 12px 24px rgba(102, 126, 234, 0.3);
+}
+
+.btn-secondary {
+ background: rgba(255, 255, 255, 0.05);
+}
+
+.form-input,
+input[type='text'],
+input[type='number'],
+select {
+ background: rgba(255, 255, 255, 0.02);
+ border: 1px solid var(--border);
+ border-radius: var(--radius-sm);
+ color: var(--text);
+ padding: 0.55rem 0.9rem;
+ font-family: var(--font-mono);
+ transition: border 0.3s ease, box-shadow 0.3s ease;
+}
+
+.form-input:focus,
+input[type='text']:focus,
+input[type='number']:focus,
+select:focus {
+ border-color: var(--border-hover);
+ box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.2);
+ outline: none;
+}
+
+.large-stat-card .large-stat-value {
+ font-size: 2.2rem;
+ font-family: var(--font-mono);
+ margin-top: 0.35rem;
+}
+
+.stat-card .large-stat-value {
+ font-size: 1.6rem;
+ font-family: var(--font-mono);
+}
+
+.system-grid .panel {
+ min-height: 180px;
+}
+
+.panel h1,
+.panel h2,
+.panel h3,
+.panel p {
+ margin: 0;
+}
+
+.system-grid .panel .subtitle {
+ color: var(--text-dim);
+}
diff --git a/app/static/css/debug_tabs.css b/app/static/css/debug_tabs.css
new file mode 100644
index 0000000..0c9cf9d
--- /dev/null
+++ b/app/static/css/debug_tabs.css
@@ -0,0 +1,23 @@
+.debug-tabs {
+ display: flex;
+ gap: 12px;
+ margin: 20px 0;
+ padding-bottom: 6px;
+ border-bottom: 1px solid rgba(255,255,255,0.06);
+}
+.debug-tab {
+ padding: 8px 16px;
+ background: rgba(255,255,255,0.05);
+ color: rgba(255,255,255,0.75);
+ border-radius: 6px;
+ cursor: pointer;
+ transition: all 0.15s ease;
+}
+.debug-tab:hover {
+ background: rgba(255,255,255,0.12);
+ color: white;
+}
+.debug-tab.active {
+ background: linear-gradient(135deg, #2d72d9, #4698f7);
+ color: white;
+}
diff --git a/app/static/css/log_viewer.css b/app/static/css/log_viewer.css
new file mode 100644
index 0000000..f33ec21
--- /dev/null
+++ b/app/static/css/log_viewer.css
@@ -0,0 +1,63 @@
+/* Minimal CSS for new Log Viewer */
+.log-entries { display:flex; flex-direction:column; gap:8px; }
+.log-line { border:1px solid rgba(255,255,255,0.08); border-radius:10px; padding:10px 12px; background:rgba(0,0,0,0.18); }
+.log-line { cursor: pointer; }
+.log-summary { display:flex; align-items:center; gap:10px; }
+.log-toggle-icon { margin-left:auto; opacity:0.6; font-size:12px; }
+.log-line.expanded .log-toggle-icon { opacity:0.75; }
+.log-level { min-width:76px; font-weight:700; opacity:0.9; }
+.log-timestamp { font-size:12px; opacity:0.55; margin-right:8px; white-space:nowrap; }
+.log-message { flex:1; opacity:0.95; }
+.log-message { line-height:1.4; }
+.log-toggle { border:1px solid rgba(255,255,255,0.12); background:rgba(255,255,255,0.06); color:inherit; border-radius:8px; padding:6px 10px; cursor:pointer; }
+.log-stacktrace { display:none; margin-top:10px; white-space:pre-wrap; font-family:ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; font-size:12px; opacity:0.85; }
+.log-line.expanded .log-stacktrace { display:block; }
+.log-info .log-level { opacity:0.85; }
+.log-debug { opacity:0.75; }
+.log-warning { border-color: rgba(255,200,0,0.25); }
+.log-error { border-color: rgba(255,80,80,0.30); }
+/* Toolbar styles */
+.log-toolbar {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ gap: 12px;
+ padding: 10px 12px;
+ margin-bottom: 12px;
+ background: rgba(0,0,0,0.25);
+ border: 1px solid rgba(255,255,255,0.06);
+ border-radius: 12px;
+}
+
+.log-toolbar-left,
+.log-toolbar-right {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+}
+
+.log-btn {
+ background: rgba(255,255,255,0.06);
+ border: 1px solid rgba(255,255,255,0.12);
+ color: #e6e6e6;
+ padding: 6px 12px;
+ border-radius: 8px;
+ cursor: pointer;
+}
+
+.log-btn:hover {
+ background: rgba(255,255,255,0.10);
+}
+
+.log-select,
+.log-search {
+ background: rgba(0,0,0,0.35);
+ border: 1px solid rgba(255,255,255,0.12);
+ color: #e6e6e6;
+ padding: 6px 10px;
+ border-radius: 8px;
+}
+
+.log-search {
+ min-width: 180px;
+}
diff --git a/app/static/debug.css b/app/static/debug.css
new file mode 100644
index 0000000..5be3806
--- /dev/null
+++ b/app/static/debug.css
@@ -0,0 +1,2248 @@
+/* MQTT Status-Badge Farblogik */
+.status-badge.status-ok {
+ background: rgba(46,204,113,0.2);
+ color: #8ef0b5;
+}
+.status-badge.status-error {
+ background: rgba(231,76,60,0.2);
+ color: #ff9a8a;
+}
+.status-badge.status-warn {
+ background: rgba(241,196,15,0.18);
+ color: #ffd666;
+}
+.status-badge.status-idle {
+ background: rgba(255,255,255,0.08);
+ color: rgba(255,255,255,0.7);
+}
+
+.status-dot.dot-ok {
+ background: #28a745;
+}
+.status-dot.dot-error {
+ background: #dc3545;
+}
+.status-dot.dot-warn {
+ background: #ffc107;
+}
+.status-dot.dot-idle {
+ background: #888;
+}
+/* MQTT Connection Card – Modernes Design */
+.mqtt-connection-card {
+ border: 2px solid var(--accent);
+ box-shadow: 0 4px 24px 0 rgba(79,195,247,0.07);
+ border-radius: 13px;
+ background: var(--bg-card);
+ padding: 18px 16px 12px 16px;
+ margin-bottom: 22px;
+ max-width: 440px;
+}
+.mqtt-conn-header {
+ margin-bottom: 24px;
+}
+.mqtt-conn-title {
+ font-size: 1.25rem;
+ font-weight: 700;
+ color: var(--accent);
+ margin-bottom: 2px;
+}
+.mqtt-conn-desc {
+ font-size: 0.95rem;
+ color: var(--text-dim);
+ margin-bottom: 0;
+}
+.status-indicator {
+ margin-bottom: 16px;
+ background: #181c20;
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ padding: 8px 12px;
+ font-size: 1rem;
+}
+.status-dot {
+ width: 12px;
+ height: 12px;
+ margin-right: 8px;
+}
+.status-text {
+ font-size: 1rem;
+ font-weight: 700;
+ color: #fff;
+}
+.status-detail {
+ font-size: 0.85rem;
+ color: var(--text-dim);
+}
+.printer-select {
+ margin-bottom: 8px;
+}
+.printer-chip {
+ font-size: 0.95rem;
+ font-weight: 700;
+ border-radius: 999px;
+ padding: 7px 14px;
+ border: 1.5px solid var(--border);
+ background: rgba(255,255,255,0.04);
+ color: var(--text);
+ margin-bottom: 4px;
+ transition: all 0.2s;
+}
+.printer-chip.active {
+ background: var(--accent);
+ color: #fff;
+ border-color: var(--accent);
+ box-shadow: 0 2px 8px 0 rgba(79,195,247,0.12);
+}
+.printer-chip:hover {
+ border-color: var(--accent);
+ background: rgba(79,195,247,0.18);
+ color: var(--accent);
+}
+.form-label {
+ font-size: 0.85rem;
+ font-weight: 700;
+ color: var(--text-dim);
+ margin-bottom: 4px;
+ text-transform: uppercase;
+ letter-spacing: 0.04em;
+}
+.form-input {
+ font-size: 1rem;
+ font-family: 'Consolas', monospace;
+ background: #181c20;
+ color: #fff;
+ border: 1.2px solid var(--border);
+ border-radius: 7px;
+ padding: 8px 10px;
+ margin-bottom: 0;
+ transition: border 0.2s;
+}
+.form-input:focus {
+ border-color: var(--accent);
+ outline: none;
+}
+.form-row {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 10px;
+ margin-bottom: 0;
+}
+.action-buttons {
+ display: flex;
+ gap: 10px;
+ margin-top: 16px;
+}
+/* Kompaktere MQTT-Buttons */
+.btn-connect {
+ background: linear-gradient(135deg, #66bb6a, #43a047);
+ color: #fff;
+ font-size: 0.95rem;
+ font-weight: 700;
+ border-radius: 7px;
+ border: none;
+ padding: 7px 0 7px 0;
+ min-width: 90px;
+ flex: 1;
+ transition: box-shadow 0.2s, transform 0.2s;
+}
+.btn-connect:hover {
+ box-shadow: 0 8px 24px rgba(102, 187, 106, 0.18);
+ transform: translateY(-2px);
+}
+/* Kompaktere MQTT-Buttons */
+.btn-disconnect {
+ background: linear-gradient(135deg, #ef5350, #d32f2f);
+ color: #fff;
+ font-size: 0.95rem;
+ font-weight: 700;
+ border-radius: 7px;
+ border: none;
+ padding: 7px 0 7px 0;
+ min-width: 90px;
+ flex: 1;
+ transition: box-shadow 0.2s, transform 0.2s;
+}
+.btn-disconnect:hover {
+ box-shadow: 0 8px 24px rgba(239, 83, 80, 0.18);
+ transform: translateY(-2px);
+}
+/* Kompaktere MQTT-Buttons */
+.btn-secondary {
+ background: rgba(255,255,255,0.08);
+ color: #fff;
+ font-size: 0.95rem;
+ font-weight: 700;
+ border-radius: 7px;
+ border: none;
+ padding: 7px 0 7px 0;
+ min-width: 90px;
+ flex: 1;
+ transition: box-shadow 0.2s, transform 0.2s;
+}
+.btn-secondary:hover {
+ background: var(--accent);
+ color: #fff;
+}
+.info-box {
+ background: rgba(79, 195, 247, 0.13);
+ border-left: 3px solid var(--accent);
+ padding: 10px 12px;
+ border-radius: 7px;
+ font-size: 0.97rem;
+ color: #b3e5fc;
+ margin-top: 12px;
+ display: flex;
+ align-items: flex-start;
+ gap: 7px;
+}
+.info-box strong {
+ color: #fff;
+}
+@media (max-width: 700px) {
+ .mqtt-connection-card {
+ padding: 8px 2px 6px 2px;
+ max-width: 100%;
+ }
+ .form-row {
+ grid-template-columns: 1fr;
+ gap: 6px;
+ }
+ .action-buttons {
+ flex-direction: column;
+ gap: 6px;
+ }
+}
+/* MQTT Connection Card Styles */
+.status-indicator {
+ display: flex;
+ align-items: center;
+ gap: 10px;
+ padding: 12px 16px;
+ background: rgba(0,0,0,0.3);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ margin-bottom: 20px;
+}
+
+.status-dot {
+ width: 12px;
+ height: 12px;
+ border-radius: 50%;
+ background: var(--text-dim);
+ animation: pulse 2s infinite;
+}
+.status-dot.connected { background: var(--success); }
+.status-dot.disconnected { background: var(--error); animation: none; }
+
+.status-text {
+ font-size: 14px;
+ font-weight: 600;
+}
+
+.status-detail {
+ font-size: 12px;
+ color: var(--text-dim);
+ font-family: 'Consolas', monospace;
+}
+
+.form-group {
+ margin-bottom: 16px;
+}
+
+.form-label {
+ display: block;
+ font-size: 11px;
+ font-weight: 600;
+ color: var(--text-dim);
+ margin-bottom: 8px;
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+}
+
+.form-input {
+ width: 100%;
+ padding: 10px 14px;
+ background: rgba(0,0,0,0.3);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-size: 14px;
+ font-family: 'Consolas', monospace;
+}
+.form-input:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(79, 195, 247, 0.1);
+}
+
+.form-row {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 16px;
+}
+
+.printer-select {
+ display: flex;
+ gap: 10px;
+ flex-wrap: wrap;
+}
+
+.printer-chip {
+ padding: 8px 16px;
+ background: rgba(255,255,255,0.05);
+ border: 1px solid var(--border);
+ border-radius: 20px;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ font-size: 14px;
+ font-weight: 600;
+}
+.printer-chip:hover {
+ background: rgba(79, 195, 247, 0.15);
+ border-color: var(--accent);
+}
+.printer-chip.active {
+ background: rgba(79, 195, 247, 0.25);
+ border-color: var(--accent);
+ color: var(--accent);
+}
+.printer-chip-loading {
+ padding: 8px 16px;
+ color: var(--text-dim);
+ font-style: italic;
+}
+
+.action-buttons {
+ display: flex;
+ gap: 12px;
+ margin-top: 20px;
+}
+
+.btn-connect {
+ flex: 1;
+ background: linear-gradient(135deg, #66bb6a, #43a047);
+}
+.btn-connect:hover {
+ transform: translateY(-2px);
+ box-shadow: 0 8px 16px rgba(102, 187, 106, 0.3);
+}
+.btn-disconnect {
+ flex: 1;
+ background: linear-gradient(135deg, #ef5350, #d32f2f);
+}
+.btn-disconnect:hover {
+ transform: translateY(-2px);
+ box-shadow: 0 8px 16px rgba(239, 83, 80, 0.3);
+}
+
+.info-box {
+ background: rgba(79, 195, 247, 0.1);
+ border-left: 3px solid var(--accent);
+ padding: 12px 16px;
+ border-radius: 6px;
+ font-size: 13px;
+ line-height: 1.6;
+ color: var(--text-dim);
+}
+.info-box strong {
+ color: var(--text);
+}
+
+.btn-loading {
+ position: relative;
+ pointer-events: none;
+}
+.btn-loading::after {
+ content: "";
+ position: absolute;
+ right: 16px;
+ top: 50%;
+ width: 16px;
+ height: 16px;
+ margin-top: -8px;
+ border: 2px solid rgba(255,255,255,0.3);
+ border-top-color: white;
+ border-radius: 50%;
+ animation: spin 0.6s linear infinite;
+}
+@keyframes spin {
+ to { transform: rotate(360deg); }
+}
+
+@media (max-width: 640px) {
+ .form-row {
+ grid-template-columns: 1fr;
+ }
+ .action-buttons {
+ flex-direction: column;
+ }
+}
+/* Printer Status Farben */
+.status-online {
+ color: #66bb6a !important;
+ font-weight: bold;
+}
+.status-offline {
+ color: #ef5350 !important;
+ font-weight: bold;
+}
+/* FilamentHub Debug Center Styling */
+
+:root {
+ --bg-dark: #0a0a0a;
+ --bg-card: #1a1a1a;
+ --bg-card-hover: #222;
+ --border: #2a2a2a;
+ --text: #f0f0f0;
+ --text-dim: #888;
+ --accent: #4fc3f7;
+ --accent-hover: #29b6f6;
+ --success: #66bb6a;
+ --warning: #ffa726;
+ --error: #ef5350;
+ --gradient: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+}
+
+* {
+ margin: 0;
+ padding: 0;
+ box-sizing: border-box;
+}
+
+body {
+ background: var(--bg-dark);
+ color: var(--text);
+ font-family: 'Segoe UI', system-ui, -apple-system, sans-serif;
+ line-height: 1.6;
+ min-height: 100vh;
+}
+
+.container {
+ max-width: 1400px;
+ margin: 0 auto;
+ padding: 20px;
+}
+
+/* HEADER */
+.header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 20px 0;
+ margin-bottom: 30px;
+ border-bottom: 2px solid var(--border);
+}
+
+.header h1 {
+ font-size: 2rem;
+ background: var(--gradient);
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ background-clip: text;
+}
+
+.server-status {
+ display: flex;
+ align-items: center;
+ gap: 10px;
+ font-size: 1.1rem;
+ font-weight: 500;
+}
+
+.status-dot {
+ width: 12px;
+ height: 12px;
+ border-radius: 50%;
+ background: var(--success);
+ animation: pulse 2s infinite;
+}
+
+@keyframes pulse {
+ 0%, 100% { opacity: 1; }
+ 50% { opacity: 0.5; }
+}
+
+/* TABS */
+.tabs {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 10px;
+ margin-bottom: 30px;
+ overflow-x: unset;
+ justify-content: flex-start;
+}
+
+.tab {
+ padding: 10px 16px;
+ background: var(--bg-card);
+ border: 1px solid var(--border);
+ color: var(--text-dim);
+ cursor: pointer;
+ border-radius: 8px;
+ font-size: 0.9rem;
+ transition: all 0.3s ease;
+ white-space: nowrap;
+}
+
+.tab:hover {
+ background: var(--bg-card-hover);
+ color: var(--text);
+}
+
+.tab.active {
+ background: var(--accent);
+ color: white;
+ border-color: var(--accent);
+}
+
+/* TAB CONTENT */
+.tab-content {
+ display: none;
+}
+
+.tab-content.active {
+ display: block;
+ animation: fadeIn 0.3s ease;
+}
+
+@keyframes fadeIn {
+ from { opacity: 0; transform: translateY(10px); }
+ to { opacity: 1; transform: translateY(0); }
+}
+
+/* GRID */
+.grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
+ gap: 20px;
+ margin-bottom: 20px;
+}
+
+.grid-2col {
+ grid-template-columns: repeat(2, 1fr);
+}
+
+.grid-3col {
+ grid-template-columns: repeat(3, 1fr);
+}
+
+/* CONFIG LAYOUT (2-Column) */
+.config-layout {
+ display: grid;
+ grid-template-columns: 400px 1fr;
+ gap: 20px;
+}
+
+.config-left {
+ min-height: 600px;
+}
+
+.config-right {
+ min-height: 600px;
+ display: flex;
+ flex-direction: column;
+}
+
+.config-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 16px;
+}
+
+.config-header h3 {
+ margin: 0;
+}
+
+@media (max-width: 1024px) {
+ .config-layout {
+ grid-template-columns: 1fr;
+ }
+
+ .grid-3col {
+ grid-template-columns: 1fr;
+ }
+
+ .grid-2col {
+ grid-template-columns: 1fr;
+ }
+}
+
+/* CARDS */
+.card {
+ background: var(--bg-card);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 24px;
+ transition: all 0.3s ease;
+}
+
+.card:hover {
+ border-color: var(--accent);
+ box-shadow: 0 4px 20px rgba(79, 195, 247, 0.1);
+}
+
+.card.full-width {
+ grid-column: 1 / -1;
+}
+
+.card h3 {
+ margin-bottom: 16px;
+ font-size: 1.2rem;
+ color: var(--accent);
+}
+
+/* INFO GROUPS */
+.info-group {
+ display: flex;
+ flex-direction: column;
+ gap: 12px;
+}
+
+.info-item {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 8px 0;
+ border-bottom: 1px solid var(--border);
+}
+
+.info-item:last-child {
+ border-bottom: none;
+}
+
+.label {
+ color: var(--text-dim);
+ font-weight: 500;
+}
+
+.value {
+ color: var(--text);
+ font-weight: 600;
+}
+
+.badge {
+ padding: 4px 12px;
+ border-radius: 12px;
+ font-size: 0.85rem;
+ text-transform: uppercase;
+ background: var(--accent);
+ color: white;
+}
+.badge.mode-bambu {
+ background: #4fc3f7;
+}
+.badge.mode-klipper {
+ background: #66bb6a;
+}
+.badge.mode-dual {
+ background: #ab47bc;
+}
+
+.debug-mode-toggle {
+ display: flex;
+ align-items: center;
+ gap: 10px;
+ margin: 6px 0 12px 0;
+}
+.debug-mode-pill {
+ display: inline-flex;
+ border: 1px solid rgba(255,255,255,0.18);
+ border-radius: 999px;
+ overflow: hidden;
+ background: rgba(255,255,255,0.04);
+}
+.debug-mode-btn {
+ padding: 6px 12px;
+ background: transparent;
+ color: #dfe6ee;
+ border: none;
+ cursor: pointer;
+ font-weight: 700;
+ letter-spacing: 0.01em;
+ transition: background 0.15s ease, color 0.15s ease;
+}
+.debug-mode-btn + .debug-mode-btn {
+ border-left: 1px solid rgba(255,255,255,0.12);
+}
+.debug-mode-btn.active {
+ background: linear-gradient(135deg, #2e86de, #1b4f72);
+ color: #fff;
+ box-shadow: inset 0 0 0 1px rgba(255,255,255,0.12);
+}
+.debug-mode-btn:focus {
+ outline: 1px solid rgba(255,255,255,0.35);
+ outline-offset: -2px;
+}
+.debug-mode-label {
+ padding: 6px 10px;
+ border-radius: 10px;
+ background: rgba(255,255,255,0.06);
+ border: 1px solid rgba(255,255,255,0.1);
+ color: #b9c3d4;
+ font-weight: 700;
+ font-size: 0.95rem;
+}
+.mode-lite {
+ background: rgba(255,255,255,0.08);
+ color: #cbd5e1;
+}
+.mode-pro {
+ background: rgba(34,197,94,0.15);
+ color: #22c55e;
+ border: 1px solid rgba(34,197,94,0.35);
+}
+
+/* Data-Mode System (neue Implementierung) */
+body:not([data-mode="pro"]) [data-mode="pro"] {
+ display: none !important;
+}
+
+.debug-panel {
+ display: none;
+}
+.debug-panel.active-panel {
+ display: block;
+}
+.debug-panel:not(.active-panel) {
+ display: none !important;
+}
+.health-details {
+ margin-top: 10px;
+ border-top: 1px solid rgba(255,255,255,0.08);
+ padding-top: 10px;
+}
+.health-row {
+ display: flex;
+ justify-content: space-between;
+ font-size: 13px;
+ margin-bottom: 4px;
+}
+.health-ok { color: #4ade80; }
+.health-warn { color: #facc15; }
+.health-bad { color: #f87171; }
+.health-reasons {
+ margin-top: 10px;
+ padding-left: 18px;
+ color: #cdd6e0;
+ font-size: 13px;
+ line-height: 1.4;
+}
+.health-reasons li {
+ margin-bottom: 4px;
+ list-style: disc;
+ opacity: 0.9;
+}
+
+.pro-coming {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
+ gap: 10px;
+ margin-top: 10px;
+}
+.pro-coming .debug-card {
+ opacity: 0.6;
+ font-size: 0.95rem;
+}
+.scanner-pro-card {
+ position: relative;
+}
+.scanner-pro-head {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ margin-bottom: 6px;
+}
+.pro-icon {
+ width: 20px;
+ display: inline-flex;
+ justify-content: center;
+ align-items: center;
+ color: #9ec5ff;
+ opacity: 0.9;
+}
+.pro-badge {
+ background: rgba(46, 204, 113, 0.18);
+ color: #9ef0c1;
+ border-radius: 8px;
+ padding: 2px 8px;
+ font-weight: 700;
+ font-size: 0.8rem;
+ margin-left: auto;
+}
+.pro-disabled-card {
+ background: rgba(255,255,255,0.03);
+ border-color: rgba(255,255,255,0.08);
+ opacity: 0.9;
+ cursor: default;
+}
+.pro-disabled-card:hover {
+ border-color: rgba(255,255,255,0.08);
+ box-shadow: none;
+}
+.pro-btn-disabled {
+ background: rgba(255,255,255,0.08);
+ color: #cbd5e1;
+ border: 1px solid rgba(255,255,255,0.12);
+ cursor: not-allowed;
+}
+.pro-btn-disabled:hover {
+ background: rgba(255,255,255,0.08);
+ color: #cbd5e1;
+}
+.pro-hint {
+ margin-top: 6px;
+ font-size: 0.85rem;
+ color: #cbd5e1;
+ opacity: 0.85;
+}
+.pro-only-inline {
+ display: none;
+}
+body.pro-mode .pro-only-inline {
+ display: inline-flex;
+}
+
+/* Scanner Lite UI polish */
+.scanner-info {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
+ gap: 6px;
+ margin: 6px 0 10px 0;
+}
+.scanner-info-row {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 6px 10px;
+ background: rgba(255,255,255,0.03);
+ border: 1px solid rgba(255,255,255,0.08);
+ border-radius: 8px;
+}
+.scanner-info-label {
+ color: var(--text-dim, #a7b2c3);
+}
+.scanner-info-value {
+ font-weight: 700;
+}
+.scanner-actions {
+ display: flex;
+ gap: 8px;
+ align-items: center;
+ margin: 10px 0;
+}
+.scanner-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
+ gap: 12px;
+}
+.scanner-card {
+ display: grid;
+ grid-template-columns: 56px 1fr auto;
+ align-items: center;
+ gap: 14px;
+ padding: 14px 16px;
+ background: rgba(255,255,255,0.03);
+ border: 1px solid rgba(255,255,255,0.08);
+ border-radius: 14px;
+ transition: border-color 0.15s ease, box-shadow 0.15s ease;
+}
+.scanner-card:hover {
+ border-color: rgba(46,134,222,0.6);
+ box-shadow: 0 12px 24px rgba(0,0,0,0.25);
+}
+.sc-left {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+.sc-avatar {
+ width: 44px;
+ height: 44px;
+ border-radius: 12px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ background: rgba(255,255,255,0.05);
+ border: 1px solid rgba(255,255,255,0.06);
+ font-size: 18px;
+}
+.sc-main {
+ display: flex;
+ flex-direction: column;
+ gap: 4px;
+}
+.sc-title {
+ font-weight: 700;
+ font-size: 16px;
+}
+.sc-meta {
+ opacity: 0.75;
+ font-size: 13px;
+}
+.sc-right {
+ display: flex;
+ flex-direction: column;
+ align-items: flex-end;
+ gap: 10px;
+}
+.sc-actions {
+ display: flex;
+ gap: 10px;
+}
+.sc-add.disabled {
+ opacity: 0.45;
+ pointer-events: none;
+}
+.sc-sub {
+ opacity: 0.85;
+ font-size: 13px;
+ display: flex;
+ gap: 8px;
+}
+.sc-port-result {
+ color: #fff;
+}
+.scanner-header-card {
+ background: rgba(255,255,255,0.03);
+ border: 1px solid rgba(255,255,255,0.08);
+ border-radius: 12px;
+ padding: 12px;
+ margin-bottom: 12px;
+}
+.scanner-header-top {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ gap: 12px;
+ flex-wrap: wrap;
+}
+.scanner-hostline {
+ display: flex;
+ align-items: center;
+ gap: 6px;
+ font-weight: 700;
+ margin: 4px 0;
+}
+.scanner-range {
+ color: var(--text-dim, #a7b2c3);
+ font-size: 0.95rem;
+}
+.dot-sep {
+ color: var(--text-dim, #a7b2c3);
+}
+.badge {
+ padding: 4px 10px;
+ border-radius: 8px;
+ font-weight: 700;
+ border: 1px solid rgba(255,255,255,0.12);
+ transition: background-color 0.2s ease, color 0.2s ease, border-color 0.2s ease;
+ text-transform: uppercase;
+ letter-spacing: 0.02em;
+}
+.badge-ok { background: rgba(46,204,113,0.18); color: #9de8ba; border-color: rgba(46,204,113,0.6); }
+.badge-error { background: rgba(231,76,60,0.2); color: #ff9a8a; border-color: rgba(231,76,60,0.6); }
+.badge-warn { background: rgba(241,196,15,0.18); color: #ffd666; border-color: rgba(241,196,15,0.6); }
+.badge-idle { background: rgba(255,255,255,0.08); color: rgba(255,255,255,0.75); border-color: rgba(255,255,255,0.15); }
+.badge-testing { background: rgba(46,134,222,0.25); color: #b6d8ff; border-color: rgba(46,134,222,0.6); animation: scPulse 0.9s ease-in-out infinite; }
+.badge-failed { background: rgba(231,76,60,0.2); color: #ff9a8a; border-color: rgba(231,76,60,0.6); }
+
+.btn {
+ padding: 8px 12px;
+ border-radius: 10px;
+ border: 1px solid rgba(255,255,255,0.15);
+ background: rgba(255,255,255,0.04);
+ color: #fff;
+ cursor: pointer;
+ transition: transform 0.1s ease, box-shadow 0.1s ease, border-color 0.15s ease, background-color 0.15s ease;
+ height: 36px;
+ display: inline-flex;
+ align-items: center;
+ gap: 6px;
+}
+.btn:hover:not(:disabled) {
+ transform: translateY(-1px);
+ border-color: rgba(46,134,222,0.6);
+ box-shadow: 0 10px 20px rgba(0,0,0,0.25);
+}
+.btn:disabled {
+ opacity: 0.4;
+ cursor: not-allowed;
+ box-shadow: none;
+ transform: none;
+}
+.btn-primary {
+ background: linear-gradient(135deg, #2e86de, #1b4f72);
+ border-color: #2e86de;
+}
+.btn-secondary {
+ background: rgba(255,255,255,0.05);
+}
+.badge.mode-standalone {
+ background: #ffa726;
+}
+.btn-add-disabled {
+ background: rgba(255,255,255,0.05);
+ border-color: rgba(255,255,255,0.12);
+}
+.btn-add-active {
+ background: linear-gradient(135deg, #2e86de, #1b4f72);
+ border-color: #2e86de;
+ box-shadow: 0 8px 16px rgba(46,134,222,0.25);
+}
+
+/* Quick Scan Loading */
+.btn-loading {
+ position: relative;
+ pointer-events: none;
+ padding-right: 36px; /* mehr Platz für Spinner */
+ color: #dfe8f3 !important;
+}
+.btn-loading::after {
+ content: "";
+ position: absolute;
+ right: 12px;
+ top: 50%;
+ width: 16px;
+ height: 16px;
+ margin-top: -8px;
+ border: 2px solid rgba(255,255,255,0.55);
+ border-top-color: #fff;
+ border-radius: 50%;
+ animation: btnSpin 0.7s linear infinite;
+ filter: drop-shadow(0 0 4px rgba(255,255,255,0.55));
+}
+
+@keyframes btnSpin {
+ from { transform: rotate(0deg); }
+ to { transform: rotate(360deg); }
+}
+
+@keyframes pulse {
+ 0% { opacity: 0.6; }
+ 50% { opacity: 1; }
+ 100% { opacity: 0.6; }
+}
+
+@keyframes scPulse {
+ 0% { opacity: 0.55; transform: translateY(0); }
+ 50% { opacity: 1; transform: translateY(-1px); }
+ 100% { opacity: 0.55; transform: translateY(0); }
+}
+
+/* PROGRESS BARS */
+.resource-item {
+ margin-bottom: 20px;
+}
+
+.resource-header {
+ display: flex;
+ justify-content: space-between;
+ margin-bottom: 8px;
+ font-weight: 600;
+}
+
+.progress-bar {
+ height: 8px;
+ background: var(--border);
+ border-radius: 4px;
+ overflow: hidden;
+ margin-bottom: 4px;
+}
+
+.progress-fill {
+ height: 100%;
+ background: var(--gradient);
+ transition: width 0.5s ease;
+ border-radius: 4px;
+}
+
+small {
+ color: var(--text-dim);
+ font-size: 0.85rem;
+}
+
+/* BUTTONS */
+.btn {
+ padding: 10px 20px;
+ border: none;
+ border-radius: 8px;
+ cursor: pointer;
+ font-size: 0.95rem;
+ font-weight: 600;
+ transition: all 0.3s ease;
+ margin-top: 10px;
+}
+
+.btn-small {
+ padding: 6px 12px;
+ font-size: 0.85rem;
+ margin-top: 0;
+}
+
+.btn-primary {
+ background: var(--accent);
+ color: white;
+}
+
+.btn-primary:hover {
+ background: var(--accent-hover);
+ transform: translateY(-2px);
+}
+
+.btn-secondary {
+ background: var(--bg-card-hover);
+ color: var(--text);
+ border: 1px solid var(--border);
+}
+
+.btn-secondary:hover {
+ background: var(--border);
+}
+
+.btn-success {
+ background: var(--success);
+ color: white;
+}
+
+.btn-success:hover {
+ background: #4caf50;
+ transform: translateY(-2px);
+}
+
+.btn-success:disabled {
+ background: var(--border);
+ color: var(--text-dim);
+ cursor: not-allowed;
+ transform: none;
+}
+
+.btn-error {
+ background: var(--error);
+ color: white;
+}
+
+.btn-error:hover {
+ background: #e74c3c;
+ transform: translateY(-2px);
+}
+
+.btn-error:disabled {
+ background: var(--border);
+ color: var(--text-dim);
+ cursor: not-allowed;
+ transform: none;
+}
+
+.btn-warning {
+ background: var(--accent);
+ color: white;
+}
+
+.btn-warning:hover {
+ background: #f39c12;
+ transform: translateY(-2px);
+}
+
+.btn-warning:disabled {
+ background: var(--border);
+ color: var(--text-dim);
+ cursor: not-allowed;
+ transform: none;
+}
+
+.btn:disabled:hover {
+ transform: none;
+}
+
+/* TOGGLE SWITCHES */
+.toggle-item {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 12px 0;
+ border-bottom: 1px solid var(--border);
+}
+
+.toggle-switch {
+ position: relative;
+ width: 50px;
+ height: 26px;
+ background: var(--border);
+ border-radius: 13px;
+ cursor: pointer;
+ transition: background 0.3s ease;
+}
+
+.toggle-switch.active {
+ background: var(--success);
+}
+
+.toggle-switch::after {
+ content: '';
+ position: absolute;
+ top: 3px;
+ left: 3px;
+ width: 20px;
+ height: 20px;
+ background: white;
+ border-radius: 50%;
+ transition: transform 0.3s ease;
+}
+
+.toggle-switch.active::after {
+ transform: translateX(24px);
+}
+
+/* CONFIG EDITOR */
+.config-editor {
+ flex: 1;
+ background: #0d0d0d;
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ padding: 16px;
+ font-family: 'Consolas', 'Monaco', monospace;
+ font-size: 0.9rem;
+ line-height: 1.5;
+ color: var(--text);
+ resize: none;
+ min-height: 450px;
+ transition: border-color 0.3s ease;
+}
+
+.config-editor:focus {
+ outline: none;
+ border-color: var(--accent);
+}
+
+.config-editor.modified {
+ border-color: var(--warning);
+}
+
+.config-actions {
+ display: flex;
+ gap: 10px;
+ align-items: center;
+ margin-top: 12px;
+}
+
+.config-status {
+ margin-left: auto;
+ font-size: 0.9rem;
+ font-weight: 600;
+}
+
+.config-status.saved {
+ color: var(--success);
+}
+
+.config-status.modified {
+ color: var(--warning);
+}
+
+.config-status.error {
+ color: var(--error);
+}
+
+.config-info {
+ margin-top: 16px;
+ padding: 12px;
+ background: rgba(79, 195, 247, 0.1);
+ border-left: 3px solid var(--accent);
+ border-radius: 4px;
+}
+
+.config-info small {
+ color: var(--accent);
+}
+
+/* INPUTS */
+.select-input {
+ padding: 8px 12px;
+ background: var(--bg-dark);
+ border: 1px solid var(--border);
+ border-radius: 6px;
+ color: var(--text);
+ font-size: 0.95rem;
+ cursor: pointer;
+}
+
+.select-input:focus {
+ outline: none;
+ border-color: var(--accent);
+}
+
+/* LOADER */
+.loader {
+ text-align: center;
+ color: var(--text-dim);
+ padding: 20px;
+}
+
+/* HORIZONTAL RULE */
+hr {
+ border: none;
+ border-top: 1px solid var(--border);
+ margin: 16px 0;
+}
+
+/* LINKS */
+a {
+ color: var(--accent);
+ text-decoration: none;
+ transition: color 0.3s ease;
+}
+
+a:hover {
+ color: var(--accent-hover);
+ text-decoration: underline;
+}
+
+/* SERVICE BUTTONS */
+.service-buttons {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 10px;
+ margin: 12px 0;
+}
+
+.service-note {
+ width: 100%;
+ margin: 8px 0;
+ padding: 6px 10px;
+ background: rgba(255, 167, 38, 0.1);
+ border-left: 3px solid var(--warning);
+ border-radius: 4px;
+ font-size: 0.75rem;
+ color: var(--warning);
+}
+
+.btn-block {
+ width: 100%;
+}
+
+/* OUTPUT BOX */
+.output-box {
+ margin-top: 12px;
+ padding: 12px;
+ background: #0d0d0d;
+ border: 1px solid var(--border);
+ border-radius: 6px;
+ font-family: 'Consolas', 'Monaco', monospace;
+ font-size: 0.85rem;
+ max-height: 300px;
+ overflow-y: auto;
+ white-space: pre-wrap;
+ word-wrap: break-word;
+ display: none;
+}
+
+.output-box.show {
+ display: block;
+}
+
+.output-box:empty {
+ display: none;
+}
+
+.output-box-compact {
+ max-height: 150px;
+ font-size: 0.75rem;
+}
+
+.docker-status-compact {
+ margin-bottom: 12px;
+}
+
+.docker-status-compact .info-item {
+ padding: 4px 0;
+ font-size: 0.85rem;
+}
+
+/* LOGS LIST */
+#logsList {
+ display: flex;
+ flex-direction: column;
+ gap: 12px;
+}
+
+.log-module {
+ padding: 12px;
+ background: rgba(79, 195, 247, 0.05);
+ border: 1px solid var(--border);
+ border-radius: 6px;
+}
+
+.log-module-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 8px;
+}
+
+.log-module-title {
+ font-weight: 600;
+ color: var(--accent);
+}
+
+.log-files {
+ display: flex;
+ flex-direction: column;
+ gap: 4px;
+ font-size: 0.85rem;
+ color: var(--text-dim);
+}
+
+.btn-danger {
+ background: var(--error);
+ color: white;
+}
+
+.btn-danger:hover {
+ background: #d32f2f;
+ transform: translateY(-2px);
+}
+
+/* DATABASE TABLES */
+.db-table {
+ margin-bottom: 16px;
+ padding: 12px;
+ background: rgba(79, 195, 247, 0.05);
+ border: 1px solid var(--border);
+ border-radius: 6px;
+}
+
+.db-table-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 8px;
+}
+
+.db-table-name {
+ font-weight: 600;
+ color: var(--accent);
+ font-size: 1rem;
+}
+
+.db-table-info {
+ font-size: 0.85rem;
+ color: var(--text-dim);
+}
+
+.db-columns {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 8px;
+ margin-top: 8px;
+}
+
+.db-column {
+ padding: 4px 8px;
+ background: var(--bg-card-hover);
+ border: 1px solid var(--border);
+ border-radius: 4px;
+ font-size: 0.85rem;
+ display: inline-flex;
+ align-items: center;
+ gap: 4px;
+}
+
+.db-column small {
+ color: var(--text-dim);
+ font-size: 0.75rem;
+}
+
+/* Vorschautabelle für Datenbanktabellen */
+
+/* Vorschautabelle für Datenbanktabellen mit horizontalem Scroll */
+.db-preview-table-wrapper {
+ width: 100%;
+ overflow-x: auto;
+}
+.db-preview-table {
+ min-width: 900px;
+ border-collapse: collapse;
+ margin-top: 12px;
+ background: var(--bg-card);
+ color: var(--text);
+ font-size: 0.95rem;
+ box-shadow: 0 2px 8px rgba(79,195,247,0.08);
+}
+
+.db-preview-table th, .db-preview-table td {
+ padding: 8px 12px;
+ border: 1px solid var(--border);
+ text-align: left;
+ background: rgba(79,195,247,0.03);
+}
+
+.db-preview-table th {
+ background: var(--bg-card-hover);
+ color: var(--accent);
+ font-weight: 600;
+}
+
+.db-preview-table tr:hover {
+ background: rgba(79,195,247,0.12);
+}
+
+.db-preview-table td {
+ color: var(--text);
+}
+
+/* SCANNER */
+.scan-progress {
+ margin: 16px 0;
+ padding: 12px;
+ background: rgba(79, 195, 247, 0.1);
+ border-left: 3px solid var(--accent);
+ border-radius: 4px;
+ display: none;
+}
+
+.scan-progress.show {
+ display: block;
+}
+
+.scan-results {
+ margin-top: 16px;
+}
+
+.printer-card {
+ padding: 16px;
+ background: var(--bg-card-hover);
+ border: 1px solid var(--border);
+ border-left: 4px solid var(--accent);
+ border-radius: 8px;
+ margin-bottom: 12px;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ transition: all 0.3s ease;
+}
+
+.printer-card:hover {
+ border-left-color: var(--success);
+ box-shadow: 0 2px 10px rgba(79, 195, 247, 0.2);
+}
+
+.printer-info {
+ flex: 1;
+}
+
+.printer-ip {
+ font-size: 1.1rem;
+ font-weight: 600;
+ color: var(--text);
+ margin-bottom: 4px;
+}
+
+.printer-details {
+ font-size: 0.85rem;
+ color: var(--text-dim);
+ display: flex;
+ gap: 12px;
+ flex-wrap: wrap;
+}
+
+.printer-badge {
+ display: inline-block;
+ padding: 4px 10px;
+ border-radius: 12px;
+ font-size: 0.75rem;
+ font-weight: 600;
+ text-transform: uppercase;
+}
+
+.printer-badge.bambu {
+ background: var(--success);
+ color: white;
+}
+
+.printer-badge.klipper {
+ background: var(--accent);
+ color: white;
+}
+
+.printer-badge.unknown {
+ background: var(--text-dim);
+ color: white;
+}
+
+.printer-actions {
+ display: flex;
+ gap: 8px;
+}
+
+input[type="text"],
+input[type="number"] {
+ background: var(--bg-dark);
+ border: 1px solid var(--border);
+ border-radius: 6px;
+ color: var(--text);
+ padding: 8px 12px;
+ font-size: 0.95rem;
+ transition: border-color 0.3s ease;
+}
+
+input[type="text"]:focus,
+input[type="number"]:focus {
+ outline: none;
+ border-color: var(--accent);
+}
+
+/* MQTT VIEWER */
+.mqtt-message {
+ padding: 12px;
+ margin-bottom: 8px;
+ background: var(--bg-card);
+ border-left: 3px solid var(--accent);
+ border-radius: 4px;
+ font-size: 0.85rem;
+ transition: all 0.2s ease;
+}
+
+.mqtt-message:hover {
+ background: var(--bg-card-hover);
+ border-left-color: var(--success);
+}
+
+.mqtt-message-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 8px;
+ padding-bottom: 6px;
+ border-bottom: 1px solid var(--border);
+}
+
+.mqtt-topic {
+ font-weight: 600;
+ color: var(--accent);
+ font-family: 'Consolas', monospace;
+}
+
+.mqtt-timestamp {
+ color: var(--text-dim);
+ font-size: 0.75rem;
+}
+
+.mqtt-payload {
+ color: var(--text);
+ white-space: pre-wrap;
+ word-break: break-all;
+ font-family: 'Consolas', monospace;
+ line-height: 1.4;
+}
+
+.mqtt-payload.json {
+ background: var(--bg-dark);
+ padding: 10px;
+ border-radius: 4px;
+ overflow-x: auto;
+}
+
+.topic-item {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 8px 12px;
+ margin-bottom: 6px;
+ background: var(--bg-card);
+ border-left: 3px solid var(--success);
+ border-radius: 4px;
+}
+
+.topic-name {
+ font-family: 'Consolas', monospace;
+ color: var(--text);
+ flex: 1;
+}
+
+.topic-btn {
+ text-align: left;
+ justify-content: flex-start;
+ font-family: 'Consolas', monospace;
+ font-size: 0.85rem;
+}
+
+textarea.select-input {
+ background: var(--bg-dark);
+ border: 1px solid var(--border);
+ border-radius: 6px;
+ color: var(--text);
+ padding: 10px;
+ font-size: 0.9rem;
+ transition: border-color 0.3s ease;
+}
+
+textarea.select-input:focus {
+ outline: none;
+ border-color: var(--accent);
+}
+
+/* TOAST NOTIFICATIONS */
+#toast-container {
+ position: fixed;
+ top: 20px;
+ right: 20px;
+ z-index: 9999;
+ display: flex;
+ flex-direction: column;
+ gap: 10px;
+}
+
+.toast {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+ padding: 16px 20px;
+ background: var(--bg-card);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ box-shadow: 0 4px 20px rgba(0, 0, 0, 0.5);
+ min-width: 300px;
+ max-width: 400px;
+ opacity: 0;
+ transform: translateX(400px);
+ transition: all 0.3s ease;
+}
+
+.toast.show {
+ opacity: 1;
+ transform: translateX(0);
+}
+
+.toast-icon {
+ font-size: 1.5rem;
+ font-weight: bold;
+ flex-shrink: 0;
+}
+
+.toast-message {
+ flex: 1;
+ color: var(--text);
+ font-weight: 500;
+}
+
+.toast-success {
+ border-left: 4px solid var(--success);
+}
+
+.toast-success .toast-icon {
+ color: var(--success);
+}
+
+.toast-error {
+ border-left: 4px solid var(--error);
+}
+
+.toast-error .toast-icon {
+ color: var(--error);
+}
+
+.toast-info {
+ border-left: 4px solid var(--accent);
+}
+
+.toast-info .toast-icon {
+ color: var(--accent);
+}
+
+.toast-warning {
+ border-left: 4px solid var(--warning);
+}
+
+.toast-warning .toast-icon {
+ color: var(--warning);
+}
+
+/* RESPONSIVE */
+/* === PERFORMANCE MONITORING === */
+.metric-display {
+ text-align: center;
+ margin-top: 10px;
+}
+
+.metric-value {
+ font-size: 2.5rem;
+ font-weight: 700;
+ color: var(--accent);
+ text-shadow: 0 0 20px rgba(79, 195, 247, 0.5);
+ transition: all 0.3s ease;
+}
+
+canvas {
+ width: 100% !important;
+ height: 150px !important;
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ background: var(--bg-dark);
+ margin-bottom: 10px;
+}
+
+.alert {
+ padding: 12px;
+ margin: 8px 0;
+ border-radius: 6px;
+ border-left: 4px solid;
+ animation: slideIn 0.3s ease;
+}
+
+.alert-warning {
+ background: rgba(255, 167, 38, 0.1);
+ border-color: var(--warning);
+ color: var(--warning);
+}
+
+.alert-critical {
+ background: rgba(239, 83, 80, 0.1);
+ border-color: var(--error);
+ color: var(--error);
+}
+
+@keyframes slideIn {
+ from {
+ opacity: 0;
+ transform: translateX(-20px);
+ }
+ to {
+ opacity: 1;
+ transform: translateX(0);
+ }
+}
+
+/* === RESPONSIVE === */
+@media (max-width: 768px) {
+ .header {
+ flex-direction: column;
+ align-items: flex-start;
+ gap: 15px;
+ }
+
+ .grid {
+ grid-template-columns: 1fr;
+ }
+
+ .tabs {
+ flex-wrap: nowrap;
+ overflow-x: auto;
+ }
+
+ #toast-container {
+ top: 10px;
+ right: 10px;
+ left: 10px;
+ }
+
+ .toast {
+ min-width: auto;
+ max-width: 100%;
+ }
+
+ .metric-value {
+ font-size: 2rem;
+ }
+
+ canvas {
+ height: 120px !important;
+ }
+}
+
+/* ===============================================
+ MODAL
+ =============================================== */
+.modal {
+ display: none;
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background: rgba(0, 0, 0, 0.7);
+ backdrop-filter: blur(5px);
+ z-index: 10000;
+ align-items: center;
+ justify-content: center;
+ animation: fadeIn 0.2s ease;
+}
+
+@keyframes fadeIn {
+ from { opacity: 0; }
+ to { opacity: 1; }
+}
+
+.modal-content {
+ background: var(--card-bg);
+ border-radius: 12px;
+ box-shadow: 0 20px 60px rgba(0, 0, 0, 0.5);
+ max-width: 600px;
+ width: 90%;
+ max-height: 80vh;
+ overflow: hidden;
+ animation: slideUp 0.3s ease;
+}
+
+@keyframes slideUp {
+ from {
+ transform: translateY(30px);
+ opacity: 0;
+ }
+ to {
+ transform: translateY(0);
+ opacity: 1;
+ }
+}
+
+.modal-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 20px 24px;
+ border-bottom: 1px solid var(--border);
+ background: linear-gradient(135deg, rgba(79, 195, 247, 0.1), rgba(121, 85, 246, 0.1));
+}
+
+.modal-header h3 {
+ margin: 0;
+ font-size: 1.3rem;
+ color: var(--text);
+}
+
+.modal-close {
+ background: none;
+ border: none;
+ font-size: 2rem;
+ color: var(--text-dim);
+ cursor: pointer;
+ padding: 0;
+ width: 40px;
+ height: 40px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: 50%;
+ transition: all 0.2s;
+}
+
+.modal-close:hover {
+ background: rgba(255, 255, 255, 0.1);
+ color: var(--text);
+ transform: rotate(90deg);
+}
+
+.modal-body {
+ padding: 24px;
+ max-height: 60vh;
+ overflow-y: auto;
+}
+
+.modal-footer {
+ padding: 16px 24px;
+ border-top: 1px solid var(--border);
+ display: flex;
+ justify-content: flex-end;
+ gap: 10px;
+}
+
+.test-result-box {
+ background: var(--bg);
+ border-radius: 8px;
+ padding: 20px;
+}
+
+.test-result-box .info-group {
+ display: flex;
+ flex-direction: column;
+ gap: 12px;
+}
+
+.test-result-box .info-item {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 8px 0;
+ border-bottom: 1px solid var(--border);
+}
+
+.test-result-box .info-item:last-child {
+ border-bottom: none;
+}
+
+.test-result-box .label {
+ color: var(--text-dim);
+ font-size: 0.9rem;
+}
+
+.test-result-box .value {
+ color: var(--text);
+ font-weight: 500;
+}
+
+/* JSON Inspector Styles */
+.json-tree .json-node { padding: 2px 0; }
+.json-tree .json-row { display:flex; align-items:center; gap:8px; }
+.json-tree .json-toggle { cursor:pointer; color:#6cf; user-select:none; width:14px; text-align:center; }
+.json-tree .json-key { color:#9cdcfe; }
+.json-tree .json-type { color:#888; font-size:0.8em; }
+.json-tree .json-value { color:#ce9178; }
+.json-tree .json-children { margin-left: 16px; border-left:1px dashed #333; padding-left:10px; }
+.json-tree .copy-btn { cursor:pointer; font-size:0.75rem; padding:2px 6px; border:1px solid #444; background:#1f1f1f; color:#ddd; border-radius:4px; }
+.json-tree .copy-btn:hover { background:#2a2a2a; }
+
+/* Custom Scrollbar für Live Raw Output */
+#mqttLiveRawPublish::-webkit-scrollbar {
+ width: 12px;
+}
+
+#mqttLiveRawPublish::-webkit-scrollbar-track {
+ background: #111;
+ border-radius: 8px;
+}
+
+#mqttLiveRawPublish::-webkit-scrollbar-thumb {
+ background: #00ff00;
+ border-radius: 8px;
+ border: 2px solid #000;
+}
+
+#mqttLiveRawPublish::-webkit-scrollbar-thumb:hover {
+ background: #00dd00;
+}
+
+.printer-block {
+ background: #222;
+ padding: 15px;
+ border-radius: 6px;
+ margin-bottom: 20px;
+ color: #f0f0f0;
+}
+
+.printer-wrap, .raw-wrap {
+ margin-top: 20px;
+ white-space: pre-wrap;
+ font-family: monospace;
+}
+.status-badge {
+ display: inline-block;
+ padding: 6px 14px;
+ border-radius: 8px;
+ font-weight: bold;
+ font-family: monospace;
+ margin-bottom: 10px;
+ color: #fff;
+}
+
+.status-connected {
+ background-color: #28a745;
+ box-shadow: 0 0 6px #28a745aa;
+}
+
+.status-disconnected {
+ background-color: #dc3545;
+ box-shadow: 0 0 6px #dc3545aa;
+}
+
+.status-pending {
+ background-color: #ffc107;
+ color: black;
+ box-shadow: 0 0 6px #ffc107aa;
+}
+.status-badge {
+ display: inline-block;
+ padding: 6px 14px;
+ border-radius: 8px;
+ font-weight: bold;
+ font-family: monospace;
+ margin-bottom: 10px;
+ color: #fff;
+}
+
+.status-connected {
+ background-color: #28a745;
+ box-shadow: 0 0 6px #28a745aa;
+}
+
+.status-disconnected {
+ background-color: #dc3545;
+ box-shadow: 0 0 6px #dc3545aa;
+}
+
+.status-pending {
+ background-color: #ffc107;
+ color: black;
+ box-shadow: 0 0 6px #ffc107aa;
+}
+#wsActivity {
+ margin-top: 10px;
+ font-family: monospace;
+ color: #eee;
+}
+
+#activitySparkline {
+ margin-top: 6px;
+ background: #111;
+ border-radius: 4px;
+}
+
+/* Manual Printer Dialog */
+.modal-overlay {
+ position: fixed;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background: rgba(0, 0, 0, 0.75);
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ z-index: 9999;
+ backdrop-filter: blur(4px);
+}
+
+.modal-dialog {
+ background: var(--bg-primary, #1a1d29);
+ border: 1px solid var(--border-color, rgba(255, 255, 255, 0.12));
+ border-radius: 12px;
+ width: 90%;
+ max-width: 500px;
+ box-shadow: 0 8px 32px rgba(0, 0, 0, 0.4);
+ animation: modalSlideIn 0.2s ease-out;
+}
+
+@keyframes modalSlideIn {
+ from {
+ opacity: 0;
+ transform: translateY(-20px);
+ }
+ to {
+ opacity: 1;
+ transform: translateY(0);
+ }
+}
+
+.modal-header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ padding: 20px;
+ border-bottom: 1px solid var(--border-color, rgba(255, 255, 255, 0.12));
+}
+
+.modal-header h3 {
+ margin: 0;
+ font-size: 1.25rem;
+ font-weight: 700;
+ color: var(--text-primary, #fff);
+}
+
+.modal-close {
+ background: transparent;
+ border: none;
+ color: var(--text-dim, #a7b2c3);
+ font-size: 2rem;
+ line-height: 1;
+ cursor: pointer;
+ padding: 0;
+ width: 32px;
+ height: 32px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: 6px;
+ transition: background-color 0.2s ease, color 0.2s ease;
+}
+
+.modal-close:hover {
+ background: rgba(255, 255, 255, 0.08);
+ color: var(--text-primary, #fff);
+}
+
+.modal-body {
+ padding: 20px;
+}
+
+.modal-footer {
+ display: flex;
+ gap: 12px;
+ justify-content: flex-end;
+ padding: 20px;
+ border-top: 1px solid var(--border-color, rgba(255, 255, 255, 0.12));
+}
+
+.form-group {
+ margin-bottom: 16px;
+}
+
+.form-group:last-child {
+ margin-bottom: 0;
+}
+
+.form-group label {
+ display: block;
+ margin-bottom: 6px;
+ font-weight: 600;
+ color: var(--text-primary, #fff);
+ font-size: 0.95rem;
+}
+
+.form-input {
+ width: 100%;
+ padding: 10px 12px;
+ border-radius: 8px;
+ border: 1px solid var(--border-color, rgba(255, 255, 255, 0.12));
+ background: var(--bg-secondary, #232838);
+ color: var(--text-primary, #fff);
+ font-size: 1rem;
+ transition: border-color 0.2s ease, background-color 0.2s ease;
+}
+
+.form-input:focus {
+ outline: none;
+ border-color: rgba(46, 134, 222, 0.6);
+ background: var(--bg-tertiary, #2a2f42);
+}
+
+.form-input::placeholder {
+ color: var(--text-dim, #a7b2c3);
+}
+
+.form-hint {
+ margin-top: 6px;
+ font-size: 0.85rem;
+ color: var(--text-dim, #a7b2c3);
+}
+
+.test-result {
+ padding: 12px;
+ border-radius: 8px;
+ font-size: 0.95rem;
+ font-weight: 500;
+ border: 1px solid;
+}
+
+.test-result-info {
+ background: rgba(46, 134, 222, 0.15);
+ color: #b6d8ff;
+ border-color: rgba(46, 134, 222, 0.4);
+}
+
+.test-result-success {
+ background: rgba(46, 204, 113, 0.15);
+ color: #9de8ba;
+ border-color: rgba(46, 204, 113, 0.4);
+}
+
+.test-result-error {
+ background: rgba(231, 76, 60, 0.15);
+ color: #ff9a8a;
+ border-color: rgba(231, 76, 60, 0.4);
+}
+
+.test-result-warning {
+ background: rgba(241, 196, 15, 0.15);
+ color: #ffd666;
+ border-color: rgba(241, 196, 15, 0.4);
+}
diff --git a/app/static/debug_center.css b/app/static/debug_center.css
new file mode 100644
index 0000000..ddb6b4d
--- /dev/null
+++ b/app/static/debug_center.css
@@ -0,0 +1,175 @@
+.debug-hero {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 18px 20px;
+ margin-bottom: 12px;
+ border: 1px solid var(--border, #222);
+ border-radius: 14px;
+ background: radial-gradient(circle at 20% 20%, rgba(255,255,255,0.03), transparent 40%), var(--panel, #111);
+ box-shadow: var(--shadow, 0 20px 50px rgba(0,0,0,0.35));
+}
+.debug-hero .title {
+ margin: 0;
+ font-size: 1.6rem;
+}
+.debug-hero .subtitle {
+ margin: 6px 0 0 0;
+ color: var(--text-dim, #a7b2c3);
+}
+
+.section-title-block {
+ margin: 16px 0 10px 0;
+}
+.section-title-block .eyebrow {
+ text-transform: uppercase;
+ letter-spacing: 0.12em;
+ font-size: 0.75rem;
+ color: var(--text-dim, #a7b2c3);
+ margin: 0 0 4px 0;
+}
+.section-title-block h2 {
+ margin: 0;
+ font-size: 1.35rem;
+}
+.section-title-block .subtitle {
+ margin: 4px 0 0 0;
+ color: var(--text-dim, #a7b2c3);
+}
+
+.panel-modern {
+ background: radial-gradient(circle at 20% 20%, rgba(255,255,255,0.03), transparent 40%), var(--panel, #111);
+ border: 1px solid var(--border, #222);
+ border-radius: 14px;
+ box-shadow: var(--shadow, 0 20px 50px rgba(0,0,0,0.35));
+ padding: 14px;
+}
+
+.tab-content {
+ background: transparent;
+ border: none;
+}
+
+.grid-modern {
+ display: grid;
+ gap: 14px;
+}
+
+.card {
+ background: radial-gradient(circle at 20% 20%, rgba(255,255,255,0.03), transparent 40%), var(--panel, #111);
+ border: 1px solid var(--border, #222);
+ box-shadow: var(--shadow, 0 20px 50px rgba(0,0,0,0.35));
+}
+
+.output-box,
+.output-box-compact {
+ background: #0f141c;
+ border: 1px solid var(--border, #222);
+ border-radius: 12px;
+}
+
+.json-tree {
+ background: #0f0f0f;
+ border: 1px solid #333;
+ border-radius: 8px;
+}
+
+/* Tabs modern */
+.tabs {
+ display: flex;
+ gap: 8px;
+ padding: 10px 0;
+ border-bottom: 1px solid var(--border, #222);
+ overflow-x: auto;
+}
+.tabs .tab {
+ position: relative;
+ padding: 10px 14px;
+ border-radius: 12px;
+ background: #11161f;
+ border: 1px solid var(--border, #222);
+ color: var(--text-dim, #a7b2c3);
+ cursor: pointer;
+ transition: all 0.15s ease;
+ display: inline-flex;
+ align-items: center;
+ gap: 8px;
+ box-shadow: inset 0 0 0 1px rgba(255,255,255,0.02);
+}
+.tabs .tab:hover {
+ color: var(--text, #e8ecf2);
+ border-color: #2f3a4d;
+}
+.tabs .tab.active {
+ color: var(--text, #e8ecf2);
+ background: linear-gradient(135deg, #1f2835, #141a22);
+ border-color: #2f3a4d;
+ box-shadow: 0 8px 20px rgba(0,0,0,0.35), inset 0 -2px 0 var(--accent, #f39c12);
+}
+.tabs .tab::after {
+ content: "";
+ position: absolute;
+ left: 12px;
+ right: 12px;
+ bottom: -4px;
+ height: 3px;
+ border-radius: 999px;
+ background: var(--accent, #f39c12);
+ opacity: 0;
+ transform: translateY(2px);
+ transition: all 0.15s ease;
+}
+.tabs .tab.active::after {
+ opacity: 1;
+ transform: translateY(0);
+}
+.tab__icon {
+ font-size: 0.9rem;
+ opacity: 0.9;
+}
+
+/* Log / Terminal */
+.log-panel {
+ border: 1px solid var(--border, #222);
+ border-radius: 14px;
+ background: radial-gradient(circle at 20% 20%, rgba(255,255,255,0.03), transparent 40%), #0a0f16;
+ box-shadow: inset 0 0 0 1px rgba(255,255,255,0.02), 0 18px 40px rgba(0,0,0,0.45);
+ padding: 12px;
+}
+.log-panel__header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ margin-bottom: 8px;
+}
+.log-panel__actions {
+ display: flex;
+ gap: 8px;
+}
+.log-terminal {
+ font-family: "Consolas", "Fira Code", monospace;
+ background: #000000;
+ color: #00ff00;
+ border: 2px solid #0d1b2a;
+ border-radius: 10px;
+ min-height: 300px;
+ max-height: 500px;
+ overflow-y: auto;
+ padding: 12px;
+}
+.log-terminal .log-line.error { color: #ff6b6b; }
+.log-terminal .log-line.warn { color: #f6d55c; }
+.log-terminal .log-line.info { color: #4fc3f7; }
+.log-copy-btn {
+ background: #1b2330;
+ color: var(--text, #e8ecf2);
+ border: 1px solid var(--border, #222);
+ border-radius: 10px;
+ padding: 6px 10px;
+ cursor: pointer;
+}
+.log-copy-btn:hover {
+ background: #e74c3c;
+ color: #0c0f14;
+ border-color: #e74c3c;
+}
diff --git a/app/static/debug_v2.js b/app/static/debug_v2.js
new file mode 100644
index 0000000..f538f11
--- /dev/null
+++ b/app/static/debug_v2.js
@@ -0,0 +1,3319 @@
+// Debug Center baseline (System + Performance)
+const POLL_MS = 5000;
+let systemInterval = null;
+let backendInterval = null;
+let performanceInterval = null;
+let activeTab = 'system';
+let scannerInitialized = false;
+let scannerSuggestedRange = null;
+const runningPortTests = new Set();
+window.DEBUG_MODE = 'lite';
+let probeTarget = null;
+let configLoaded = false;
+let configSnapshot = {};
+let logViewerState = { module: 'app', lastCount: 0 };
+
+function setConfigEditable(enabled) {
+ document.querySelectorAll('#panel-config input, #panel-config select, #panel-config button').forEach(el => {
+ if (el.id && el.id.startsWith('cfg_')) {
+ el.disabled = !enabled;
+ }
+ });
+
+ if (enabled) {
+ applyConfigEnableStates();
+ }
+}
+
+function applyConfigEnableStates() {
+ // Health gate
+ const healthEnabled = document.getElementById('cfg_health_enabled')?.checked;
+ ['cfg_health_warn_latency', 'cfg_health_error_latency'].forEach(id => {
+ const el = document.getElementById(id);
+ if (el) el.disabled = !healthEnabled;
+ });
+ ['cfg_health_cancel', 'cfg_health_save'].forEach(id => {
+ const el = document.getElementById(id);
+ if (el) el.disabled = !healthEnabled;
+ });
+
+ // Runtime gate
+ const runtimeEnabled = document.getElementById('cfg_runtime_enabled')?.checked;
+ ['cfg_runtime_poll_interval'].forEach(id => {
+ const el = document.getElementById(id);
+ if (el) el.disabled = !runtimeEnabled;
+ });
+ ['cfg_runtime_cancel', 'cfg_runtime_save'].forEach(id => {
+ const el = document.getElementById(id);
+ if (el) el.disabled = !runtimeEnabled;
+ });
+
+ // JSON Inspector gate: Buttons immer aktiv wenn Felder gesetzt
+ const jsonMaxSize = document.getElementById('cfg_json_max_size')?.value;
+ const jsonMaxDepth = document.getElementById('cfg_json_max_depth')?.value;
+ const jsonEnabled = !!(jsonMaxSize || jsonMaxDepth);
+ ['cfg_json_cancel', 'cfg_json_save'].forEach(id => {
+ const el = document.getElementById(id);
+ if (el) el.disabled = !jsonEnabled;
+ });
+}
+
+function renderLogViewer(data) {
+ const statusEl = document.getElementById('log-overview-status');
+ const entriesEl = document.getElementById('log-entries');
+ const detailEl = document.getElementById('log-detail');
+ if (!statusEl || !entriesEl || !detailEl) return;
+ const safeData = data && typeof data === 'object' ? data : {};
+ const module = safeData.module || logViewerState.module || 'app';
+ const items = Array.isArray(safeData.items) ? safeData.items : [];
+ const count = Number.isFinite(safeData.count) ? safeData.count : items.length;
+ logViewerState.lastCount = count;
+ statusEl.textContent = `Modul: ${module} • Einträge: ${items.length} / ${count}`;
+ if (items.length === 0) {
+ entriesEl.textContent = 'Keine Logs verfuegbar.';
+ detailEl.textContent = 'Noch kein Eintrag ausgewaehlt.';
+ return;
+ }
+ const listText = items.join('\n');
+ entriesEl.textContent = listText;
+ detailEl.textContent = items[0] || 'Noch kein Eintrag ausgewaehlt.';
+}
+
+async function loadLogViewer(module = 'app') {
+ if (window.DEBUG_MODE !== 'pro') return;
+ const statusEl = document.getElementById('log-overview-status');
+ const entriesEl = document.getElementById('log-entries');
+ const detailEl = document.getElementById('log-detail');
+ if (!statusEl || !entriesEl || !detailEl) return;
+ logViewerState.module = module;
+ statusEl.textContent = 'Lade Logs...';
+ entriesEl.textContent = '...';
+ detailEl.textContent = 'Noch kein Eintrag ausgewaehlt.';
+ try {
+ const resp = await fetch(`/api/debug/logs?module=${encodeURIComponent(module)}&limit=200`);
+ if (!resp.ok) {
+ const txt = await resp.text();
+ throw new Error(txt || 'Fehler beim Laden');
+ }
+ const json = await resp.json();
+ renderLogViewer(json);
+ } catch (err) {
+ statusEl.textContent = 'Fehler beim Laden der Logs.';
+ entriesEl.textContent = (err && err.message) || 'Unbekannter Fehler';
+ detailEl.textContent = 'Keine Details verfuegbar.';
+ }
+}
+
+function setActiveTab(target) {
+ if (!target) return;
+ activeTab = target;
+ document.querySelectorAll('.debug-panel').forEach(panel => {
+ const isActive = panel.id === `panel-${activeTab}`;
+ panel.classList.toggle('active-panel', isActive);
+ panel.style.display = isActive ? '' : 'none';
+ });
+ document.querySelectorAll('.debug-tab').forEach(tab => {
+ tab.classList.toggle('active', tab.dataset.tab === activeTab);
+ });
+ if (activeTab === 'performance') {
+ startPerformancePolling();
+ } else {
+ stopPerformancePolling();
+ }
+ if (activeTab === 'scanner') {
+ initScannerTab();
+ }
+ if (activeTab === 'config') {
+ loadConfigData();
+ }
+ if (activeTab === 'json' && window.DEBUG_MODE === 'pro') {
+ loadJsonInspectorLimits();
+ if (typeof initJsonInspector === 'function') {
+ initJsonInspector();
+ }
+ }
+ if (activeTab === 'services' && window.DEBUG_MODE === 'pro') {
+ loadServicesData();
+ }
+ if (activeTab === 'logs' && window.DEBUG_MODE === 'pro') {
+ loadLogViewer();
+ }
+}
+
+function normalizePrinterType(val) {
+ const v = (val || '').toLowerCase();
+ if (v.includes('bambu')) return 'bambu';
+ if (v.includes('klipper')) return 'klipper';
+ return 'generic';
+}
+
+function setDebugMode(mode) {
+ window.DEBUG_MODE = mode === 'pro' ? 'pro' : 'lite';
+ document.body.classList.remove('debug-lite', 'pro-mode');
+ if (window.DEBUG_MODE === 'pro') {
+ document.body.classList.add('pro-mode');
+ } else {
+ document.body.classList.add('debug-lite');
+ }
+ const btnLite = document.getElementById('debugModeLite');
+ const btnPro = document.getElementById('debugModePro');
+ const label = document.getElementById('debugModeLabel');
+ if (btnLite) btnLite.classList.toggle('active', window.DEBUG_MODE === 'lite');
+ if (btnPro) btnPro.classList.toggle('active', window.DEBUG_MODE === 'pro');
+ if (label) {
+ label.textContent = `Mode: ${window.DEBUG_MODE === 'pro' ? 'Pro' : 'Lite'}`;
+ label.classList.remove('mode-lite', 'mode-pro');
+ label.classList.add(window.DEBUG_MODE === 'pro' ? 'mode-pro' : 'mode-lite');
+ }
+ setConfigEditable(window.DEBUG_MODE === 'pro');
+ document.querySelectorAll('.pro-only, [data-mode="pro"]').forEach(el => {
+ const isPanel = el.classList.contains('debug-panel');
+ if (window.DEBUG_MODE === 'pro') {
+ if (isPanel) {
+ const target = el.id?.replace('panel-', '');
+ el.style.display = target === activeTab ? '' : 'none';
+ } else {
+ el.style.display = '';
+ }
+ } else {
+ el.style.display = 'none';
+ }
+ });
+ document.querySelectorAll('.pro-only-inline').forEach(el => {
+ el.style.display = window.DEBUG_MODE === 'pro' ? '' : 'none';
+ });
+ updateProbeButtonState();
+ if (window.DEBUG_MODE === 'lite') {
+ const activeTabEl = document.querySelector(`.debug-tab[data-tab="${activeTab}"]`);
+ if (activeTabEl && activeTabEl.dataset.mode === 'pro') {
+ setActiveTab('system');
+ return;
+ }
+ }
+ setActiveTab(activeTab);
+}
+
+function initDebugModeUI() {
+ const btnLite = document.getElementById('debugModeLite');
+ const btnPro = document.getElementById('debugModePro');
+ if (btnLite) btnLite.addEventListener('click', () => setDebugMode('lite'));
+ if (btnPro) btnPro.addEventListener('click', () => setDebugMode('pro'));
+ setDebugMode(window.DEBUG_MODE || 'lite');
+ initConfigActions();
+}
+
+function $(id) {
+ return document.getElementById(id);
+}
+
+function setText(id, value, fallback = '-') {
+ // Nur MQTT-Status-BADGES werden geschützt, nicht die Daten-Felder
+ // Status-Badges werden von mqtt-connect-handler.js verwaltet
+ const mqttStatusBadges = ['mqttStatus', 'mqttStatusBadge', 'mqttConnBadge', 'proMqttStatus'];
+ if (mqttStatusBadges.includes(id)) return;
+
+ const el = $(id);
+ if (!el) return;
+ const safe = value === undefined || value === null || value === '' ? fallback : value;
+ el.textContent = safe;
+}
+
+function setCheckbox(id, value) {
+ const el = $(id);
+ if (!el) return;
+ el.checked = Boolean(value);
+}
+
+function setInputValue(id, value) {
+ const el = $(id);
+ if (!el) return;
+ el.value = value !== undefined && value !== null ? value : '';
+}
+
+function setSelectValue(id, value, allowed = []) {
+ const el = $(id);
+ if (!el) return;
+ const normalized = (value || '').toString().toLowerCase();
+ if (allowed.length === 0 || allowed.includes(normalized)) {
+ el.value = normalized;
+ }
+}
+
+function fmtMs(n) {
+ return Number.isFinite(n) ? `${n} ms` : '-';
+}
+
+function fmtReq(n) {
+ return Number.isFinite(n) ? n.toString() : '-';
+}
+
+function fmtPercent(n) {
+ return Number.isFinite(n) ? `${n}%` : '-';
+}
+
+function fmtGB(val) {
+ if (!Number.isFinite(val)) return null;
+ return `${val} GB`;
+}
+
+function fmtUptime(val) {
+ const num = Number(val);
+ if (Number.isFinite(num)) {
+ const total = Math.max(0, Math.floor(num));
+ const hours = Math.floor(total / 3600);
+ const minutes = Math.floor((total % 3600) / 60);
+ const seconds = total % 60;
+ return [hours, minutes, seconds].map(v => String(v).padStart(2, '0')).join(':');
+ }
+ if (typeof val === 'string' && val.trim().length > 0) {
+ return val;
+ }
+ return '-';
+}
+
+function populateConfigFields(data) {
+ const cfg = data && typeof data === 'object' ? data : {};
+ const cm = cfg.config_manager && typeof cfg.config_manager === 'object' ? cfg.config_manager : {};
+ const debug = cfg.debug && typeof cfg.debug === 'object' ? cfg.debug : {};
+ const systemHealth = debug.system_health && typeof debug.system_health === 'object' ? debug.system_health : {};
+ const runtime = debug.runtime && typeof debug.runtime === 'object' ? debug.runtime : {};
+ const logging = cfg.logging && typeof cfg.logging === 'object' ? cfg.logging : {};
+ const scanner = cfg.scanner && typeof cfg.scanner === 'object' ? cfg.scanner : {};
+ const scannerPro = scanner.pro && typeof scanner.pro === 'object' ? scanner.pro : {};
+ const fp = cfg.fingerprint && typeof cfg.fingerprint === 'object' ? cfg.fingerprint : {};
+ const jsonInspector = cfg.json_inspector && typeof cfg.json_inspector === 'object' ? cfg.json_inspector : {};
+
+ setCheckbox('cfg_health_enabled', systemHealth.enabled ?? cm.health_enabled);
+ setInputValue('cfg_health_warn_latency', systemHealth.warn_latency_ms ?? cm.health_latency_warn_ms);
+ setInputValue('cfg_health_error_latency', systemHealth.error_latency_ms ?? cm.health_latency_error_ms);
+
+ const systemLoggingModules = logging.modules && typeof logging.modules === 'object' ? logging.modules : {};
+ setCheckbox('cfg_system_logging_enabled', logging.enabled ?? true);
+ setSelectValue('cfg_system_logging_level', logging.level ?? 'info', ['debug', 'info', 'warning', 'error']);
+ setInputValue('cfg_system_logging_max_size', logging.max_size_mb ?? 10);
+ setInputValue('cfg_system_logging_backup_count', logging.backup_count ?? 3);
+ setInputValue('cfg_system_logging_keep_days', logging.keep_days ?? 14);
+ ['app', 'bambu', 'errors', 'klipper', 'mqtt'].forEach(module => {
+ setCheckbox(`cfg_system_logging_module_${module}`, systemLoggingModules[module]?.enabled ?? false);
+ });
+
+ setCheckbox('cfg_runtime_enabled', runtime.enabled ?? cm.runtime_enabled);
+ setInputValue('cfg_runtime_poll_interval', runtime.poll_interval_ms ?? cm.runtime_poll_interval_ms);
+
+ setInputValue('cfg_json_max_size', jsonInspector.max_size_mb);
+ setInputValue('cfg_json_max_depth', jsonInspector.max_depth);
+ setCheckbox('cfg_json_allow_override', jsonInspector.allow_override);
+
+ // MQTT Logging Config
+ const mqttLogging = cfg.mqtt_logging && typeof cfg.mqtt_logging === 'object' ? cfg.mqtt_logging : {};
+ const mqttSmartLog = mqttLogging.smart_logging && typeof mqttLogging.smart_logging === 'object' ? mqttLogging.smart_logging : {};
+ const mqttLimits = mqttLogging.limits && typeof mqttLogging.limits === 'object' ? mqttLogging.limits : {};
+
+ setCheckbox('cfg_mqtt_logging_enabled', mqttLogging.enabled ?? true);
+ setCheckbox('cfg_mqtt_smart_logging', mqttSmartLog.enabled ?? false);
+ setSelectValue('cfg_mqtt_trigger_type', mqttSmartLog.trigger_type ?? 'command', ['command', 'temperature']);
+ setInputValue('cfg_mqtt_trigger_command', mqttSmartLog.trigger_type === 'command' ? mqttSmartLog.trigger_value : 'printing');
+ setInputValue('cfg_mqtt_trigger_temp', mqttSmartLog.trigger_type === 'temperature' ? mqttSmartLog.trigger_value : 220);
+ setInputValue('cfg_mqtt_max_duration', mqttSmartLog.max_duration_hours ?? 4);
+ setInputValue('cfg_mqtt_buffer_minutes', mqttSmartLog.buffer_minutes ?? 5);
+ setInputValue('cfg_mqtt_max_size', mqttLimits.max_size_mb ?? 100);
+ setInputValue('cfg_mqtt_max_payload', mqttLimits.max_payload_chars ?? 1000);
+ setCheckbox('cfg_mqtt_full_payload', mqttLimits.full_payload_enabled ?? false);
+
+ // Trigger UI visibility
+ updateMqttTriggerUI();
+
+ const normalizedSystemLoggingLevel = (logging.level || 'info').toString().toLowerCase();
+ configSnapshot = {
+ health_enabled: systemHealth.enabled ?? cm.health_enabled,
+ health_warn_latency: systemHealth.warn_latency_ms ?? cm.health_latency_warn_ms,
+ health_error_latency: systemHealth.error_latency_ms ?? cm.health_latency_error_ms,
+ system_logging_enabled: logging.enabled ?? true,
+ system_logging_level: normalizedSystemLoggingLevel,
+ system_logging_max_size: logging.max_size_mb ?? 10,
+ system_logging_backup_count: logging.backup_count ?? 3,
+ system_logging_keep_days: logging.keep_days ?? 14,
+ system_logging_module_app: systemLoggingModules.app?.enabled ?? false,
+ system_logging_module_bambu: systemLoggingModules.bambu?.enabled ?? false,
+ system_logging_module_errors: systemLoggingModules.errors?.enabled ?? false,
+ system_logging_module_klipper: systemLoggingModules.klipper?.enabled ?? false,
+ system_logging_module_mqtt: systemLoggingModules.mqtt?.enabled ?? false,
+ runtime_enabled: runtime.enabled ?? cm.runtime_enabled,
+ runtime_poll_interval: runtime.poll_interval_ms ?? cm.runtime_poll_interval_ms,
+ json_max_size: jsonInspector.max_size_mb,
+ json_max_depth: jsonInspector.max_depth,
+ json_allow_override: jsonInspector.allow_override,
+ mqtt_logging_enabled: mqttLogging.enabled ?? true,
+ mqtt_smart_logging: mqttSmartLog.enabled ?? false,
+ mqtt_trigger_type: mqttSmartLog.trigger_type ?? 'command',
+ mqtt_trigger_command: mqttSmartLog.trigger_type === 'command' ? mqttSmartLog.trigger_value : 'printing',
+ mqtt_trigger_temp: mqttSmartLog.trigger_type === 'temperature' ? mqttSmartLog.trigger_value : 220,
+ mqtt_max_duration: mqttSmartLog.max_duration_hours ?? 4,
+ mqtt_buffer_minutes: mqttSmartLog.buffer_minutes ?? 5,
+ mqtt_max_size: mqttLimits.max_size_mb ?? 100,
+ mqtt_max_payload: mqttLimits.max_payload_chars ?? 1000,
+ mqtt_full_payload: mqttLimits.full_payload_enabled ?? false,
+ };
+ setConfigEditable(true);
+ applyConfigEnableStates();
+}
+
+function resetConfigCard(card) {
+ if (!configSnapshot || Object.keys(configSnapshot).length === 0) return;
+ if (card === 'health' || card === 'all') {
+ setCheckbox('cfg_health_enabled', configSnapshot.health_enabled);
+ setInputValue('cfg_health_warn_latency', configSnapshot.health_warn_latency);
+ setInputValue('cfg_health_error_latency', configSnapshot.health_error_latency);
+ }
+ if (card === 'system_logging' || card === 'all') {
+ setCheckbox('cfg_system_logging_enabled', configSnapshot.system_logging_enabled);
+ setSelectValue('cfg_system_logging_level', configSnapshot.system_logging_level, ['debug', 'info', 'warning', 'error']);
+ setInputValue('cfg_system_logging_max_size', configSnapshot.system_logging_max_size);
+ setInputValue('cfg_system_logging_backup_count', configSnapshot.system_logging_backup_count);
+ setInputValue('cfg_system_logging_keep_days', configSnapshot.system_logging_keep_days);
+ ['app', 'bambu', 'errors', 'klipper', 'mqtt'].forEach(module => {
+ setCheckbox(`cfg_system_logging_module_${module}`, configSnapshot[`system_logging_module_${module}`]);
+ });
+ }
+ if (card === 'runtime' || card === 'all') {
+ setCheckbox('cfg_runtime_enabled', configSnapshot.runtime_enabled);
+ setInputValue('cfg_runtime_poll_interval', configSnapshot.runtime_poll_interval);
+ }
+ if (card === 'json' || card === 'all') {
+ setInputValue('cfg_json_max_size', configSnapshot.json_max_size);
+ setInputValue('cfg_json_max_depth', configSnapshot.json_max_depth);
+ setCheckbox('cfg_json_allow_override', configSnapshot.json_allow_override);
+ }
+ if (card === 'mqtt_logging' || card === 'all') {
+ setCheckbox('cfg_mqtt_logging_enabled', configSnapshot.mqtt_logging_enabled);
+ setCheckbox('cfg_mqtt_smart_logging', configSnapshot.mqtt_smart_logging);
+ setSelectValue('cfg_mqtt_trigger_type', configSnapshot.mqtt_trigger_type, ['command', 'temperature']);
+ setInputValue('cfg_mqtt_trigger_command', configSnapshot.mqtt_trigger_command);
+ setInputValue('cfg_mqtt_trigger_temp', configSnapshot.mqtt_trigger_temp);
+ setInputValue('cfg_mqtt_max_duration', configSnapshot.mqtt_max_duration);
+ setInputValue('cfg_mqtt_buffer_minutes', configSnapshot.mqtt_buffer_minutes);
+ setInputValue('cfg_mqtt_max_size', configSnapshot.mqtt_max_size);
+ setInputValue('cfg_mqtt_max_payload', configSnapshot.mqtt_max_payload);
+ setCheckbox('cfg_mqtt_full_payload', configSnapshot.mqtt_full_payload);
+ updateMqttTriggerUI();
+ }
+}
+
+function parseIntOrUndefined(val) {
+ const parsed = parseInt(val, 10);
+ return Number.isFinite(parsed) ? parsed : undefined;
+}
+
+async function saveConfigSection(section) {
+ const payload = {};
+ if (section === 'health') {
+ payload['debug.system_health.enabled'] = document.getElementById('cfg_health_enabled').checked;
+ payload['debug.system_health.warn_latency_ms'] = parseInt(document.getElementById('cfg_health_warn_latency').value, 10);
+ payload['debug.system_health.error_latency_ms'] = parseInt(document.getElementById('cfg_health_error_latency').value, 10);
+ }
+ if (section === 'system_logging') {
+ payload['logging.enabled'] = document.getElementById('cfg_system_logging_enabled').checked;
+ payload['logging.level'] = document.getElementById('cfg_system_logging_level').value;
+ const maxSizeVal = parseIntOrUndefined(document.getElementById('cfg_system_logging_max_size').value);
+ const backupVal = parseIntOrUndefined(document.getElementById('cfg_system_logging_backup_count').value);
+ const keepDaysVal = parseIntOrUndefined(document.getElementById('cfg_system_logging_keep_days').value);
+ if (maxSizeVal !== undefined) payload['logging.max_size_mb'] = maxSizeVal;
+ if (backupVal !== undefined) payload['logging.backup_count'] = backupVal;
+ if (keepDaysVal !== undefined) payload['logging.keep_days'] = keepDaysVal;
+ ['app', 'bambu', 'errors', 'klipper', 'mqtt'].forEach(module => {
+ payload[`logging.modules.${module}`] = document.getElementById(`cfg_system_logging_module_${module}`).checked;
+ });
+ }
+ if (section === 'runtime') {
+ payload['debug.runtime.enabled'] = document.getElementById('cfg_runtime_enabled').checked;
+ payload['debug.runtime.poll_interval_ms'] = parseInt(document.getElementById('cfg_runtime_poll_interval').value, 10);
+ }
+ if (section === 'json') {
+ payload['json_inspector.max_size_mb'] = parseInt(document.getElementById('cfg_json_max_size').value, 10);
+ payload['json_inspector.max_depth'] = parseInt(document.getElementById('cfg_json_max_depth').value, 10);
+ payload['json_inspector.allow_override'] = document.getElementById('cfg_json_allow_override').checked;
+ }
+ if (section === 'mqtt_logging') {
+ payload['mqtt_logging.enabled'] = document.getElementById('cfg_mqtt_logging_enabled').checked;
+ payload['mqtt_logging.smart_logging.enabled'] = document.getElementById('cfg_mqtt_smart_logging').checked;
+ payload['mqtt_logging.smart_logging.trigger_type'] = document.getElementById('cfg_mqtt_trigger_type').value;
+ const triggerType = document.getElementById('cfg_mqtt_trigger_type').value;
+ if (triggerType === 'command') {
+ payload['mqtt_logging.smart_logging.trigger_value'] = document.getElementById('cfg_mqtt_trigger_command').value;
+ } else {
+ payload['mqtt_logging.smart_logging.trigger_value'] = parseInt(document.getElementById('cfg_mqtt_trigger_temp').value, 10);
+ }
+ payload['mqtt_logging.smart_logging.max_duration_hours'] = parseInt(document.getElementById('cfg_mqtt_max_duration').value, 10);
+ payload['mqtt_logging.smart_logging.buffer_minutes'] = parseInt(document.getElementById('cfg_mqtt_buffer_minutes').value, 10);
+ payload['mqtt_logging.limits.max_size_mb'] = parseInt(document.getElementById('cfg_mqtt_max_size').value, 10);
+ payload['mqtt_logging.limits.max_payload_chars'] = parseInt(document.getElementById('cfg_mqtt_max_payload').value, 10);
+ payload['mqtt_logging.limits.full_payload_enabled'] = document.getElementById('cfg_mqtt_full_payload').checked;
+ }
+ try {
+ const res = await fetch('/api/config', {
+ method: 'PUT',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload),
+ });
+ if (!res.ok) return;
+ const data = await res.json();
+ populateConfigFields(data);
+ } catch (err) {
+ // ignore save errors for now
+ }
+}
+
+
+async function loadConfigData() {
+ if (!document.body.classList.contains('pro-mode')) return;
+ if (configLoaded) return;
+ try {
+ const res = await fetch('/api/config/current');
+ if (!res.ok) return;
+ const data = await res.json();
+ populateConfigFields(data);
+ configLoaded = true;
+ } catch (err) {
+ // ignore load errors
+ }
+}
+
+function setBadgeState(el, state) {
+ if (!el) return;
+ const level = state || 'idle';
+ el.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle');
+ if (level === 'ok') el.classList.add('status-ok');
+ else if (level === 'warn') el.classList.add('status-warn');
+ else if (level === 'error') el.classList.add('status-error');
+ else el.classList.add('status-idle');
+ if (level === 'ok') el.textContent = 'OK';
+ else if (level === 'warn') el.textContent = 'Warn';
+ else if (level === 'error') el.textContent = 'Error';
+ else el.textContent = 'Idle';
+}
+
+function setStatus(id, state) {
+ // MQTT-Status wird ausschließlich von mqtt-connect-handler.js verwaltet
+ // Schütze ALLE MQTT-Status-Elemente vor Überschreibung
+ const mqttElements = ['mqttStatus', 'mqttStatusBadge', 'mqttConnBadge', 'proMqttStatus'];
+ if (mqttElements.includes(id)) return;
+
+ const el = $(id);
+ if (!el) return;
+ const val = state || 'offline';
+ el.textContent = val;
+ el.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle', 'status-info');
+ if (val === 'online' || val === 'connected' || val === 'listening') {
+ el.classList.add('status-ok');
+ } else if (val === 'disabled') {
+ el.classList.add('status-idle');
+ } else {
+ el.classList.add('status-error');
+ }
+}
+
+function initTabs() {
+ document.querySelectorAll('.debug-tab').forEach(tab => {
+ tab.addEventListener('click', () => {
+ const target = tab.dataset.tab;
+ if (window.DEBUG_MODE === 'lite' && tab.dataset.mode === 'pro') {
+ return;
+ }
+ setActiveTab(target);
+ });
+ });
+ const initial = document.querySelector('.debug-tab.active');
+ activeTab = initial?.dataset?.tab || 'system';
+ setActiveTab(activeTab);
+}
+
+function initConfigActions() {
+ const bind = (id, section, handler) => {
+ const el = document.getElementById(id);
+ if (el) el.addEventListener('click', handler.bind(null, section));
+ };
+ const watch = (id) => {
+ const el = document.getElementById(id);
+ if (el) el.addEventListener('change', applyConfigEnableStates);
+ };
+ bind('cfg_health_cancel', 'health', () => resetConfigCard('health'));
+ bind('cfg_health_save', 'health', saveConfigSection);
+ bind('cfg_runtime_cancel', 'runtime', () => resetConfigCard('runtime'));
+ bind('cfg_runtime_save', 'runtime', saveConfigSection);
+ bind('cfg_json_cancel', 'json', () => resetConfigCard('json'));
+ bind('cfg_json_save', 'json', saveConfigSection);
+ bind('cfg_mqtt_cancel', 'mqtt_logging', () => resetConfigCard('mqtt_logging'));
+ bind('cfg_mqtt_save', 'mqtt_logging', saveConfigSection);
+ bind('cfg_system_logging_cancel', 'system_logging', () => resetConfigCard('system_logging'));
+ bind('cfg_system_logging_save', 'system_logging', saveConfigSection);
+ watch('cfg_health_enabled');
+ watch('cfg_runtime_enabled');
+
+ // MQTT Smart Logging UI watchers
+ const smartLoggingCheckbox = document.getElementById('cfg_mqtt_smart_logging');
+ if (smartLoggingCheckbox) {
+ smartLoggingCheckbox.addEventListener('change', updateMqttTriggerUI);
+ }
+ const triggerTypeSelect = document.getElementById('cfg_mqtt_trigger_type');
+ if (triggerTypeSelect) {
+ triggerTypeSelect.addEventListener('change', updateMqttTriggerUI);
+ }
+}
+
+function updateMqttTriggerUI() {
+ const smartEnabled = document.getElementById('cfg_mqtt_smart_logging')?.checked ?? false;
+ const triggerType = document.getElementById('cfg_mqtt_trigger_type')?.value ?? 'command';
+
+ const optionsBox = document.getElementById('cfg_mqtt_smart_logging_options');
+ const commandBox = document.getElementById('cfg_mqtt_trigger_command_box');
+ const tempBox = document.getElementById('cfg_mqtt_trigger_temp_box');
+
+ if (optionsBox) {
+ optionsBox.style.display = smartEnabled ? 'block' : 'none';
+ }
+
+ if (commandBox && tempBox) {
+ if (smartEnabled) {
+ commandBox.style.display = triggerType === 'command' ? 'block' : 'none';
+ tempBox.style.display = triggerType === 'temperature' ? 'block' : 'none';
+ } else {
+ commandBox.style.display = 'none';
+ tempBox.style.display = 'none';
+ }
+ }
+}
+
+async function loadSystemStatus() {
+ try {
+ const res = await fetch('/api/system/status');
+ if (!res.ok) return;
+ const data = await res.json();
+ setText('sys_app_name', data?.app?.name || 'FilamentHub');
+ setText('sys_app_version', data?.app?.version || '0.0.0');
+ setText('sys_app_env', data?.app?.environment || 'development');
+ setText('sys_app_uptime', data?.app?.uptime || '0');
+
+ setText('sys_cpu_usage', data?.system?.cpu_percent !== undefined ? data.system.cpu_percent + ' %' : '-');
+ setText('sys_cpu_cores', data?.system?.cpu_count !== undefined ? data.system.cpu_count + ' Cores' : 'n/a');
+ setText('sys_ram_usage', data?.system?.ram_percent !== undefined ? data.system.ram_percent + ' %' : '-');
+ if (data?.system?.ram_used_gb !== undefined && data?.system?.ram_total_gb !== undefined) {
+ setText('sys_ram_detail', `${data.system.ram_used_gb} GB / ${data.system.ram_total_gb} GB`);
+ } else {
+ setText('sys_ram_detail', '-');
+ }
+ setText('sys_disk_usage', data?.system?.disk_percent !== undefined ? data.system.disk_percent + ' %' : '-');
+ if (data?.system?.disk_used_gb !== undefined && data?.system?.disk_total_gb !== undefined) {
+ setText('sys_disk_detail', `${data.system.disk_used_gb} GB / ${data.system.disk_total_gb} GB`);
+ } else {
+ setText('sys_disk_detail', '-');
+ }
+ } catch (err) {
+ // ignore
+ }
+}
+
+async function loadBackendStatus() {
+ try {
+ const res = await fetch('/api/debug/system_status');
+ if (!res.ok) return;
+ const data = await res.json();
+ const rt = data?.runtime || {};
+ const stateRaw = (rt.state || 'idle').toString().toLowerCase();
+ const state = stateRaw === 'active' ? 'active' : 'idle';
+ setStatus('apiStatus', data?.api?.state || 'offline');
+ setStatus('dbStatus', data?.db?.state || 'offline');
+ // MQTT-Status wird NICHT mehr hier gesetzt - mqtt-connect-handler.js ist zuständig
+ setStatus('wsStatus', data?.websocket?.state || 'offline');
+ const clients = data?.websocket?.clients || 0;
+ setText('wsClients', clients ? `(${clients} clients)` : '');
+ renderSystemHealth({
+ api: data?.api?.state,
+ db: data?.db?.state,
+ mqtt: data?.mqtt?.state,
+ ws: data?.websocket?.state,
+ wsClients: clients,
+ runtimeState: state,
+ runtimeAvgMs: rt.avg_response_ms,
+ systemHealth: data?.system_health,
+ });
+
+ const badges = document.querySelectorAll('#sys_runtime_state');
+ const isActive = state === 'active';
+ if (badges.length > 0) {
+ badges.forEach(badge => {
+ badge.textContent = isActive ? 'ACTIVE' : 'IDLE';
+ badge.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle');
+ badge.classList.add(isActive ? 'status-ok' : 'status-idle');
+ });
+ } else {
+ console.warn('[runtime] sys_runtime_state not found in DOM');
+ }
+ setText(
+ 'sys_runtime_rpm',
+ (isActive && typeof rt.requests_per_minute === 'number')
+ ? Math.round(rt.requests_per_minute)
+ : '-'
+ );
+ setText(
+ 'sys_runtime_avg',
+ (isActive && typeof rt.avg_response_ms === 'number')
+ ? `${rt.avg_response_ms.toFixed(2)} ms`
+ : '-'
+ );
+ } catch (err) {
+ // ignore
+ }
+}
+
+async function fetchPerformanceData() {
+ try {
+ const res = await fetch('/api/debug/performance');
+ if (!res.ok) return null;
+ return await res.json();
+ } catch (err) {
+ return null;
+ }
+}
+
+function classifyPercent(n) {
+ if (!Number.isFinite(n)) return 'idle';
+ if (n >= 90) return 'error';
+ if (n >= 75) return 'warn';
+ return 'ok';
+}
+
+function showPerfError() {
+ const loading = $('perfLoading');
+ const err = $('perfError');
+ if (loading) loading.style.display = 'none';
+ if (err) err.style.display = '';
+ setText('perfCpuValue', '-');
+ setText('perfCpuSub', '-');
+ setText('perfRamValue', '-');
+ setText('perfRamSub', '-');
+ setText('perfDiskValue', '-');
+ setText('perfDiskSub', '-');
+ setText('perfUptimeValue', '-');
+ setText('perfUptimeSub', '-');
+ setBadgeState($('#perfCpuBadge'), 'idle');
+ setBadgeState($('#perfRamBadge'), 'idle');
+ setBadgeState($('#perfDiskBadge'), 'idle');
+ setBadgeState($('#perfUptimeBadge'), 'idle');
+}
+
+function setTestBadge(badge, status, text) {
+ if (!badge) return;
+ badge.textContent = text || '--';
+ badge.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle', 'badge-ok', 'badge-error', 'badge-warn', 'badge-idle', 'badge-testing', 'badge-failed');
+ if (status === 'ok') {
+ badge.classList.add('badge', 'badge-ok');
+ } else if (status === 'testing') {
+ badge.classList.add('badge', 'badge-testing');
+ } else if (status === 'fail' || status === 'failed') {
+ badge.classList.add('badge', 'badge-failed');
+ } else if (status === 'warn') {
+ badge.classList.add('badge', 'badge-warn');
+ } else {
+ badge.classList.add('badge', 'badge-idle');
+ }
+}
+
+function setScannerStatus(cardEl, state, text, detailText) {
+ if (!cardEl) return;
+ const badge = cardEl.querySelector('[data-role="statusBadge"]');
+ const result = cardEl.querySelector('[data-role="portResult"]');
+ if (badge) {
+ badge.classList.remove('badge-idle', 'badge-testing', 'badge-ok', 'badge-failed');
+ if (state === 'testing') badge.classList.add('badge-testing');
+ else if (state === 'ok') badge.classList.add('badge-ok');
+ else if (state === 'failed') badge.classList.add('badge-failed');
+ else badge.classList.add('badge-idle');
+ badge.textContent = text || '--';
+ }
+ if (result) result.textContent = detailText || '--';
+}
+
+async function loadPerformanceLite() {
+ const loading = $('perfLoading');
+ const err = $('perfError');
+ if (err) err.style.display = 'none';
+ if (loading) loading.style.display = '';
+ try {
+ const data = await fetchPerformanceData();
+ if (!data) {
+ showPerfError();
+ console.warn('Performance data not available');
+ return;
+ }
+ const cpu = Number(data.cpu_percent);
+ const ramUsedMb = Number(data.ram_used_mb);
+ const ramTotalMb = Number(data.ram_total_mb);
+ const diskUsedGb = Number(data.disk_used_gb);
+ const diskTotalGb = Number(data.disk_total_gb);
+ const uptimeSeconds = Number(data.backend_uptime_s);
+ if (loading) loading.style.display = 'none';
+ if (err) err.style.display = 'none';
+
+ setText('perfCpuValue', fmtPercent(cpu));
+ setText('perfCpuSub', '-');
+ setBadgeState($('#perfCpuBadge'), classifyPercent(cpu));
+
+ const ramPercent = Number.isFinite(ramUsedMb) && Number.isFinite(ramTotalMb) && ramTotalMb > 0
+ ? Math.round((ramUsedMb / ramTotalMb) * 100)
+ : null;
+ setText('perfRamValue', ramPercent !== null ? fmtPercent(ramPercent) : '-');
+ const ramDetail = Number.isFinite(ramUsedMb) && Number.isFinite(ramTotalMb)
+ ? `${ramUsedMb} MB / ${ramTotalMb} MB`
+ : '-';
+ setText('perfRamSub', ramDetail);
+ setBadgeState($('#perfRamBadge'), classifyPercent(ramPercent));
+
+ const diskPercent = Number.isFinite(diskUsedGb) && Number.isFinite(diskTotalGb) && diskTotalGb > 0
+ ? Math.round((diskUsedGb / diskTotalGb) * 100)
+ : null;
+ setText('perfDiskValue', diskPercent !== null ? fmtPercent(diskPercent) : '-');
+ const diskDetail = Number.isFinite(diskUsedGb) && Number.isFinite(diskTotalGb)
+ ? `${diskUsedGb} GB / ${diskTotalGb} GB`
+ : '-';
+ setText('perfDiskSub', diskDetail);
+ setBadgeState($('#perfDiskBadge'), classifyPercent(diskPercent));
+
+ const uptimeText = fmtUptime(uptimeSeconds);
+ setText('perfUptimeValue', uptimeText);
+ setText('perfUptimeSub', uptimeText === '-' ? '-' : 'backend uptime');
+ setBadgeState($('#perfUptimeBadge'), uptimeText === '-' ? 'idle' : 'ok');
+ } catch (err) {
+ showPerfError();
+ console.warn('Performance data not available', err);
+ }
+}
+
+function renderScannerEmpty(message) {
+ const list = $('scannerResults');
+ if (!list) return;
+ list.innerHTML = '';
+ const empty = document.createElement('div');
+ empty.className = 'badge badge-idle';
+ empty.textContent = message || 'No printers detected';
+ list.appendChild(empty);
+}
+
+function renderScannerResults(printers) {
+ const list = $('scannerResults');
+ if (!list) return;
+ list.innerHTML = '';
+ // Reset Probe-Ziel bis ein erfolgreicher Port-Test gelaufen ist
+ probeTarget = null;
+ updateProbeButtonState();
+ if (!Array.isArray(printers) || printers.length === 0) {
+ renderScannerEmpty('No printers detected');
+ return;
+ }
+ printers.forEach(pr => {
+ const card = document.createElement('div');
+ card.className = 'scanner-card';
+ const iconChar = pr.type === 'bambu' ? '[B]' : pr.type === 'klipper' ? '[K]' : '[P]';
+ const typeText = pr.type || 'generic';
+ const baseType = normalizePrinterType(typeText);
+ const portVal = pr.port || 6000;
+ const status = pr.status || (pr.accessible ? 'reachable' : 'idle');
+ const statusLabel = status === 'reachable' ? 'ONLINE' : status === 'offline' ? 'OFFLINE' : 'IDLE';
+ card.innerHTML = `
+
+
+
${pr.ip || '--'}
+
Typ: ${typeText} | Port ${portVal}
+
+
+
${statusLabel}
+
+ Test
+ Zum System
+
+
--
+
+ `;
+ const testBtn = card.querySelector('[data-action="testPort"]');
+ const addBtn = card.querySelector('[data-action="addPrinter"]');
+ if (testBtn) {
+ testBtn.addEventListener('click', () => handlePortTest(testBtn, card, pr.ip || '--', portVal));
+ }
+ if (addBtn) {
+ addBtn.addEventListener('click', () => handleAddPrinter(addBtn, card));
+ }
+ list.appendChild(card);
+ });
+}
+
+async function loadNetworkInfo() {
+ try {
+ const res = await fetch('/api/debug/network');
+ if (!res.ok) {
+ renderScannerEmpty('No printers detected');
+ return;
+ }
+ const data = await res.json();
+ setText('netHostname', data?.hostname || '-');
+ setText('netLocalIp', data?.local_ip || '-');
+ setText('netSuggestedRange', data?.suggested_range || '-');
+ scannerSuggestedRange = data?.suggested_range || null;
+ } catch (err) {
+ renderScannerEmpty('No printers detected');
+ }
+}
+
+async function handleQuickScanClick() {
+ const btn = $('scannerQuickScan');
+ if (btn) {
+ btn.disabled = true;
+ btn.textContent = 'Scanning...';
+ btn.classList.add('btn-loading');
+ }
+ if (!scannerSuggestedRange) {
+ renderScannerEmpty('Network range not available');
+ if (btn) {
+ btn.disabled = false;
+ btn.textContent = 'Quick Scan (LAN)';
+ btn.classList.remove('btn-loading');
+ }
+ return;
+ }
+ try {
+ renderScannerEmpty('Scanning...');
+ const res = await fetch('/api/scanner/scan/quick');
+ if (!res.ok) {
+ renderScannerEmpty('No printers detected');
+ } else {
+ const data = await res.json();
+ const printers = data?.printers || [];
+ renderScannerResults(printers);
+ }
+ } catch (err) {
+ renderScannerEmpty('No printers detected');
+ } finally {
+ if (btn) {
+ btn.disabled = false;
+ btn.textContent = 'Quick Scan (LAN)';
+ btn.classList.remove('btn-loading');
+ }
+ }
+}
+
+function updateProbeUI(data) {
+ const statusEl = document.getElementById('proProbeStatus');
+ const latencyEl = document.getElementById('proProbeLatency');
+ const typeEl = document.getElementById('proProbeType');
+ const msgEl = document.getElementById('proProbeMessage');
+ const errEl = document.getElementById('proProbeError');
+ const httpEl = document.getElementById('proProbeHttp');
+ const badgeEl = document.getElementById('proProbeBadge');
+ const statusText = (data?.status || 'Unbekannt').toString();
+ if (statusEl) statusEl.textContent = 'Status: ' + statusText;
+ const latencyText = Number.isFinite(data?.latency_ms) ? 'Antwortzeit: ' + data.latency_ms + ' ms' : 'Antwortzeit: -';
+ if (latencyEl) latencyEl.textContent = latencyText;
+ if (typeEl) typeEl.textContent = 'Erkannt: ' + (data?.detected_type || '-');
+ const errorClass = (data?.error_class || '').toString().trim().toLowerCase();
+ const normalizedStatus = (data?.status || '').toString().toUpperCase();
+ const httpVal = data?.http_status;
+ let httpLabel = '-';
+ let httpCode = null;
+ if (httpVal !== null && httpVal !== undefined && httpVal !== '') {
+ const code = Number(httpVal);
+ if (Number.isFinite(code)) {
+ httpCode = code;
+ const desc =
+ code === 200 ? 'Anfrage erfolgreich, Daten werden geliefert' :
+ code === 401 ? 'Authentifizierung fehlt oder falsch' :
+ code === 404 ? 'Endpunkt existiert nicht' :
+ code === 500 ? 'Interner Fehler am Drucker' : '';
+ httpLabel = desc ? `${code} (${desc})` : String(code);
+ } else {
+ httpLabel = String(httpVal);
+ }
+ }
+ // Fehlerklasse-Logik:
+ // 1) Wenn HTTP-Code vorhanden:
+ // - 200 -> OK
+ // - alles andere -> WARNUNG
+ // 2) Wenn kein HTTP-Code:
+ // - Status != OK -> ERROR
+ // - sonst -> OK
+ let errorLabel = 'OK';
+ if (httpCode !== null) {
+ errorLabel = httpCode === 200 ? 'OK' : 'WARNUNG';
+ } else if (normalizedStatus !== 'OK') {
+ errorLabel = 'ERROR';
+ }
+ if (errEl) errEl.textContent = 'Fehlerklasse: ' + errorLabel;
+ if (httpLabel === '-' && normalizedStatus === 'OK') {
+ httpLabel = '200 (Anfrage erfolgreich, Daten werden geliefert)';
+ }
+ if (httpEl) httpEl.textContent = 'HTTP-Status: ' + httpLabel;
+ const hint = data?.message || (Array.isArray(data?.details) && data.details.length ? data.details[0] : '-');
+ if (msgEl) msgEl.textContent = 'Hinweis: ' + hint;
+ if (badgeEl) {
+ badgeEl.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle');
+ let level = 'status-idle';
+ const normalized = (data?.status || '').toString().toUpperCase();
+ if (normalized === 'OK') level = 'status-ok';
+ else if (normalized === 'WARNUNG' || normalized === 'WARNING') level = 'status-warn';
+ else if (normalized === 'FEHLER' || normalized === 'ERROR') level = 'status-error';
+ badgeEl.classList.add(level);
+ badgeEl.textContent = normalized || 'IDLE';
+ }
+}
+
+function updateProbeButtonState() {
+ const probeBtn = document.getElementById('proProbeStart');
+ const fpBtn = document.getElementById('proFingerprintStart');
+ const probeBadge = document.getElementById('proProbeBadge');
+ const fpBadge = document.getElementById('proFingerprintBadge');
+ const enabled = probeTarget && probeTarget.ip;
+
+ if (probeBtn) {
+ probeBtn.disabled = !enabled;
+ probeBtn.title = enabled ? '' : 'Probe erfordert erfolgreichen Port-Test';
+
+ // Update button styling
+ if (enabled) {
+ probeBtn.classList.remove('pro-btn-disabled');
+ probeBtn.classList.add('btn-secondary');
+ } else {
+ probeBtn.classList.add('pro-btn-disabled');
+ probeBtn.classList.remove('btn-secondary');
+ }
+ }
+
+ if (fpBtn) {
+ fpBtn.disabled = !enabled;
+ fpBtn.title = enabled ? '' : 'Fingerprint erfordert erfolgreichen Port-Test';
+
+ // Update button styling
+ if (enabled) {
+ fpBtn.classList.remove('pro-btn-disabled');
+ fpBtn.classList.add('btn-secondary');
+ } else {
+ fpBtn.classList.add('pro-btn-disabled');
+ fpBtn.classList.remove('btn-secondary');
+ }
+ }
+
+ // Update badge states
+ if (probeBadge) {
+ probeBadge.textContent = enabled ? 'BEREIT' : 'IDLE';
+ probeBadge.className = enabled ? 'status-badge status-ok' : 'status-badge status-idle';
+ }
+
+ if (fpBadge) {
+ fpBadge.textContent = enabled ? 'BEREIT' : 'IDLE';
+ fpBadge.className = enabled ? 'status-badge status-ok' : 'status-badge status-idle';
+ }
+
+ // Log for debugging
+ if (enabled) {
+ console.log('[Scanner] Deep Probe & Fingerprint aktiviert für:', probeTarget);
+ }
+}
+
+function findFirstScannerTarget() {
+ const card = document.querySelector('.scanner-card');
+ if (!card) return null;
+ const testBtn = card.querySelector('[data-action="testPort"]');
+ if (testBtn) {
+ const ip = testBtn.dataset.ip;
+ const port = Number(testBtn.dataset.port || 0) || null;
+ if (ip) {
+ return { ip, port: port || 6000 };
+ }
+ }
+ return null;
+}
+
+async function handleProbe(btn) {
+ if (!btn) return;
+ let target = probeTarget;
+ if (!target) {
+ target = findFirstScannerTarget();
+ if (target) {
+ probeTarget = target;
+ updateProbeButtonState();
+ }
+ }
+ if (!target || !target.ip) {
+ updateProbeUI({ status: 'FEHLER', message: 'Kein Ziel fuer Probe gesetzt', detected_type: '-', latency_ms: null });
+ return;
+ }
+ const original = btn.textContent;
+ btn.disabled = true;
+ btn.textContent = 'Probe laeuft...';
+ updateProbeUI({ status: 'LAEUFT', latency_ms: null, detected_type: '-', message: 'Probe laeuft...' });
+ try {
+ const res = await fetch('/api/debug/printer/probe', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ host: target.ip, port: Number(target.port) || 6000 }),
+ });
+ if (!res.ok) {
+ updateProbeUI({ status: 'FEHLER', latency_ms: null, detected_type: '-', message: 'Probe fehlgeschlagen' });
+ return;
+ }
+ const data = await res.json();
+ console.debug('[probe] response', data);
+ updateProbeUI(data);
+ } catch (err) {
+ updateProbeUI({ status: 'FEHLER', latency_ms: null, detected_type: '-', message: 'Probe fehlgeschlagen' });
+ } finally {
+ btn.disabled = false;
+ btn.textContent = original;
+ }
+}
+
+function updateFingerprintUI(data) {
+ const statusEl = document.getElementById('proFingerprintStatus');
+ const typeEl = document.getElementById('proFingerprintType');
+ const confEl = document.getElementById('proFingerprintConfidence');
+ const portsEl = document.getElementById('proFingerprintPorts');
+ const statusText = data?.status || 'Unbekannt';
+ if (statusEl) statusEl.textContent = 'Status: ' + statusText;
+ if (typeEl) typeEl.textContent = 'Erkannt: ' + (data?.detected_type || '-');
+ if (confEl) confEl.textContent = 'Vertrauensgrad: ' + (data?.confidence != null ? data.confidence + '%' : '-');
+ if (portsEl) {
+ portsEl.innerHTML = '';
+ const ports = data?.ports && typeof data.ports === 'object' ? data.ports : {};
+ const keys = Object.keys(ports);
+ if (!keys.length) {
+ const li = document.createElement('li');
+ li.textContent = '-';
+ portsEl.appendChild(li);
+ } else {
+ keys.forEach(k => {
+ const info = ports[k] || {};
+ const reach = info.reachable === true ? 'reachable' : info.reachable === false ? 'not reachable' : '-';
+ const err = info.error_class || '-';
+ const msg = info.message || '';
+ const lat = Number.isFinite(info.latency_ms) ? ` (${info.latency_ms} ms)` : '';
+ const li = document.createElement('li');
+ li.textContent = `Port ${k}: ${reach}${lat}${msg ? ' - ' + msg : ''} [${err}]`;
+ portsEl.appendChild(li);
+ });
+ }
+ }
+ // badge styling
+ try {
+ const badgeEl = document.querySelector('.scanner-pro-head .status-badge');
+ if (badgeEl) {
+ badgeEl.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle');
+ const normalized = statusText.toUpperCase();
+ if (normalized === 'OK') badgeEl.classList.add('status-ok');
+ else if (normalized === 'FEHLER' || normalized === 'ERROR') badgeEl.classList.add('status-error');
+ else if (normalized === 'NICHT_VERFUEGBAR' || normalized === 'UNBEKANNT') badgeEl.classList.add('status-idle');
+ else badgeEl.classList.add('status-warn');
+ }
+ } catch (e) {
+ // ignore
+ }
+}
+
+async function handleFingerprint(btn) {
+ if (!btn) return;
+ let target = probeTarget || findFirstScannerTarget();
+ if (target) {
+ probeTarget = target;
+ updateProbeButtonState();
+ }
+ if (!target || !target.ip) {
+ updateFingerprintUI({ status: 'FEHLER', detected_type: '-', confidence: null, ports: {}, message: 'Kein Ziel fuer Fingerprint gesetzt' });
+ return;
+ }
+ const original = btn.textContent;
+ btn.disabled = true;
+ btn.textContent = 'Fingerprint laeuft...';
+ updateFingerprintUI({ status: 'LAEUFT', detected_type: '-', confidence: null, ports: {}, message: 'Fingerprint laeuft...' });
+ try {
+ console.debug('[fingerprint] target', target);
+ const res = await fetch('/api/debug/printer/fingerprint', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ // host reicht; ohne port prueft der Endpoint automatisch 8883/6000/7125
+ body: JSON.stringify({ host: target.ip }),
+ });
+ if (!res.ok) {
+ updateFingerprintUI({ status: 'FEHLER', detected_type: '-', confidence: null, ports: {}, message: 'Fingerprint fehlgeschlagen' });
+ return;
+ }
+ const data = await res.json();
+ // Determine whether a usable fingerprint was found
+ const hasFingerprint =
+ data &&
+ (data.detected_type ||
+ data.confidence != null ||
+ (data.ports && Object.keys(data.ports).some(
+ k => data.ports[k]?.reachable === true
+ )));
+
+ if (hasFingerprint) {
+ updateFingerprintUI({ ...data, status: 'OK' });
+ } else {
+ updateFingerprintUI({
+ status: 'NICHT_VERFUEGBAR',
+ detected_type: '-',
+ confidence: null,
+ ports: data?.ports || {},
+ message: 'Fingerprint technisch nicht moeglich oder keine verwertbaren Daten'
+ });
+ }
+ } catch (err) {
+ updateFingerprintUI({ status: 'FEHLER', detected_type: '-', confidence: null, ports: {}, message: 'Fingerprint fehlgeschlagen' });
+ } finally {
+ btn.disabled = false;
+ btn.textContent = original;
+ }
+}
+
+async function handlePortTest(btn, card, ip, port) {
+ if (!btn || !card || !ip) return;
+ const key = `${ip}-${port}`;
+ if (runningPortTests.has(key)) return;
+ runningPortTests.add(key);
+ btn.disabled = true;
+ btn.textContent = 'Teste...';
+ setScannerStatus(card, 'testing', 'TESTING', `Port ${port}: testing...`);
+ const addBtn = card.querySelector('[data-action="addPrinter"]');
+ try {
+ const res = await fetch('/api/debug/printer/test', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ ip,
+ port: Number(port) || 6000,
+ timeout_ms: 1500,
+ }),
+ });
+ if (!res.ok) {
+ setScannerStatus(card, 'failed', 'FAIL', `Port ${port}: error`);
+ if (addBtn) {
+ addBtn.disabled = true;
+ addBtn.classList.add('btn-add-disabled', 'disabled');
+ addBtn.classList.remove('btn-add-active');
+ addBtn.title = 'Port-Test erforderlich';
+ }
+ return;
+ }
+ const data = await res.json();
+ const reachable = data?.reachable === true;
+ const latency = Number(data?.latency_ms);
+ if (reachable) {
+ const latText = Number.isFinite(latency) ? ` (${latency} ms)` : '';
+ setScannerStatus(card, 'ok', 'OK', `Port ${port}: OK${latText}`);
+ probeTarget = { ip, port: Number(port) || 6000 };
+ updateProbeButtonState();
+ if (addBtn) {
+ addBtn.disabled = false;
+ addBtn.classList.remove('btn-add-disabled', 'disabled');
+ addBtn.classList.add('btn-add-active');
+ addBtn.title = '';
+ }
+ } else {
+ const reason = data?.error || 'fail';
+ setScannerStatus(card, 'failed', 'FAIL', `Port ${port}: ${reason}`);
+ if (addBtn) {
+ addBtn.disabled = true;
+ addBtn.classList.add('btn-add-disabled', 'disabled');
+ addBtn.classList.remove('btn-add-active');
+ addBtn.title = 'Port-Test erforderlich';
+ }
+ }
+ } catch (err) {
+ setScannerStatus(card, 'failed', 'FAIL', `Port ${port}: error`);
+ if (addBtn) {
+ addBtn.disabled = true;
+ addBtn.classList.add('btn-add-disabled', 'disabled');
+ addBtn.classList.remove('btn-add-active');
+ addBtn.title = 'Port-Test erforderlich';
+ }
+ } finally {
+ btn.disabled = false;
+ btn.textContent = 'Test';
+ runningPortTests.delete(key);
+ }
+}
+
+async function handleAddPrinter(btn, card) {
+ if (!btn || !card) return;
+ const ip = btn.dataset.ip;
+ const port = Number(btn.dataset.port || 6000);
+ const baseType = normalizePrinterType(btn.dataset.type || 'generic');
+ if (!ip) return;
+
+ // Open dialog instead of directly saving
+ openScannerPrinterDialog(ip, port, baseType, card, btn);
+}
+
+function initScannerTab() {
+ if (scannerInitialized) return;
+ const btn = $('scannerQuickScan');
+ if (btn) {
+ btn.addEventListener('click', handleQuickScanClick);
+ }
+ const addManualBtn = $('scannerAddManual');
+ if (addManualBtn) {
+ addManualBtn.addEventListener('click', openManualPrinterDialog);
+ }
+ const closeDialogBtn = $('closeManualDialog');
+ if (closeDialogBtn) {
+ closeDialogBtn.addEventListener('click', closeManualPrinterDialog);
+ }
+ const testBtn = $('manualTestBtn');
+ if (testBtn) {
+ testBtn.addEventListener('click', handleManualTest);
+ }
+ const saveBtn = $('manualSaveBtn');
+ if (saveBtn) {
+ saveBtn.addEventListener('click', handleManualSave);
+ }
+ const typeSelect = $('manualType');
+ if (typeSelect) {
+ typeSelect.addEventListener('change', toggleBambuCredentials);
+ }
+
+ // Scanner Dialog Event Listeners
+ const closeScannerDialogBtn = $('closeScannerDialog');
+ if (closeScannerDialogBtn) {
+ closeScannerDialogBtn.addEventListener('click', closeScannerPrinterDialog);
+ }
+ const scannerCancelBtn = $('scannerCancelBtn');
+ if (scannerCancelBtn) {
+ scannerCancelBtn.addEventListener('click', closeScannerPrinterDialog);
+ }
+ const scannerConfirmBtn = $('scannerConfirmBtn');
+ if (scannerConfirmBtn) {
+ scannerConfirmBtn.addEventListener('click', handleScannerConfirm);
+ }
+ const scannerTypeSelect = $('scannerDialogType');
+ if (scannerTypeSelect) {
+ scannerTypeSelect.addEventListener('change', toggleScannerBambuCredentials);
+ }
+
+ renderScannerEmpty('No printers detected');
+ loadNetworkInfo();
+ scannerInitialized = true;
+}
+
+function toggleBambuCredentials() {
+ const typeSelect = $('manualType');
+ const bambuCreds = $('bambuCredentials');
+ const bambuAccess = $('bambuAccessCode');
+ const portInput = $('manualPort');
+
+ if (!typeSelect || !bambuCreds || !bambuAccess) return;
+
+ const isBambu = typeSelect.value === 'bambu';
+ bambuCreds.style.display = isBambu ? 'block' : 'none';
+ bambuAccess.style.display = isBambu ? 'block' : 'none';
+
+ // Set default port based on printer type
+ if (portInput && !portInput.value) {
+ if (isBambu) {
+ portInput.value = '8883';
+ portInput.placeholder = 'Standard: 8883 (MQTT TLS)';
+ } else if (typeSelect.value === 'klipper') {
+ portInput.value = '7125';
+ portInput.placeholder = 'Standard: 7125 (Moonraker)';
+ } else {
+ portInput.value = '';
+ portInput.placeholder = 'z.B. 80 oder 443';
+ }
+ }
+}
+
+function toggleScannerBambuCredentials() {
+ const typeSelect = $('scannerDialogType');
+ const bambuCreds = $('scannerBambuCredentials');
+ const bambuAccess = $('scannerAccessCode');
+
+ if (!typeSelect || !bambuCreds || !bambuAccess) return;
+
+ const isBambu = typeSelect.value === 'bambu';
+ bambuCreds.style.display = isBambu ? 'block' : 'none';
+ bambuAccess.style.display = isBambu ? 'block' : 'none';
+}
+
+function openManualPrinterDialog() {
+ const dialog = $('manualPrinterDialog');
+ if (!dialog) return;
+
+ // Reset form
+ const ipInput = $('manualIp');
+ const portInput = $('manualPort');
+ const typeSelect = $('manualType');
+ const serialInput = $('manualSerial');
+ const accessCodeInput = $('manualAccessCode');
+ const testResult = $('manualTestResult');
+ const saveBtn = $('manualSaveBtn');
+
+ if (ipInput) ipInput.value = '';
+ if (portInput) portInput.value = '';
+ if (typeSelect) typeSelect.value = 'bambu';
+ if (serialInput) serialInput.value = '';
+ if (accessCodeInput) accessCodeInput.value = '';
+ if (testResult) {
+ testResult.style.display = 'none';
+ testResult.innerHTML = '';
+ }
+ if (saveBtn) saveBtn.disabled = true;
+
+ // Show Bambu credentials and set default port
+ toggleBambuCredentials();
+
+ dialog.style.display = 'flex';
+}
+
+function closeManualPrinterDialog() {
+ const dialog = $('manualPrinterDialog');
+ if (!dialog) return;
+ dialog.style.display = 'none';
+}
+
+// Scanner Dialog State
+let scannerDialogData = {
+ ip: '',
+ port: 0,
+ type: 'bambu',
+ card: null,
+ btn: null
+};
+
+function openScannerPrinterDialog(ip, port, type, card, btn) {
+ const dialog = $('scannerPrinterDialog');
+ if (!dialog) return;
+
+ // Store data for later use
+ scannerDialogData = { ip, port, type, card, btn };
+
+ // Set values
+ const ipEl = $('scannerDialogIp');
+ const portEl = $('scannerDialogPort');
+ const typeSelect = $('scannerDialogType');
+ const serialInput = $('scannerSerial');
+ const apiKeyInput = $('scannerApiKey');
+ const saveResult = $('scannerSaveResult');
+
+ // Für Bambu-Drucker wird IMMER Port 8883 verwendet (MQTT TLS)
+ // Port 6000 ist nur für Tests, nicht für die tatsächliche Verbindung
+ const displayPort = type === 'bambu' ? '8883 (MQTT TLS)' : port;
+ const infoText = type === 'bambu' ? 'Port 6000 für Tests OK, 8883 wird für MQTT verwendet' : '';
+
+ if (ipEl) ipEl.textContent = ip;
+ if (portEl) {
+ portEl.textContent = displayPort;
+ if (infoText) {
+ portEl.title = infoText;
+ portEl.style.cursor = 'help';
+ }
+ }
+ if (typeSelect) typeSelect.value = type;
+ if (serialInput) serialInput.value = '';
+ if (apiKeyInput) apiKeyInput.value = '';
+ if (saveResult) {
+ saveResult.style.display = 'none';
+ saveResult.innerHTML = '';
+ }
+
+ // Show/hide Bambu credentials based on type
+ toggleScannerBambuCredentials();
+
+ dialog.style.display = 'flex';
+}
+
+function closeScannerPrinterDialog() {
+ const dialog = $('scannerPrinterDialog');
+ if (!dialog) return;
+ dialog.style.display = 'none';
+}
+
+async function handleScannerConfirm() {
+ const { ip, port, card, btn } = scannerDialogData;
+ const typeSelect = $('scannerDialogType');
+ const serialInput = $('scannerSerial');
+ const apiKeyInput = $('scannerApiKey');
+ const saveResult = $('scannerSaveResult');
+ const confirmBtn = $('scannerConfirmBtn');
+
+ if (!typeSelect || !confirmBtn) return;
+
+ const printerType = typeSelect.value;
+ const isBambu = printerType === 'bambu';
+
+ // Validate Bambu credentials
+ if (isBambu) {
+ const serial = serialInput?.value.trim() || '';
+ const apiKey = apiKeyInput?.value.trim() || '';
+
+ if (!serial || !apiKey) {
+ if (saveResult) {
+ saveResult.style.display = 'block';
+ saveResult.className = 'test-result test-result-error';
+ saveResult.textContent = 'Für Bambu Lab Drucker sind Seriennummer und Access Code erforderlich';
+ }
+ return;
+ }
+ }
+
+ // Disable button during save
+ confirmBtn.disabled = true;
+ confirmBtn.textContent = 'Speichere...';
+
+ try {
+ // WICHTIG: Bambu-Drucker benötigen Port 8883 für MQTT (TLS)
+ // Scanner testet Port 6000, aber für die Verbindung brauchen wir 8883
+ const finalPort = isBambu ? 8883 : port;
+
+ const payload = {
+ name: `${printerType}-${ip}`,
+ printer_type: printerType,
+ ip_address: ip,
+ port: finalPort,
+ model: "Lite",
+ mqtt_version: "311",
+ active: true
+ };
+
+ // Add Bambu credentials if needed
+ if (isBambu) {
+ payload.cloud_serial = serialInput?.value.trim() || '';
+ payload.api_key = apiKeyInput?.value.trim() || '';
+ }
+
+ const res = await fetch('/api/printers', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload),
+ });
+
+ const data = await res.json().catch(() => ({}));
+
+ if (data?.status === 'exists') {
+ if (saveResult) {
+ saveResult.style.display = 'block';
+ saveResult.className = 'test-result test-result-warning';
+ saveResult.textContent = 'Drucker bereits im System vorhanden';
+ }
+
+ // Update UI
+ if (btn) {
+ btn.classList.add('btn-add-disabled', 'disabled');
+ btn.classList.remove('btn-add-active');
+ btn.disabled = true;
+ btn.textContent = 'Im System';
+ }
+ if (card) {
+ const saveInfo = card.querySelector('[data-role="saveInfo"]');
+ if (saveInfo) saveInfo.textContent = 'Bereits vorhanden';
+ setScannerStatus(card, 'ok', 'OK', 'info');
+ }
+
+ setTimeout(() => closeScannerPrinterDialog(), 1500);
+ return;
+ }
+
+ if (res.ok) {
+ if (saveResult) {
+ saveResult.style.display = 'block';
+ saveResult.className = 'test-result test-result-success';
+ saveResult.textContent = 'Drucker erfolgreich hinzugefügt!';
+ }
+
+ // Update UI
+ if (btn) {
+ btn.classList.remove('btn-add-disabled', 'disabled');
+ btn.classList.add('btn-add-active');
+ btn.disabled = false;
+ btn.textContent = 'Gespeichert';
+ btn.title = '';
+ }
+ if (card) {
+ const saveInfo = card.querySelector('[data-role="saveInfo"]');
+ if (saveInfo) saveInfo.textContent = 'Gespeichert';
+ setScannerStatus(card, 'ok', 'OK', `Port ${port}: OK`);
+ }
+
+ setTimeout(() => closeScannerPrinterDialog(), 1500);
+ } else {
+ if (saveResult) {
+ saveResult.style.display = 'block';
+ saveResult.className = 'test-result test-result-error';
+ saveResult.textContent = `Fehler beim Speichern: ${data?.detail || 'Unbekannter Fehler'}`;
+ }
+ confirmBtn.disabled = false;
+ confirmBtn.textContent = 'Zum System hinzufügen';
+ }
+ } catch (err) {
+ if (saveResult) {
+ saveResult.style.display = 'block';
+ saveResult.className = 'test-result test-result-error';
+ saveResult.textContent = `Fehler: ${err.message}`;
+ }
+ confirmBtn.disabled = false;
+ confirmBtn.textContent = 'Zum System hinzufügen';
+ }
+}
+
+async function handleManualTest() {
+ const ipInput = $('manualIp');
+ const portInput = $('manualPort');
+ const typeSelect = $('manualType');
+ const testBtn = $('manualTestBtn');
+ const saveBtn = $('manualSaveBtn');
+ const testResult = $('manualTestResult');
+
+ if (!ipInput || !portInput || !typeSelect || !testBtn || !saveBtn || !testResult) return;
+
+ const ip = ipInput.value.trim();
+ const port = parseInt(portInput.value.trim(), 10);
+ const printerType = typeSelect.value;
+
+ // Validate IP
+ const ipRegex = /^(\d{1,3}\.){3}\d{1,3}$/;
+ if (!ip || !ipRegex.test(ip)) {
+ testResult.style.display = 'block';
+ testResult.className = 'test-result test-result-error';
+ testResult.textContent = 'Ungültiges IP-Adressen-Format';
+ saveBtn.disabled = true;
+ return;
+ }
+
+ // Validate port
+ if (!port || port < 1 || port > 65535) {
+ testResult.style.display = 'block';
+ testResult.className = 'test-result test-result-error';
+ testResult.textContent = 'Port muss zwischen 1 und 65535 liegen';
+ saveBtn.disabled = true;
+ return;
+ }
+
+ // Disable button during test
+ testBtn.disabled = true;
+ testBtn.textContent = 'Teste...';
+ testResult.style.display = 'block';
+ testResult.className = 'test-result test-result-info';
+ testResult.textContent = 'Verbindung wird getestet...';
+ saveBtn.disabled = true;
+
+ try {
+ const res = await fetch('/api/debug/printer/test', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ ip, port, timeout_ms: 2000 })
+ });
+
+ const data = await res.json();
+
+ if (data.ok && data.reachable) {
+ testResult.className = 'test-result test-result-success';
+ testResult.textContent = `Verbindung erfolgreich! Latenz: ${data.latency_ms}ms`;
+ saveBtn.disabled = false;
+ } else {
+ testResult.className = 'test-result test-result-error';
+ testResult.textContent = data.message || 'Verbindung fehlgeschlagen - Drucker nicht erreichbar';
+ saveBtn.disabled = true;
+ }
+ } catch (err) {
+ testResult.className = 'test-result test-result-error';
+ testResult.textContent = 'Test fehlgeschlagen: ' + err.message;
+ saveBtn.disabled = true;
+ } finally {
+ testBtn.disabled = false;
+ testBtn.textContent = 'Verbindung testen';
+ }
+}
+
+async function handleManualSave() {
+ const ipInput = $('manualIp');
+ const portInput = $('manualPort');
+ const typeSelect = $('manualType');
+ const serialInput = $('manualSerial');
+ const accessCodeInput = $('manualAccessCode');
+ const saveBtn = $('manualSaveBtn');
+ const testResult = $('manualTestResult');
+
+ if (!ipInput || !portInput || !typeSelect || !saveBtn || !testResult) return;
+
+ const ip = ipInput.value.trim();
+ const port = parseInt(portInput.value.trim(), 10);
+ const printerType = typeSelect.value;
+ const serial = serialInput ? serialInput.value.trim() : '';
+ const accessCode = accessCodeInput ? accessCodeInput.value.trim() : '';
+
+ // Validate Bambu credentials if Bambu is selected
+ if (printerType === 'bambu') {
+ if (!serial || !accessCode) {
+ testResult.style.display = 'block';
+ testResult.className = 'test-result test-result-error';
+ testResult.textContent = 'Seriennummer und Access Code sind für Bambu-Drucker erforderlich';
+ saveBtn.disabled = false;
+ saveBtn.textContent = 'Drucker speichern';
+ return;
+ }
+ }
+
+ saveBtn.disabled = true;
+ saveBtn.textContent = 'Speichere...';
+
+ const payload = {
+ name: `${printerType}-${ip}`,
+ printer_type: printerType,
+ ip_address: ip,
+ port: port,
+ model: printerType === 'bambu' ? 'X1C' : printerType === 'klipper' ? 'Klipper' : 'Generic',
+ mqtt_version: '311',
+ active: true
+ };
+
+ // Add Bambu credentials if Bambu is selected
+ if (printerType === 'bambu') {
+ payload.cloud_serial = serial;
+ payload.api_key = accessCode;
+ }
+
+ try {
+ const res = await fetch('/api/printers', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload)
+ });
+
+ const data = await res.json();
+
+ if (data.status === 'exists') {
+ testResult.className = 'test-result test-result-warning';
+ testResult.textContent = 'Drucker existiert bereits im System';
+ } else if (data.id) {
+ testResult.className = 'test-result test-result-success';
+ testResult.textContent = 'Drucker erfolgreich hinzugefügt!';
+ setTimeout(() => {
+ closeManualPrinterDialog();
+ }, 1500);
+ } else {
+ testResult.className = 'test-result test-result-error';
+ testResult.textContent = 'Fehler beim Hinzufügen des Druckers';
+ }
+ } catch (err) {
+ testResult.className = 'test-result test-result-error';
+ testResult.textContent = 'Speichern fehlgeschlagen: ' + err.message;
+ } finally {
+ saveBtn.disabled = false;
+ saveBtn.textContent = 'Drucker speichern';
+ }
+}
+
+function renderSystemHealth(statusData) {
+ const badges = document.querySelectorAll('#healthBadge');
+ const texts = document.querySelectorAll('#healthText');
+ const reasonsEl = document.getElementById('healthReasons');
+ const whyBadge = document.getElementById('whyBadgePro');
+ const whyList = document.getElementById('whyReasonsPro');
+ const setHealth = (id, ok, warn) => {
+ const el = document.getElementById(id);
+ if (!el) return;
+ el.textContent = ok ? 'OK' : 'Attention';
+ el.classList.remove('health-ok', 'health-warn', 'health-bad');
+ if (ok) el.classList.add('health-ok');
+ else if (warn) el.classList.add('health-warn');
+ else el.classList.add('health-bad');
+ };
+ if (!badges.length || !texts.length) {
+ console.warn('[health] healthBadge or healthText not found in DOM');
+ return;
+ }
+ const applyClass = (el, level) => {
+ el.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle');
+ if (level === 'ok') el.classList.add('status-ok');
+ else if (level === 'critical') el.classList.add('status-error');
+ else el.classList.add('status-warn');
+ };
+ // ensure text has no status classes
+ texts.forEach(t => t.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle'));
+ const api = (statusData?.api || '').toLowerCase();
+ const db = (statusData?.db || '').toLowerCase();
+ const ws = (statusData?.ws || '').toLowerCase();
+ // mqtt variable entfernt - wird nicht mehr benötigt, da mqtt-connect-handler.js zuständig ist
+
+ const sysHealth = statusData?.systemHealth || {};
+ let level = sysHealth.status || 'warning';
+ const textMap = {
+ ok: 'All core services operational',
+ warning: 'Warning due to service status or response time.',
+ critical: 'Critical system services',
+ };
+ let reasons = Array.isArray(sysHealth.reasons) ? sysHealth.reasons.filter(Boolean) : [];
+ if (!statusData?.systemHealth) {
+ const avgMs = Number(statusData?.runtimeAvgMs);
+ const wsClients = Number(statusData?.wsClients);
+ if (Number.isFinite(avgMs) && avgMs >= 600) {
+ reasons.push(`High average response time (${Math.round(avgMs)} ms)`);
+ }
+ // MQTT-Status-Checks entfernt - mqtt-connect-handler.js ist zuständig
+ if (ws === 'listening' && (!Number.isFinite(wsClients) || wsClients === 0)) {
+ reasons.push('WebSocket has no active clients');
+ }
+ if (db === 'disconnected' || db === 'offline') {
+ reasons.push('Database is not connected');
+ }
+ level = reasons.length ? 'warning' : 'ok';
+ }
+ if (level === 'warning' && !reasons.length) {
+ reasons = ['Some services require attention'];
+ }
+ if (level === 'ok' && !reasons.length) {
+ reasons = ['System is operating normally'];
+ }
+
+ badges.forEach(b => {
+ applyClass(b, level);
+ b.textContent = level === 'ok' ? 'OK' : level === 'critical' ? 'Critical' : 'Warning';
+ });
+ texts.forEach(t => {
+ t.textContent = textMap[level] || textMap.warning;
+ applyClass(t, level);
+ });
+
+ // Mirror to pro detail placeholders
+ setText('proApiStatus', statusData?.api || '-');
+ setText('proDbStatus', statusData?.db || '-');
+ setText('proWsStatus', statusData?.ws || '-');
+ // proMqttStatus wird NICHT mehr hier gesetzt - mqtt-connect-handler.js ist zuständig
+
+ if (whyBadge && whyList) {
+ whyBadge.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle');
+ if (level === 'ok') whyBadge.classList.add('status-ok');
+ else if (level === 'critical') whyBadge.classList.add('status-error');
+ else whyBadge.classList.add('status-warn');
+
+ whyList.innerHTML = '';
+ let whyReasons = Array.isArray(reasons) ? [...reasons] : [];
+ if (level === 'ok') {
+ if (!whyReasons.length || whyReasons[0] === 'System is operating normally') {
+ whyReasons = ['Keine Warnungen aktiv.'];
+ }
+ } else {
+ if (!whyReasons.length) {
+ whyReasons = ['Warnung aktiv, Ursache nicht ermittelt.'];
+ }
+ }
+ whyReasons.forEach(msg => {
+ const li = document.createElement('li');
+ li.textContent = msg;
+ whyList.appendChild(li);
+ });
+ }
+
+ if (reasonsEl) {
+ reasonsEl.innerHTML = '';
+ reasons.forEach(msg => {
+ const li = document.createElement('li');
+ li.textContent = msg;
+ reasonsEl.appendChild(li);
+ });
+ reasonsEl.style.display = document.body.classList.contains('pro-mode') ? '' : 'none';
+ }
+}
+
+function startPolling() {
+ loadSystemStatus();
+ loadBackendStatus();
+ if (systemInterval) clearInterval(systemInterval);
+ if (backendInterval) clearInterval(backendInterval);
+ systemInterval = setInterval(loadSystemStatus, POLL_MS);
+ backendInterval = setInterval(loadBackendStatus, POLL_MS);
+}
+
+function startPerformancePolling() {
+ stopPerformancePolling();
+ loadPerformanceLite();
+ performanceInterval = setInterval(loadPerformanceLite, POLL_MS);
+}
+
+function stopPerformancePolling() {
+ if (performanceInterval) {
+ clearInterval(performanceInterval);
+ performanceInterval = null;
+ }
+}
+
+document.addEventListener('DOMContentLoaded', () => {
+
+ // DEPRECATED: initDebugModeUI wurde durch bindDebugModeToggle im HTML ersetzt
+ // if (typeof initDebugModeUI === 'function') {
+ // initDebugModeUI();
+ // } else {
+ // console.warn('[debug.js] initDebugModeUI fehlt – übersprungen');
+ // }
+
+ if (typeof initTabs === 'function') {
+ initTabs();
+ } else {
+ console.warn('[debug.js] initTabs fehlt – übersprungen');
+ }
+
+ if (typeof startPolling === 'function') {
+ startPolling();
+ } else {
+ console.warn('[debug.js] startPolling fehlt – übersprungen');
+ }
+
+ const probeBtn = document.getElementById('proProbeStart');
+ if (probeBtn && typeof handleProbe === 'function') {
+ probeBtn.addEventListener('click', () => handleProbe(probeBtn));
+ if (typeof updateProbeButtonState === 'function') {
+ updateProbeButtonState();
+ }
+ }
+
+ const fpBtn = document.getElementById('proFingerprintStart');
+ if (fpBtn && typeof handleFingerprint === 'function') {
+ fpBtn.addEventListener('click', () => handleFingerprint(fpBtn));
+ if (typeof updateProbeButtonState === 'function') {
+ updateProbeButtonState();
+ }
+ }
+
+ if (typeof activeTab !== 'undefined') {
+ if (activeTab === 'performance' && typeof startPerformancePolling === 'function') {
+ startPerformancePolling();
+ }
+ if (activeTab === 'scanner' && typeof initScannerTab === 'function') {
+ initScannerTab();
+ }
+ }
+
+ if (typeof initJsonInspector === 'function') {
+ initJsonInspector();
+ }
+
+ // JSON start button
+ const jsonStartBtn = document.getElementById('json-start-btn');
+ if (jsonStartBtn) {
+ jsonStartBtn.addEventListener('click', () => {
+ if (liveStatePaused) {
+ toggleLiveStatePause();
+ } else if (window.showToast) {
+ window.showToast('Live-Ansicht bereits aktiv', 'info');
+ }
+ });
+ }
+
+ // JSON copy button
+ const jsonCopyBtn = document.getElementById('json-copy-btn');
+ if (jsonCopyBtn) {
+ jsonCopyBtn.addEventListener('click', () => copyJsonInspector());
+ }
+
+ // JSON pause button
+ const jsonPauseBtn = document.getElementById('json-pause-btn');
+ if (jsonPauseBtn) {
+ jsonPauseBtn.addEventListener('click', () => toggleLiveStatePause());
+ }
+
+ if (typeof initServicesButtons === 'function') {
+ initServicesButtons();
+ }
+});
+
+// ============================================
+// MQTT TOPICS MANAGEMENT FUNCTIONS
+// ============================================
+
+async function refreshMQTTTopics() {
+ console.log('refreshMQTTTopics called');
+ try {
+ const r = await fetch('/api/mqtt/runtime/topics', { method: 'GET' });
+ const data = await r.json();
+ console.log('Topics API response:', data);
+ if (r.ok && data && data.connected) {
+ _renderTopics(data.items || []);
+ return;
+ }
+ _renderTopics([]);
+ } catch (e) {
+ console.error('refreshMQTTTopics error:', e);
+ const hint = document.getElementById('mqttTopicsHint');
+ if (hint) hint.textContent = 'Keine Daten';
+ _renderTopics([]);
+ }
+}
+
+function _renderTopics(items) {
+ console.log('_renderTopics called with', items);
+ const list = document.getElementById('topicsList');
+ const empty = document.getElementById('topicsEmpty');
+ const hint = document.getElementById('mqttTopicsHint');
+ const countEl = document.getElementById('mqttSubscriptionsCount');
+ if (!list) return;
+
+ const safeItems = Array.isArray(items) ? items : [];
+ if (countEl) countEl.textContent = String(safeItems.length);
+
+ if (safeItems.length === 0) {
+ if (hint) hint.textContent = 'Keine Topics';
+ if (empty) empty.style.display = 'block';
+ list.style.display = 'none';
+ return;
+ }
+
+ if (empty) empty.style.display = 'none';
+ list.style.display = 'block';
+ list.innerHTML = '';
+
+ safeItems.forEach((topic) => {
+ const row = document.createElement('div');
+ row.className = 'kv';
+
+ const k = document.createElement('div');
+ k.className = 'k';
+ k.style.fontFamily = 'Consolas,monospace';
+ k.style.whiteSpace = 'nowrap';
+ k.style.overflow = 'hidden';
+ k.style.textOverflow = 'ellipsis';
+ k.title = String(topic || '');
+ k.textContent = String(topic || '');
+
+ const v = document.createElement('div');
+ v.className = 'v';
+ v.textContent = 'abonniert';
+
+ row.appendChild(k);
+ row.appendChild(v);
+ list.appendChild(row);
+ });
+}
+
+let _mqttTopicsPollTimer = null;
+
+function _syncTopicsPolling() {
+ console.log('_syncTopicsPolling called, _mqttLastConnected:', window._mqttLastConnected);
+ const shouldRun = Boolean(window._mqttLastConnected);
+ if (!shouldRun) {
+ if (_mqttTopicsPollTimer) {
+ clearInterval(_mqttTopicsPollTimer);
+ _mqttTopicsPollTimer = null;
+ }
+ const hint = document.getElementById('mqttTopicsHint');
+ if (hint) hint.textContent = 'Nicht verbunden';
+ const empty = document.getElementById('topicsEmpty');
+ const list = document.getElementById('topicsList');
+ if (empty) empty.style.display = 'block';
+ if (list) list.style.display = 'none';
+ return;
+ }
+
+ if (_mqttTopicsPollTimer) return;
+ console.log('Starting topics polling...');
+ refreshMQTTTopics().catch(() => {});
+ _mqttTopicsPollTimer = setInterval(() => {
+ refreshMQTTTopics().catch(() => {});
+ }, 4000);
+}
+
+let _mqttMessagesPollTimer = null;
+
+async function refreshMQTTMessages() {
+ console.log('refreshMQTTMessages called');
+ try {
+ const r = await fetch('/api/mqtt/runtime/messages?limit=50', { method: 'GET' });
+ const data = await r.json();
+ console.log('Messages API response:', data);
+ if (r.ok && data) {
+ _renderMessages(data.messages || []);
+ return;
+ }
+ _renderMessages([]);
+ } catch (e) {
+ console.error('refreshMQTTMessages error:', e);
+ _renderMessages([]);
+ }
+}
+
+function _renderMessages(messages) {
+ console.log('_renderMessages called with', messages.length, 'messages');
+ const container = document.getElementById('mqttLiveMessages');
+ if (!container) {
+ console.warn('Live messages container not found');
+ return;
+ }
+
+ const safeMessages = Array.isArray(messages) ? messages : [];
+
+ if (safeMessages.length === 0) {
+ container.innerHTML = 'Keine Live-Nachrichten
';
+ return;
+ }
+
+ // Clear and rebuild
+ container.innerHTML = '';
+
+ safeMessages.forEach((msg, idx) => {
+ if (!msg || typeof msg !== 'object') return;
+
+ const msgDiv = document.createElement('div');
+ msgDiv.style.cssText = 'padding: 12px 10px; border-bottom: 1px solid var(--border); display: flex; justify-content: space-between; align-items: flex-start; gap: 10px;';
+
+ // Left: Topic and Payload
+ const leftDiv = document.createElement('div');
+ leftDiv.style.cssText = 'flex: 1; min-width: 0;';
+
+ const topicSpan = document.createElement('div');
+ topicSpan.style.cssText = 'font-family: Consolas, monospace; color: #3498db; font-weight: 500; word-break: break-all;';
+ topicSpan.textContent = msg.topic || '';
+
+ const payloadSpan = document.createElement('div');
+ payloadSpan.style.cssText = 'font-family: Consolas, monospace; color: #666; font-size: 12px; margin-top: 4px; word-break: break-all; max-height: 60px; overflow: hidden;';
+ const payload = String(msg.payload || '').substring(0, 150);
+ payloadSpan.textContent = payload || '(empty)';
+
+ leftDiv.appendChild(topicSpan);
+ leftDiv.appendChild(payloadSpan);
+
+ // Right: Timestamp
+ const timeSpan = document.createElement('div');
+ timeSpan.style.cssText = 'font-size: 12px; color: var(--text-dim); white-space: nowrap;';
+ try {
+ const ts = new Date(msg.timestamp);
+ const hours = String(ts.getHours()).padStart(2, '0');
+ const mins = String(ts.getMinutes()).padStart(2, '0');
+ const secs = String(ts.getSeconds()).padStart(2, '0');
+ timeSpan.textContent = `${hours}:${mins}:${secs}`;
+ } catch (e) {
+ timeSpan.textContent = '00:00:00';
+ }
+
+ msgDiv.appendChild(leftDiv);
+ msgDiv.appendChild(timeSpan);
+ container.appendChild(msgDiv);
+ });
+
+ const treeContainer = document.getElementById('mqtt-json-tree');
+ if (treeContainer) {
+ updateMqttJsonTreeFromMessage(safeMessages[0]);
+ }
+}
+
+function updateMqttJsonTreeFromMessage(message) {
+ const container = document.getElementById('mqtt-json-tree');
+ if (!container) return;
+ if (!message) {
+ container.innerHTML = 'Warte auf JSON-Daten...
';
+ return;
+ }
+
+ let payload = message.payload;
+ if (typeof payload === 'string') {
+ try {
+ payload = JSON.parse(payload);
+ } catch (error) {
+ payload = { raw: payload };
+ }
+ }
+
+ renderMqttMessageJsonTree(payload, container);
+}
+
+function renderMqttMessageJsonTree(data, container) {
+ if (!container) return;
+ container.innerHTML = '';
+
+ function renderNode(key, value, parentElement, level) {
+ const row = document.createElement('div');
+ row.className = 'json-row';
+ row.dataset.level = String(level);
+
+ const isCollection = value && typeof value === 'object';
+ const entries = Array.isArray(value) ? Array.from(value.entries()) : Object.entries(value || {});
+ const hasChildren = isCollection && entries.length > 0;
+
+ const toggle = document.createElement('span');
+ toggle.className = 'json-toggle';
+ toggle.textContent = hasChildren ? '▼' : ' ';
+ toggle.style.visibility = hasChildren ? 'visible' : 'hidden';
+
+ const keySpan = document.createElement('span');
+ keySpan.className = 'json-key';
+ keySpan.textContent = key;
+
+ const valueSpan = document.createElement('span');
+ const type = determineType(value);
+ valueSpan.className = `json-value json-${type}`;
+ if (type === 'object') {
+ valueSpan.textContent = '{}';
+ } else if (type === 'array') {
+ valueSpan.textContent = '[]';
+ } else {
+ valueSpan.textContent = formatPrimitive(value, type);
+ }
+
+ const copyBtn = document.createElement('button');
+ copyBtn.className = 'copy-btn';
+ copyBtn.textContent = 'Copy';
+ const copyValue = type === 'object' || type === 'array' ? JSON.stringify(value) : String(value ?? 'null');
+ copyBtn.addEventListener('click', () => copyToClipboard(copyValue));
+
+ row.append(toggle, keySpan, valueSpan, copyBtn);
+ parentElement.appendChild(row);
+
+ if (hasChildren) {
+ const childrenWrapper = document.createElement('div');
+ childrenWrapper.className = 'json-children';
+ row.after(childrenWrapper);
+
+ toggle.addEventListener('click', () => {
+ const expanded = toggle.textContent === '▼';
+ toggle.textContent = expanded ? '▶' : '▼';
+ childrenWrapper.classList.toggle('collapsed', expanded);
+ });
+
+ for (const [childKey, childValue] of entries) {
+ renderNode(String(childKey), childValue, childrenWrapper, level + 1);
+ }
+ }
+ }
+
+ renderNode('(root)', data, container, 0);
+}
+
+function determineType(value) {
+ if (Array.isArray(value)) return 'array';
+ if (value === null) return 'null';
+ if (typeof value === 'number') return 'number';
+ if (typeof value === 'boolean') return 'boolean';
+ if (typeof value === 'string') return 'value';
+ if (typeof value === 'object') return 'object';
+ return 'value';
+}
+
+function formatPrimitive(value, type) {
+ if (type === 'null') return 'null';
+ if (value === undefined) return 'undefined';
+ return String(value);
+}
+
+function copyToClipboard(text) {
+ if (!text) return;
+ if (navigator.clipboard && navigator.clipboard.writeText) {
+ navigator.clipboard.writeText(text).catch(() => {});
+ return;
+ }
+ const textarea = document.createElement('textarea');
+ textarea.value = text;
+ textarea.style.position = 'fixed';
+ textarea.style.opacity = '0';
+ document.body.appendChild(textarea);
+ textarea.focus();
+ textarea.select();
+ document.execCommand('copy');
+ document.body.removeChild(textarea);
+}
+
+function _syncMessagesPolling() {
+ console.log('_syncMessagesPolling called, _mqttLastConnected:', window._mqttLastConnected);
+ const shouldRun = Boolean(window._mqttLastConnected);
+
+ if (!shouldRun) {
+ if (_mqttMessagesPollTimer) {
+ clearInterval(_mqttMessagesPollTimer);
+ _mqttMessagesPollTimer = null;
+ }
+ return;
+ }
+
+ if (_mqttMessagesPollTimer) return;
+ console.log('Starting messages polling...');
+ refreshMQTTMessages().catch(() => {});
+ _mqttMessagesPollTimer = setInterval(() => {
+ refreshMQTTMessages().catch(() => {});
+ }, 2000); // Update every 2 seconds for live feel
+}
+
+// MQTT Detail & Health Functions
+async function refreshMQTTDetails() {
+ console.log('refreshMQTTDetails called');
+ try {
+ const r = await fetch('/api/mqtt/runtime/status', { method: 'GET' });
+ const data = await r.json();
+ console.log('MQTT Details response:', data);
+
+ if (r.ok && data) {
+ _updateMQTTDetails(data);
+ return;
+ }
+ _clearMQTTDetails();
+ } catch (e) {
+ console.error('refreshMQTTDetails error:', e);
+ _clearMQTTDetails();
+ }
+}
+
+function _updateMQTTDetails(status) {
+ // MQTT Detail Panel - Lite Version
+ const statusEl_lite = document.getElementById('mqttStatus_lite');
+ const brokerEl_lite = document.getElementById('mqttBroker_lite');
+ const clientsEl_lite = document.getElementById('mqttClients_lite');
+
+ if (statusEl_lite) {
+ statusEl_lite.textContent = status.connected ? 'Verbunden' : 'Nicht verbunden';
+ statusEl_lite.style.color = status.connected ? '#2ecc71' : '#e74c3c';
+ }
+
+ if (brokerEl_lite) {
+ brokerEl_lite.textContent = status.broker || '-';
+ }
+
+ if (clientsEl_lite) {
+ clientsEl_lite.textContent = '1'; // Wir haben immer 1 client (runtime)
+ }
+
+ // MQTT Detail Panel - Pro Version
+ const statusEl_pro = document.getElementById('mqttStatus_pro');
+ const brokerEl_pro = document.getElementById('mqttBroker_pro');
+ const clientsEl_pro = document.getElementById('mqttClients_pro');
+
+ if (statusEl_pro) {
+ statusEl_pro.textContent = status.connected ? 'Verbunden' : 'Nicht verbunden';
+ statusEl_pro.style.color = status.connected ? '#2ecc71' : '#e74c3c';
+ }
+
+ if (brokerEl_pro) {
+ brokerEl_pro.textContent = status.broker || '-';
+ }
+
+ if (clientsEl_pro) {
+ clientsEl_pro.textContent = '1'; // Wir haben immer 1 client (runtime)
+ }
+
+ // Health & Statistik Panel - Lite Version
+ const msgSecEl_lite = document.getElementById('mqttMsgSec_lite');
+ const errorsEl_lite = document.getElementById('mqttErrors_lite');
+ const qosAvgEl_lite = document.getElementById('mqttQosAvg_lite');
+
+ if (msgSecEl_lite) {
+ // Calculate messages per second from uptime and message_count
+ const msgCount = parseInt(status.message_count || 0);
+ const uptime = status.uptime || '00:00:00';
+ let msgPerSec = 0;
+
+ try {
+ const parts = uptime.split(':');
+ if (parts.length === 3) {
+ const hours = parseInt(parts[0]) || 0;
+ const mins = parseInt(parts[1]) || 0;
+ const secs = parseInt(parts[2]) || 0;
+ const totalSecs = hours * 3600 + mins * 60 + secs;
+ msgPerSec = totalSecs > 0 ? (msgCount / totalSecs).toFixed(2) : 0;
+ }
+ } catch (e) {
+ msgPerSec = 0;
+ }
+ msgSecEl_lite.textContent = msgPerSec + ' msg/s';
+ }
+
+ if (errorsEl_lite) {
+ errorsEl_lite.textContent = '0'; // No error tracking yet
+ }
+
+ if (qosAvgEl_lite) {
+ qosAvgEl_lite.textContent = status.qos || '1';
+ }
+
+ // Health & Statistik Panel - Pro Version
+ const msgSecEl_pro = document.getElementById('mqttMsgSec_pro');
+ const errorsEl_pro = document.getElementById('mqttErrors_pro');
+ const qosAvgEl_pro = document.getElementById('mqttQosAvg_pro');
+
+ if (msgSecEl_pro) {
+ // Calculate messages per second from uptime and message_count
+ const msgCount = parseInt(status.message_count || 0);
+ const uptime = status.uptime || '00:00:00';
+ let msgPerSec = 0;
+
+ try {
+ const parts = uptime.split(':');
+ if (parts.length === 3) {
+ const hours = parseInt(parts[0]) || 0;
+ const mins = parseInt(parts[1]) || 0;
+ const secs = parseInt(parts[2]) || 0;
+ const totalSecs = hours * 3600 + mins * 60 + secs;
+ msgPerSec = totalSecs > 0 ? (msgCount / totalSecs).toFixed(2) : 0;
+ }
+ } catch (e) {
+ msgPerSec = 0;
+ }
+ msgSecEl_pro.textContent = msgPerSec + ' msg/s';
+ }
+
+ if (errorsEl_pro) {
+ errorsEl_pro.textContent = '0'; // No error tracking yet
+ }
+
+ if (qosAvgEl_pro) {
+ qosAvgEl_pro.textContent = status.qos || '1';
+ }
+}
+
+function _clearMQTTDetails() {
+ const elements = [
+ 'mqttStatus_lite', 'mqttBroker_lite', 'mqttClients_lite',
+ 'mqttMsgSec_lite', 'mqttErrors_lite', 'mqttQosAvg_lite',
+ 'mqttStatus_pro', 'mqttBroker_pro', 'mqttClients_pro',
+ 'mqttMsgSec_pro', 'mqttErrors_pro', 'mqttQosAvg_pro'
+ ];
+
+ elements.forEach(id => {
+ const el = document.getElementById(id);
+ if (el) el.textContent = '-';
+ });
+}
+
+let _mqttDetailsPollTimer = null;
+
+function _syncDetailsPoll() {
+ console.log('_syncDetailsPoll called, _mqttLastConnected:', window._mqttLastConnected);
+ const shouldRun = Boolean(window._mqttLastConnected);
+
+ if (!shouldRun) {
+ if (_mqttDetailsPollTimer) {
+ clearInterval(_mqttDetailsPollTimer);
+ _mqttDetailsPollTimer = null;
+ }
+ _clearMQTTDetails();
+ return;
+ }
+
+ if (_mqttDetailsPollTimer) return;
+ console.log('Starting details polling...');
+ refreshMQTTDetails().catch(() => {});
+ _mqttDetailsPollTimer = setInterval(() => {
+ refreshMQTTDetails().catch(() => {});
+ }, 5000); // Update every 5 seconds
+}
+
+// Export globally
+window.refreshMQTTTopics = refreshMQTTTopics;
+window._renderTopics = _renderTopics;
+window._syncTopicsPolling = _syncTopicsPolling;
+window.refreshMQTTMessages = refreshMQTTMessages;
+window._renderMessages = _renderMessages;
+window._syncMessagesPolling = _syncMessagesPolling;
+window.refreshMQTTDetails = refreshMQTTDetails;
+window._updateMQTTDetails = _updateMQTTDetails;
+window._syncDetailsPoll = _syncDetailsPoll;
+
+console.log('✓ MQTT Topics & Messages functions exported from debug.js');
+
+// ============================================
+// JSON INSPECTOR FUNCTIONS
+// ============================================
+
+let jsonInspectorLimits = {
+ max_size_mb: 5,
+ max_depth: 50,
+ allow_override: false
+};
+
+async function loadJsonInspectorLimits() {
+ try {
+ const res = await fetch('/api/config/current');
+ if (!res.ok) return;
+ const data = await res.json();
+ const limits = data?.json_inspector;
+ if (limits && typeof limits === 'object') {
+ jsonInspectorLimits = {
+ max_size_mb: limits.max_size_mb || 5,
+ max_depth: limits.max_depth || 50,
+ allow_override: limits.allow_override || false
+ };
+ updateJsonInspectorLimitDisplay();
+ }
+ } catch (err) {
+ console.warn('[json-inspector] Failed to load limits', err);
+ }
+}
+
+function updateJsonInspectorLimitDisplay() {
+ const sizeEl = document.getElementById('json-limit-size');
+ const depthEl = document.getElementById('json-limit-depth');
+ const overrideEl = document.getElementById('json-limit-override');
+
+ if (sizeEl) sizeEl.textContent = `${jsonInspectorLimits.max_size_mb} MB`;
+ if (depthEl) depthEl.textContent = jsonInspectorLimits.max_depth.toString();
+ if (overrideEl) overrideEl.textContent = jsonInspectorLimits.allow_override ? 'Yes' : 'No';
+}
+
+function calculateJsonDepth(obj, currentDepth = 0) {
+ if (typeof obj !== 'object' || obj === null) return currentDepth;
+ let maxDepth = currentDepth;
+ for (const key in obj) {
+ if (obj.hasOwnProperty(key)) {
+ const depth = calculateJsonDepth(obj[key], currentDepth + 1);
+ maxDepth = Math.max(maxDepth, depth);
+ }
+ }
+ return maxDepth;
+}
+
+function validateJsonData(jsonData, jsonString) {
+ const statusBadge = document.getElementById('json-inspector-status');
+ const warningEl = document.getElementById('json-inspector-warning');
+
+ if (!statusBadge || !warningEl) return { allowed: true, reason: null };
+
+ const sizeMB = (new Blob([jsonString]).size) / (1024 * 1024);
+ const depth = calculateJsonDepth(jsonData);
+
+ let sizeExceeded = false;
+ let depthExceeded = false;
+
+ if (sizeMB > jsonInspectorLimits.max_size_mb) {
+ sizeExceeded = true;
+ }
+
+ if (depth > jsonInspectorLimits.max_depth) {
+ depthExceeded = true;
+ }
+
+ if (!sizeExceeded && !depthExceeded) {
+ statusBadge.className = 'status-badge status-ok';
+ statusBadge.textContent = 'Ready';
+ warningEl.style.display = 'none';
+ warningEl.textContent = '';
+ return { allowed: true, reason: null };
+ }
+
+ const reasons = [];
+ if (sizeExceeded) {
+ reasons.push(`JSON size (${sizeMB.toFixed(2)} MB) exceeds configured limit (${jsonInspectorLimits.max_size_mb} MB)`);
+ }
+ if (depthExceeded) {
+ reasons.push(`JSON depth (${depth}) exceeds configured limit (${jsonInspectorLimits.max_depth})`);
+ }
+
+ const reasonText = reasons.join('. ');
+
+ if (jsonInspectorLimits.allow_override) {
+ statusBadge.className = 'status-badge status-warn';
+ statusBadge.textContent = 'Limit exceeded';
+ warningEl.textContent = reasonText + '. Rendering continued.';
+ warningEl.style.display = '';
+ return { allowed: true, reason: reasonText };
+ } else {
+ statusBadge.className = 'status-badge status-error';
+ statusBadge.textContent = 'Rendering blocked';
+ warningEl.textContent = reasonText + '. Rendering blocked. Enable override in Config Manager to proceed.';
+ warningEl.style.display = '';
+ return { allowed: false, reason: reasonText };
+ }
+}
+
+// ============================================
+// DEPRECATED - JSON Inspector moved to json_inspector_new.js
+// ============================================
+// This section is kept for reference but no longer used
+// The new JSON Inspector is in json_inspector_new.js
+
+// State-Management für geöffnete JSON-Knoten (DEPRECATED)
+let jsonTreeOpenPaths = new Set();
+
+// DEPRECATED - Use json_inspector_new.js instead
+// This function is no longer called
+function renderJsonTree_DEPRECATED(jsonData) {
+ const treeEl = document.getElementById('json-inspector-tree');
+ if (!treeEl) {
+ return;
+ }
+
+ // Function body removed - replaced by json_inspector_new.js
+ console.warn('renderJsonTree_DEPRECATED called - use json_inspector_new.js instead');
+ return;
+}
+
+// Helper: Extract JSON from text that may contain prefixes (timestamps, topics, etc.)
+function parsePossiblyWrappedJSON(text) {
+ try {
+ return JSON.parse(text);
+ } catch (e) {
+ const start = text.indexOf('{');
+ const end = text.lastIndexOf('}');
+ if (start === -1 || end === -1 || end <= start) {
+ throw e;
+ }
+ const extracted = text.slice(start, end + 1);
+ const parsed = JSON.parse(extracted);
+
+ // If the parsed object has a 'payload' field that's a JSON string, parse it
+ if (parsed.payload && typeof parsed.payload === 'string') {
+ // Check if payload is truncated
+ if (parsed.payload.includes('...[truncated]')) {
+ // Try to extract valid JSON from truncated payload
+ const payloadStart = parsed.payload.indexOf('{');
+ const truncPos = parsed.payload.indexOf('...[truncated]');
+ if (payloadStart !== -1 && truncPos > payloadStart) {
+ // Find the last complete closing brace before truncation
+ const beforeTrunc = parsed.payload.slice(payloadStart, truncPos);
+ // Count braces to find matching close
+ let depth = 0;
+ let lastValidPos = -1;
+ for (let i = 0; i < beforeTrunc.length; i++) {
+ if (beforeTrunc[i] === '{') depth++;
+ if (beforeTrunc[i] === '}') {
+ depth--;
+ if (depth === 0) lastValidPos = i;
+ }
+ }
+ if (lastValidPos !== -1) {
+ const truncatedJson = beforeTrunc.slice(0, lastValidPos + 1);
+ try {
+ parsed.payload = JSON.parse(truncatedJson);
+ parsed._truncated = true;
+ return parsed;
+ } catch (e2) {
+ // Keep as string
+ }
+ }
+ }
+ parsed._truncated = true;
+ } else {
+ // Not truncated, try to parse normally
+ try {
+ parsed.payload = JSON.parse(parsed.payload);
+ } catch (payloadErr) {
+ // Keep as string if it's not valid JSON
+ }
+ }
+ }
+
+ return parsed;
+ }
+}
+
+function initJsonInspector() {
+ const uploadEl = document.getElementById('json-upload');
+ if (!uploadEl) return;
+
+ uploadEl.addEventListener('change', async (e) => {
+ const file = e.target.files?.[0];
+ if (!file) return;
+
+ try {
+ const text = await file.text();
+ let jsonData;
+
+ // Try to parse as regular JSON first
+ try {
+ jsonData = JSON.parse(text);
+ } catch (firstErr) {
+ // If that fails, try JSONL format (one JSON object per line)
+ const lines = text.split('\n').filter(line => line.trim());
+ const parsed = [];
+ let hasAnyJson = false;
+
+ for (const line of lines) {
+ // Skip lines that don't contain JSON (check for opening brace)
+ if (!line.includes('{')) {
+ continue;
+ }
+
+ try {
+ // Use robust parser that can extract JSON from log lines
+ const obj = parsePossiblyWrappedJSON(line);
+ parsed.push(obj);
+ hasAnyJson = true;
+ } catch (lineErr) {
+ // Skip lines that fail to parse (e.g., plain text log lines)
+ console.debug('[json-inspector] Skipping non-JSON line:', line.substring(0, 100));
+ continue;
+ }
+ }
+
+ if (hasAnyJson && parsed.length > 0) {
+ jsonData = parsed;
+ } else {
+ // Last fallback: try to extract JSON from the whole text
+ jsonData = parsePossiblyWrappedJSON(text);
+ }
+ }
+
+ const validation = validateJsonData(jsonData, JSON.stringify(jsonData));
+
+ if (validation.allowed) {
+ // JSON Inspector rendering is now handled by json_inspector_new.js
+ // Clear warning if present
+ const warningEl = document.getElementById('json-inspector-warning');
+ if (warningEl) warningEl.style.display = 'none';
+
+ // Auto-pause live updates when file is loaded
+ if (!liveStatePaused) {
+ liveStatePaused = true;
+ liveStatePausedManually = true; // Treat file load as manual pause
+ const pauseBtn = document.getElementById('json-pause-btn');
+ if (pauseBtn) {
+ pauseBtn.textContent = '▶ Resume';
+ pauseBtn.title = 'Setze Live-Aktualisierung fort';
+ }
+ if (window.showToast) window.showToast('Live-Aktualisierung automatisch pausiert', 'info');
+ }
+ } else {
+ const treeEl = document.getElementById('json-inspector-tree');
+ if (treeEl) {
+ treeEl.innerHTML = 'Rendering blocked due to limit violation.
';
+ }
+ }
+ } catch (err) {
+ console.error('[json-inspector] Parse error:', err);
+ const statusBadge = document.getElementById('json-inspector-status');
+ const warningEl = document.getElementById('json-inspector-warning');
+ const treeEl = document.getElementById('json-inspector-tree');
+
+ if (statusBadge) {
+ statusBadge.className = 'status-badge status-error';
+ statusBadge.textContent = 'Parse error';
+ }
+ if (warningEl) {
+ warningEl.textContent = 'Fehler beim Parsen: ' + err.message;
+ warningEl.style.display = '';
+ }
+ if (treeEl) {
+ treeEl.innerHTML = 'Fehler beim Parsen der Datei ' +
+ '' + err.message + '
';
+ }
+ }
+ });
+}
+
+window.loadJsonInspectorLimits = loadJsonInspectorLimits;
+window.initJsonInspector = initJsonInspector;
+
+// ============================================
+// Live Payload - Frontend polling + rendering
+// ============================================
+
+let liveStatePollInterval = null;
+let lastLiveState = null;
+let lastLiveDeviceKeysCount = 0;
+let lastLiveSelectedDevice = null;
+let liveStatePaused = false;
+let liveStatePausedManually = false; // Track if user manually paused
+let lastMqttConnected = false; // Track MQTT connection state
+
+function updateLivePayloadUI(state) {
+ // Skip update if paused
+ if (liveStatePaused) {
+ console.log('[JSON Inspector] updateLivePayloadUI skipped - paused');
+ return;
+ }
+
+ console.log('[JSON Inspector] updateLivePayloadUI called', { hasState: !!state, device: state?.device });
+
+ if (!state) {
+ setText('liveDeviceName', '-');
+ setText('liveStatus', 'Status: -');
+ setText('liveLastUpdate', 'Letztes Update: -');
+ setText('liveJobName', 'Job: -');
+ const pb = document.getElementById('liveProgressBar'); if (pb) pb.value = 0;
+ setText('liveAmsInfo', 'AMS: -');
+ return;
+ }
+ const device = state.device || '-';
+ const ts = state.ts || null;
+ const payload = state.payload || {};
+ setText('liveDeviceName', device);
+ // determine status
+ const lastTs = ts ? Date.parse(ts) : null;
+ const now = Date.now();
+ let status = 'offline';
+ if (lastTs && (now - lastTs) < 30000) {
+ // within 30s, consider active
+ const gstate = (payload?.print?.gcode_state || payload?.gcode_state || '').toString().toLowerCase();
+ if (gstate && (gstate === 'running' || gstate === 'printing' || gstate === 'start')) status = 'printing';
+ else status = 'idle';
+ }
+ const statusText = `Status: ${status}`;
+ setText('liveStatus', statusText);
+ // show relative time (Deutsch) with full timestamp as title
+ if (ts) {
+ const rel = relativeTime(ts);
+ const full = new Date(ts).toLocaleString();
+ const el = document.getElementById('liveLastUpdate');
+ if (el) {
+ el.textContent = `Letztes Update: ${rel}`;
+ el.title = full;
+ }
+ } else {
+ setText('liveLastUpdate', 'Letztes Update: -');
+ }
+ const jobname = (payload?.job?.name) || (payload?.print?.file?.name) || payload?.subtask_name || '-';
+ setText('liveJobName', `Job: ${jobname}`);
+ const progress = Number(payload?.print?.progress || payload?.progress || 0);
+ const pb = document.getElementById('liveProgressBar');
+ const pct = Number.isFinite(progress) ? Math.max(0, Math.min(100, Math.round(progress))) : 0;
+ if (pb) pb.value = pct;
+ // show percent text next to progress bar (insert if missing)
+ let pctEl = document.getElementById('liveProgressPercent');
+ if (!pctEl) {
+ const container = document.getElementById('liveProgress');
+ if (container) {
+ pctEl = document.createElement('span');
+ pctEl.id = 'liveProgressPercent';
+ pctEl.style.cssText = 'margin-left:8px; font-weight:600; color:var(--text-dim);';
+ container.appendChild(pctEl);
+ }
+ }
+ if (pctEl) pctEl.textContent = `${pct}%`;
+ // AMS short info
+ const ams = payload?.ams || null;
+ if (ams && Array.isArray(ams) && ams.length) {
+ const first = ams[0];
+ const slot = first?.trays?.[0]?.tray_id || first?.slot || '-';
+ const material = first?.trays?.[0]?.material || first?.trays?.[0]?.tray_type || '-';
+ const color = first?.trays?.[0]?.tray_color || first?.trays?.[0]?.color || '-';
+ setText('liveAmsInfo', `AMS: slot=${slot}, material=${material}, color=${color}`);
+ } else {
+ setText('liveAmsInfo', 'AMS: -');
+ }
+ // JSON Inspector rendering is now handled by json_inspector_new.js
+}
+
+function relativeTime(ts) {
+ try {
+ const t = Date.parse(ts);
+ if (!t) return new Date(ts).toLocaleString();
+ const diff = Date.now() - t;
+ const sec = Math.floor(diff / 1000);
+ if (sec < 5) return 'gerade eben';
+ if (sec < 60) return `vor ${sec} s`;
+ const min = Math.floor(sec / 60);
+ if (min < 60) return `vor ${min} min`;
+ const h = Math.floor(min / 60);
+ if (h < 24) return `vor ${h} h`;
+ const d = Math.floor(h / 24);
+ return `vor ${d} d`;
+ } catch (e) {
+ return new Date(ts).toLocaleString();
+ }
+}
+
+async function copyJsonInspector() {
+ try {
+ const tree = document.getElementById('json-inspector-tree');
+ if (!tree) return;
+ // Prefer preformatted text content
+ const txt = tree.innerText || tree.textContent || '';
+ if (!txt) {
+ if (window.showToast) window.showToast('Kein JSON zum Kopieren vorhanden', 'warning');
+ return;
+ }
+ if (navigator.clipboard && navigator.clipboard.writeText) {
+ await navigator.clipboard.writeText(txt);
+ if (window.showToast) window.showToast('JSON kopiert', 'success');
+ } else {
+ // Fallback
+ const ta = document.createElement('textarea');
+ ta.value = txt;
+ document.body.appendChild(ta);
+ ta.select();
+ document.execCommand('copy');
+ document.body.removeChild(ta);
+ if (window.showToast) window.showToast('JSON kopiert', 'success');
+ }
+ } catch (e) {
+ console.warn('copyJsonInspector failed', e);
+ if (window.showToast) window.showToast('Kopieren fehlgeschlagen', 'error');
+ }
+}
+
+function toggleLiveStatePause() {
+ liveStatePaused = !liveStatePaused;
+ liveStatePausedManually = liveStatePaused; // Track manual pause
+ const btn = document.getElementById('json-pause-btn');
+ const statusBadge = document.getElementById('json-inspector-status');
+
+ if (liveStatePaused) {
+ // Paused
+ if (btn) {
+ btn.textContent = '▶ Resume';
+ btn.title = 'Setze Live-Aktualisierung fort';
+ }
+ if (statusBadge && statusBadge.textContent === 'Live') {
+ statusBadge.className = 'status-badge status-warn';
+ statusBadge.textContent = 'Paused';
+ }
+ if (window.showToast) window.showToast('Live-Aktualisierung pausiert', 'info');
+
+ // Stop polling
+ if (liveStatePollInterval) {
+ clearInterval(liveStatePollInterval);
+ liveStatePollInterval = null;
+ }
+ } else {
+ // Resumed/Started
+ if (btn) {
+ btn.textContent = '⏸ Pause';
+ btn.title = 'Pausiere Live-Aktualisierung';
+ }
+ if (statusBadge && (statusBadge.textContent === 'Paused' || statusBadge.textContent === 'Ready')) {
+ statusBadge.className = 'status-badge status-ok';
+ statusBadge.textContent = 'Live';
+ }
+ if (window.showToast) window.showToast('Live-Aktualisierung gestartet', 'success');
+
+ // Start polling with selector
+ startLiveStatePollingWithSelector();
+ }
+}
+
+async function checkMqttConnectionStatus() {
+ try {
+ const res = await fetch('/api/mqtt/runtime/status');
+ if (!res.ok) return false;
+ const data = await res.json();
+ const isConnected = data && data.connected === true;
+ console.log('[JSON Inspector] MQTT status check:', { connected: isConnected, data });
+ return isConnected;
+ } catch (err) {
+ console.warn('[JSON Inspector] MQTT status check failed:', err);
+ return false;
+ }
+}
+
+async function refreshLiveStateAll() {
+ try {
+ console.log('[JSON Inspector] refreshLiveStateAll - liveStatePaused:', liveStatePaused);
+
+ // Check MQTT connection status
+ const mqttConnected = await checkMqttConnectionStatus();
+
+ // Auto-pause if MQTT disconnected (but don't override manual pause/file load)
+ if (!mqttConnected && !liveStatePaused) {
+ liveStatePaused = true;
+ const btn = document.getElementById('json-pause-btn');
+ const statusBadge = document.getElementById('json-inspector-status');
+ if (btn) {
+ btn.textContent = '▶ Resume';
+ btn.title = 'Setze Live-Aktualisierung fort';
+ }
+ if (statusBadge && statusBadge.textContent === 'Live') {
+ statusBadge.className = 'status-badge status-idle';
+ statusBadge.textContent = 'No MQTT';
+ }
+ lastMqttConnected = false;
+ console.log('[JSON Inspector] Auto-paused: MQTT disconnected');
+ return;
+ }
+
+ // Auto-resume if MQTT connected AND not manually paused AND not showing file data
+ if (mqttConnected && !lastMqttConnected && liveStatePaused && !liveStatePausedManually) {
+ liveStatePaused = false;
+ const btn = document.getElementById('json-pause-btn');
+ const statusBadge = document.getElementById('json-inspector-status');
+ if (btn) {
+ btn.textContent = '⏸ Pause';
+ btn.title = 'Pausiere Live-Aktualisierung';
+ }
+ if (statusBadge && statusBadge.textContent === 'No MQTT') {
+ statusBadge.className = 'status-badge status-ok';
+ statusBadge.textContent = 'Live';
+ }
+ console.log('[JSON Inspector] Auto-resumed: MQTT connected');
+ }
+
+ lastMqttConnected = mqttConnected;
+
+ const res = await fetch('/api/live-state/');
+ if (!res.ok) {
+ console.warn('[JSON Inspector] API call failed:', res.status);
+ return;
+ }
+ const data = await res.json();
+ const keys = Object.keys(data || {});
+ console.log('[JSON Inspector] Live-state data received:', { deviceCount: keys.length, keys });
+
+ if (!keys.length) {
+ console.log('[JSON Inspector] No devices found');
+ updateLivePayloadUI(null);
+ return;
+ }
+ // pick first device for now
+ const first = data[keys[0]];
+ if (!first) {
+ console.warn('[JSON Inspector] First device has no data');
+ return;
+ }
+ console.log('[JSON Inspector] Calling updateLivePayloadUI with device:', first.device);
+ lastLiveState = first;
+ updateLivePayloadUI(first);
+ } catch (err) {
+ console.error('[JSON Inspector] refresh failed', err);
+ }
+}
+
+function startLiveStatePolling() {
+ if (liveStatePollInterval) clearInterval(liveStatePollInterval);
+ refreshLiveStateAll();
+ liveStatePollInterval = setInterval(refreshLiveStateAll, 2500);
+}
+
+// start polling when JSON inspector initialized so user sees live data
+const _origInitJsonInspector = typeof initJsonInspector === 'function' ? initJsonInspector : null;
+function _wrappedInitJsonInspector() {
+ if (_origInitJsonInspector) _origInitJsonInspector();
+ // Auto-start polling if MQTT is connected
+ initEmptyJsonInspector();
+}
+window.initJsonInspector = _wrappedInitJsonInspector;
+
+async function initEmptyJsonInspector() {
+ const treeEl = document.getElementById('json-inspector-tree');
+ const statusBadge = document.getElementById('json-inspector-status');
+ const pauseBtn = document.getElementById('json-pause-btn');
+
+ // Check if MQTT is already connected
+ try {
+ const mqttConnected = await checkMqttConnectionStatus();
+
+ if (mqttConnected) {
+ // MQTT verbunden - Auto-start live updates
+ console.log('[JSON Inspector] MQTT connected - auto-starting live updates');
+
+ if (statusBadge) {
+ statusBadge.className = 'status-badge status-ok';
+ statusBadge.textContent = 'Live';
+ }
+
+ if (pauseBtn) {
+ pauseBtn.textContent = '⏸ Pause';
+ pauseBtn.title = 'Live-Aktualisierung pausieren';
+ }
+
+ // Start polling automatically (this will populate the tree)
+ liveStatePaused = false;
+ liveStatePausedManually = false;
+ console.log('[JSON Inspector] Starting polling...');
+ startLiveStatePollingWithSelector();
+
+ } else {
+ // MQTT nicht verbunden - zeige Platzhalter
+ console.log('[JSON Inspector] MQTT not connected - showing placeholder');
+
+ if (treeEl) {
+ treeEl.innerHTML = 'Keine Daten vorhanden MQTT nicht verbunden. Lade eine Datei oder klicke auf ▶ Start
';
+ }
+
+ if (statusBadge) {
+ statusBadge.className = 'status-badge status-idle';
+ statusBadge.textContent = 'Ready';
+ }
+
+ if (pauseBtn) {
+ pauseBtn.textContent = '▶ Start';
+ pauseBtn.title = 'Starte Live-Aktualisierung';
+ }
+
+ // Set initial paused state
+ liveStatePaused = true;
+ liveStatePausedManually = false;
+ }
+ } catch (e) {
+ // Error checking MQTT status - default to paused state
+ console.warn('[JSON Inspector] Error checking MQTT status:', e);
+
+ if (treeEl) {
+ treeEl.innerHTML = 'Keine Daten vorhanden Lade eine Datei oder klicke auf ▶ Start
';
+ }
+
+ if (statusBadge) {
+ statusBadge.className = 'status-badge status-idle';
+ statusBadge.textContent = 'Ready';
+ }
+
+ if (pauseBtn) {
+ pauseBtn.textContent = '▶ Start';
+ pauseBtn.title = 'Starte Live-Aktualisierung';
+ }
+
+ liveStatePaused = true;
+ liveStatePausedManually = false;
+ }
+}
+
+async function populateLiveDeviceSelector() {
+ try {
+ const res = await fetch('/api/live-state');
+ if (!res.ok) return;
+ const data = await res.json();
+ const sel = document.getElementById('liveDeviceSelect');
+ if (!sel) return;
+ // preserve selection if possible
+ const prevSelected = sel.value || lastLiveSelectedDevice || null;
+ const prevCount = lastLiveDeviceKeysCount || 0;
+ sel.innerHTML = '';
+ const keys = Object.keys(data || {});
+ keys.forEach(k => {
+ const opt = document.createElement('option');
+ opt.value = k;
+ opt.textContent = k;
+ sel.appendChild(opt);
+ });
+ // ensure only one change listener: replace node then operate on the new element
+ const cloned = sel.cloneNode(true);
+ sel.replaceWith(cloned);
+ const newSel = document.getElementById('liveDeviceSelect');
+ if (newSel) {
+ newSel.addEventListener('change', () => {
+ const v = newSel.value;
+ lastLiveSelectedDevice = v;
+ if (v && data[v]) updateLivePayloadUI(data[v]);
+ });
+ // restore selection if still present
+ if (prevSelected && keys.includes(prevSelected)) {
+ newSel.value = prevSelected;
+ lastLiveSelectedDevice = prevSelected;
+ } else if (keys.length && keys.length > prevCount) {
+ // new device(s) added -> auto-select the last one
+ const last = keys[keys.length - 1];
+ newSel.value = last;
+ lastLiveSelectedDevice = last;
+ } else if (keys.length) {
+ newSel.value = keys[0];
+ lastLiveSelectedDevice = keys[0];
+ }
+ if (lastLiveSelectedDevice && data[lastLiveSelectedDevice]) updateLivePayloadUI(data[lastLiveSelectedDevice]);
+ }
+ lastLiveDeviceKeysCount = keys.length;
+ } catch (e) {
+ // ignore
+ }
+}
+
+// enhance polling to refresh selector list
+function startLiveStatePollingWithSelector() {
+ startLiveStatePolling();
+ if (liveStatePollInterval) clearInterval(liveStatePollInterval);
+ refreshLiveStateAll();
+ populateLiveDeviceSelector();
+ liveStatePollInterval = setInterval(async () => {
+ await refreshLiveStateAll();
+ await populateLiveDeviceSelector();
+ }, 2500);
+}
+
+console.log('✓ JSON Inspector functions registered');
+
+// ============================================
+// SERVICES TAB FUNCTIONS
+// ============================================
+
+async function loadServicesData() {
+ try {
+ const [perfRes, sysRes] = await Promise.all([
+ fetch('/api/debug/performance'),
+ fetch('/api/debug/system_status')
+ ]);
+
+ if (!perfRes.ok || !sysRes.ok) {
+ console.warn('[services] Failed to load data');
+ return;
+ }
+
+ const perfData = await perfRes.json();
+ const sysData = await sysRes.json();
+
+ updateServicesDisplay(perfData, sysData);
+ } catch (err) {
+ console.error('[services] Error loading data', err);
+ }
+}
+
+function updateServicesDisplay(perf, sys) {
+ // Runtime & Process
+ const pidEl = document.getElementById('service-pid');
+ const cpuEl = document.getElementById('service-cpu');
+ const memoryEl = document.getElementById('service-memory');
+ const threadsEl = document.getElementById('service-threads');
+ const runtimeStatusEl = document.getElementById('service-runtime-status');
+
+ if (pidEl) pidEl.textContent = 'N/A';
+ if (cpuEl && perf?.cpu_percent) cpuEl.textContent = `${perf.cpu_percent}%`;
+ if (memoryEl && perf?.ram_used_mb && perf?.ram_total_mb) {
+ const percent = ((perf.ram_used_mb / perf.ram_total_mb) * 100).toFixed(1);
+ memoryEl.textContent = `${percent}% (${perf.ram_used_mb} MB / ${perf.ram_total_mb} MB)`;
+ }
+ if (threadsEl) threadsEl.textContent = 'N/A';
+
+ if (runtimeStatusEl) {
+ runtimeStatusEl.className = 'status-badge status-ok';
+ runtimeStatusEl.textContent = 'Running';
+ }
+
+ // Server & Environment
+ const startedEl = document.getElementById('service-started');
+ const uptimeEl = document.getElementById('service-uptime');
+ const platformEl = document.getElementById('service-platform');
+ const hostnameEl = document.getElementById('service-hostname');
+ const pythonEl = document.getElementById('service-python');
+ const portEl = document.getElementById('service-port');
+ const environment = sys?.environment || {};
+ const serverInfo = environment.server || {};
+ const uptimeSeconds = Number(perf?.backend_uptime_s);
+
+ if (startedEl) {
+ if (Number.isFinite(uptimeSeconds)) {
+ const startedAtMs = Date.now() - uptimeSeconds * 1000;
+ startedEl.textContent = new Date(startedAtMs).toLocaleString();
+ } else {
+ startedEl.textContent = '-';
+ }
+ }
+ if (uptimeEl && Number.isFinite(uptimeSeconds)) {
+ const hours = Math.floor(uptimeSeconds / 3600);
+ const minutes = Math.floor((uptimeSeconds % 3600) / 60);
+ const seconds = Math.floor(uptimeSeconds % 60);
+ uptimeEl.textContent = `${hours}h ${minutes}m ${seconds}s`;
+ }
+ if (platformEl) {
+ const platformLabel = environment.platform
+ ? environment.platform_release && environment.platform_release !== environment.platform
+ ? `${environment.platform} ${environment.platform_release}`
+ : environment.platform
+ : environment.platform_details || '-';
+ platformEl.textContent = platformLabel;
+ }
+ if (hostnameEl) hostnameEl.textContent = environment.hostname || '-';
+ if (pythonEl) {
+ const pythonLine = environment.python_version ? environment.python_version.split('\n')[0] : '-';
+ pythonEl.textContent = pythonLine;
+ }
+ if (portEl) {
+ portEl.textContent =
+ serverInfo.port !== undefined && serverInfo.port !== null ? String(serverInfo.port) : '-';
+ }
+}
+
+function initServicesButtons() {
+ const restartBtn = document.getElementById('service-restart-btn');
+ if (restartBtn) {
+ restartBtn.addEventListener('click', () => {
+ alert('Backend restart functionality not implemented yet.');
+ });
+ }
+
+ const dockerUpBtn = document.getElementById('service-docker-up-btn');
+ if (dockerUpBtn) {
+ dockerUpBtn.addEventListener('click', async () => {
+ alert('Docker up functionality not implemented yet.');
+ });
+ }
+
+ const dockerDownBtn = document.getElementById('service-docker-down-btn');
+ if (dockerDownBtn) {
+ dockerDownBtn.addEventListener('click', async () => {
+ alert('Docker down functionality not implemented yet.');
+ });
+ }
+
+ const dockerStatusBtn = document.getElementById('service-docker-status-btn');
+ if (dockerStatusBtn) {
+ dockerStatusBtn.addEventListener('click', async () => {
+ alert('Docker status functionality not implemented yet.');
+ });
+ }
+
+ // Test buttons with locked state management
+ const testButtons = [
+ { id: 'service-test-smoke-btn', name: 'Smoke CRUD', endpoint: '/api/services/tests/smoke' },
+ { id: 'service-test-db-btn', name: 'DB CRUD', endpoint: '/api/services/tests/db' },
+ { id: 'service-test-all-btn', name: 'All Tests', endpoint: '/api/services/tests/all' },
+ { id: 'service-test-coverage-btn', name: 'Coverage', endpoint: '/api/services/tests/coverage' }
+ ];
+
+ testButtons.forEach(config => {
+ const btn = document.getElementById(config.id);
+ if (btn) {
+ // Store original label
+ btn.setAttribute('data-original-label', btn.textContent);
+ btn.setAttribute('data-test-name', config.name);
+
+ btn.addEventListener('click', async () => {
+ // Only allow click if button is not disabled
+ if (btn.disabled) return;
+
+ // Set to running state
+ btn.disabled = true;
+ btn.textContent = 'Running…';
+ btn.className = 'btn btn-secondary';
+
+ try {
+ const response = await fetch(config.endpoint, { method: 'POST' });
+ const data = await response.json();
+
+ // Evaluate result and lock button in final state
+ if (data.status === 'ok') {
+ btn.className = 'btn btn-success';
+ btn.textContent = 'Success';
+ if (window.showToast) {
+ window.showToast(`${config.name} erfolgreich`, 'success');
+ }
+ } else if (data.status === 'fail') {
+ btn.className = 'btn btn-error';
+ btn.textContent = 'Failed';
+ if (window.showToast) {
+ window.showToast(`${config.name} fehlgeschlagen`, 'error');
+ }
+ } else if (data.status === 'blocked') {
+ btn.className = 'btn btn-warning';
+ btn.textContent = 'Blocked';
+ if (window.showToast) {
+ window.showToast('Test konnte nicht ausgeführt werden', 'warning');
+ }
+ } else {
+ // Unknown status, treat as fail
+ btn.className = 'btn btn-error';
+ btn.textContent = 'Failed';
+ if (window.showToast) {
+ window.showToast(`${config.name} fehlgeschlagen`, 'error');
+ }
+ }
+
+ // Button stays disabled (locked)
+ } catch (error) {
+ // Network or other error - lock as blocked
+ btn.className = 'btn btn-warning';
+ btn.textContent = 'Blocked';
+ if (window.showToast) {
+ window.showToast('Test konnte nicht ausgeführt werden', 'warning');
+ }
+ // Button stays disabled (locked)
+ console.error('Test execution error:', error);
+ }
+ });
+ }
+ });
+
+ // Dependency buttons
+ const depsButtons = [
+ 'service-deps-install-btn',
+ 'service-deps-update-btn',
+ 'service-deps-list-btn',
+ 'service-deps-outdated-btn'
+ ];
+ depsButtons.forEach(id => {
+ const btn = document.getElementById(id);
+ if (btn) {
+ btn.addEventListener('click', () => {
+ alert('Dependency functionality not implemented yet.');
+ });
+ }
+ });
+}
+
+window.loadServicesData = loadServicesData;
+window.initServicesButtons = initServicesButtons;
+
+console.log('✓ Services tab functions registered');
diff --git a/app/static/favicon.ico b/app/static/favicon.ico
new file mode 100644
index 0000000..e69de29
diff --git a/app/static/js/debug_log_cards.js b/app/static/js/debug_log_cards.js
new file mode 100644
index 0000000..a0569d5
--- /dev/null
+++ b/app/static/js/debug_log_cards.js
@@ -0,0 +1,140 @@
+// Synced from frontend/static/js on 2025-12-14
+// Debug Log Cards Renderer für Pro-Log-Viewer
+// Fügt maximal 50 Log-Einträge als Karten in #log-entries ein
+
+function renderLogCards(logLines) {
+ const container = document.getElementById('log-entries');
+ container.innerHTML = '';
+ const lastLines = logLines.slice(-50);
+ lastLines.forEach((line, idx) => {
+ // Stacktrace-Erkennung
+ let hasStacktrace = false;
+ let message = line;
+ // Prüfe auf typische Stacktrace-Marker oder Zeilenumbrüche
+ if (typeof line === 'string') {
+ if (line.includes('\n')) hasStacktrace = true;
+ if (/Traceback \(most recent call last\):|File \\\"|Exception|Error:/i.test(line)) hasStacktrace = true;
+ }
+ // Zeilen splitten
+ const msgLines = line.split(/\r?\n/);
+ // Level bestimmen
+ let level = 'info';
+ if (/\bERROR|CRITICAL\b/i.test(line)) level = 'error';
+ else if (/\bWARNING|WARN\b/i.test(line)) level = 'warning';
+ else if (/\bDEBUG\b/i.test(line)) level = 'debug';
+ // Modul extrahieren (optional, falls im Text vorhanden)
+ const moduleMatch = line.match(/\[(.*?)\]/);
+ // Hauptcontainer
+ const div = document.createElement('div');
+ div.className = 'log-line';
+ div.classList.add('log-' + level);
+ div.dataset.level = level;
+ if (moduleMatch) div.dataset.module = moduleMatch[1];
+ // .log-summary
+ const summary = document.createElement('div');
+ summary.className = 'log-summary';
+ // Timestamp extrahieren (optional, falls im Text vorhanden)
+ let timestamp = '';
+ const tsMatch = line.match(/^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?)/);
+ if (tsMatch) timestamp = tsMatch[1];
+ if (timestamp) {
+ const tsSpan = document.createElement('span');
+ tsSpan.className = 'log-timestamp';
+ tsSpan.textContent = timestamp + ' ';
+ summary.appendChild(tsSpan);
+ }
+ // Level
+ const lvlSpan = document.createElement('span');
+ lvlSpan.className = 'log-level';
+ lvlSpan.textContent = level.toUpperCase() + ' ';
+ summary.appendChild(lvlSpan);
+ // Kurztext (erste Zeile)
+ const msgSpan = document.createElement('span');
+ msgSpan.className = 'log-msg-short';
+ msgSpan.textContent = msgLines[0];
+ summary.appendChild(msgSpan);
+ // Toggle-Button, falls Stacktrace
+ let toggleBtn = null;
+ if (hasStacktrace && msgLines.length > 1) {
+ toggleBtn = document.createElement('button');
+ toggleBtn.className = 'log-toggle-btn';
+ toggleBtn.textContent = 'Details anzeigen';
+ toggleBtn.onclick = function(e) {
+ e.stopPropagation();
+ const expanded = div.classList.toggle('expanded');
+ stackDiv.style.display = expanded ? 'block' : 'none';
+ toggleBtn.textContent = expanded ? 'Details ausblenden' : 'Details anzeigen';
+ // Auto-Scroll deaktivieren
+ if (typeof autoScroll !== 'undefined') {
+ autoScroll = false;
+ const btn = document.getElementById('proLogPauseBtn');
+ if (btn) btn.textContent = 'Fortsetzen';
+ }
+ };
+ summary.appendChild(toggleBtn);
+ }
+ div.appendChild(summary);
+ // .log-stacktrace
+ let stackDiv = null;
+ if (hasStacktrace && msgLines.length > 1) {
+ stackDiv = document.createElement('div');
+ stackDiv.className = 'log-stacktrace';
+ stackDiv.style.display = 'none';
+ stackDiv.textContent = msgLines.slice(1).join('\n');
+ div.appendChild(stackDiv);
+ }
+ // Klick auf ganze Zeile (außer Toggle)
+ div.onclick = function(e) {
+ if (e.target.classList.contains('log-toggle-btn')) return;
+ container.querySelectorAll('.log-line.active').forEach(e => e.classList.remove('active'));
+ div.classList.add('active');
+ // Log-Detail-Anzeige (optional)
+ const detail = document.getElementById('log-detail');
+ if (detail) detail.textContent = line;
+ // Auto-Scroll deaktivieren
+ if (typeof autoScroll !== 'undefined') {
+ autoScroll = false;
+ const btn = document.getElementById('proLogPauseBtn');
+ if (btn) btn.textContent = 'Fortsetzen';
+ }
+ };
+ container.appendChild(div);
+ });
+ // Auto-Scroll nach dem Rendern
+ if (typeof autoScroll !== 'undefined' && autoScroll) {
+ container.scrollTop = container.scrollHeight;
+ }
+}
+// window.renderLogCards = renderLogCards;
+
+function collapseAllLogStacktraces() {
+ const container = document.getElementById('log-entries');
+ container.querySelectorAll('.log-line.expanded').forEach(div => {
+ div.classList.remove('expanded');
+ const stack = div.querySelector('.log-stacktrace');
+ if (stack) stack.style.display = 'none';
+ const btn = div.querySelector('.log-toggle-btn');
+ if (btn) btn.textContent = 'Details anzeigen';
+ });
+}
+
+function expandAllLogStacktraces() {
+ const container = document.getElementById('log-entries');
+ container.querySelectorAll('.log-line').forEach(div => {
+ const stack = div.querySelector('.log-stacktrace');
+ if (stack) {
+ div.classList.add('expanded');
+ stack.style.display = 'block';
+ const btn = div.querySelector('.log-toggle-btn');
+ if (btn) btn.textContent = 'Details ausblenden';
+ }
+ });
+ if (typeof autoScroll !== 'undefined') {
+ autoScroll = false;
+ const btn = document.getElementById('proLogPauseBtn');
+ if (btn) btn.textContent = 'Fortsetzen';
+ }
+}
+
+window.collapseAllLogStacktraces = collapseAllLogStacktraces;
+window.expandAllLogStacktraces = expandAllLogStacktraces;
diff --git a/app/static/js/debug_pro_log.js b/app/static/js/debug_pro_log.js
new file mode 100644
index 0000000..198a3ab
--- /dev/null
+++ b/app/static/js/debug_pro_log.js
@@ -0,0 +1,153 @@
+// Synced from frontend/static/js on 2025-12-14
+// Lädt Logdaten für den Pro-Log-Viewer und rendert sie als Karten
+
+let proLogLines = [];
+let proLogFiltered = [];
+let autoScroll = true;
+
+function scrollLogToBottom() {
+ const container = document.getElementById('log-entries');
+ container.scrollTop = container.scrollHeight;
+}
+
+function renderProLogCards() {
+ renderLogCards(proLogFiltered);
+ if (autoScroll) scrollLogToBottom();
+}
+
+function applyProLogFilter() {
+ const level = document.getElementById('proLogFilterLevel')?.value || 'ALL';
+ const module = document.getElementById('proLogFilterModule')?.value || 'ALL';
+ const search = document.getElementById('proLogFilterSearch')?.value?.toLowerCase() || '';
+ proLogFiltered = proLogLines.filter(line => {
+ let ok = true;
+ if (level !== 'ALL') {
+ if (level === 'ERROR' && !/\b(ERROR|CRITICAL)\b/i.test(line)) ok = false;
+ if (level === 'WARNING' && !/\bWARNING\b/i.test(line)) ok = false;
+ if (level === 'INFO' && !/\bINFO\b/i.test(line)) ok = false;
+ if (level === 'DEBUG' && !/\bDEBUG\b/i.test(line)) ok = false;
+ }
+ if (module !== 'ALL' && !line.toLowerCase().includes(module.toLowerCase())) ok = false;
+ if (search && !line.toLowerCase().includes(search)) ok = false;
+ return ok;
+ });
+ renderProLogCards();
+}
+
+async function loadProLogCards() {
+ const container = document.getElementById('log-entries');
+ container.innerHTML = 'Lade Logs...';
+ try {
+ const res = await fetch('/api/debug/logs?module=app&limit=1000');
+ const data = await res.json();
+ let lines = [];
+ if (Array.isArray(data.items)) {
+ lines = data.items;
+ } else if (typeof data.lines === 'string') {
+ lines = data.lines.split('\n');
+ } else if (Array.isArray(data.lines)) {
+ lines = data.lines;
+ } else {
+ lines = [];
+ }
+ proLogLines = lines;
+ applyProLogFilter();
+ } catch (e) {
+ container.innerHTML = 'Fehler beim Laden der Logs.';
+ }
+}
+
+function observeProLogPanel() {
+ const panel = document.getElementById('panel-logs');
+ if (!panel) return;
+ const observer = new MutationObserver(() => {
+ if (panel.style.display !== 'none') {
+ loadProLogCards();
+ }
+ });
+ observer.observe(panel, { attributes: true, attributeFilter: ['style'] });
+}
+
+function setupProLogPauseButton() {
+ let btn = document.getElementById('proLogPauseBtn');
+ if (!btn) {
+ btn = document.createElement('button');
+ btn.id = 'proLogPauseBtn';
+ btn.textContent = 'Pause';
+ btn.className = 'btn btn-secondary';
+ btn.style.margin = '8px 0 8px 0';
+ btn.onclick = function() {
+ autoScroll = !autoScroll;
+ btn.textContent = autoScroll ? 'Pause' : 'Fortsetzen';
+ if (autoScroll) scrollLogToBottom();
+ };
+ const logPanel = document.getElementById('panel-logs');
+ if (logPanel) logPanel.querySelector('.panel')?.appendChild(btn);
+ }
+}
+
+function setupProLogFilterUI() {
+ const logPanel = document.getElementById('panel-logs');
+ if (!logPanel) return;
+ let filterBar = document.getElementById('proLogFilterBar');
+ if (!filterBar) {
+ filterBar = document.createElement('div');
+ filterBar.id = 'proLogFilterBar';
+ filterBar.style.display = 'flex';
+ filterBar.style.gap = '8px';
+ filterBar.style.margin = '8px 0';
+ filterBar.innerHTML = `
+
+ Level: Alle
+ Error
+ Warning
+ Info
+ Debug
+
+
+ Modul: Alle
+ App
+ Bambu
+ Klipper
+ MQTT
+
+
+ Neu laden
+ Alle einklappen
+ Alle ausklappen
+ `;
+ logPanel.querySelector('.panel')?.appendChild(filterBar);
+ }
+ document.getElementById('proLogFilterLevel').onchange = applyProLogFilter;
+ document.getElementById('proLogFilterModule').onchange = applyProLogFilter;
+ document.getElementById('proLogFilterSearch').oninput = applyProLogFilter;
+ document.getElementById('proLogReloadBtn').onclick = loadProLogCards;
+ document.getElementById('proLogCollapseAllBtn').onclick = () => {
+ if (window.collapseAllLogStacktraces) window.collapseAllLogStacktraces();
+ };
+ document.getElementById('proLogExpandAllBtn').onclick = () => {
+ if (window.expandAllLogStacktraces) window.expandAllLogStacktraces();
+ };
+}
+
+function setupProLogCardClickPause() {
+ const container = document.getElementById('log-entries');
+ container.onclick = function(e) {
+ if (e.target.classList.contains('log-line')) {
+ autoScroll = false;
+ const btn = document.getElementById('proLogPauseBtn');
+ if (btn) btn.textContent = 'Fortsetzen';
+ }
+ };
+}
+
+document.addEventListener('DOMContentLoaded', () => {
+ observeProLogPanel();
+ setupProLogPauseButton();
+ setupProLogFilterUI();
+ setupProLogCardClickPause();
+ const c2 = document.getElementById('proLogCollapseAllBtn2');
+ if (c2) c2.onclick = () => { if (window.collapseAllLogStacktraces) window.collapseAllLogStacktraces(); };
+ const e2 = document.getElementById('proLogExpandAllBtn2');
+ if (e2) e2.onclick = () => { if (window.expandAllLogStacktraces) window.expandAllLogStacktraces(); };
+});
diff --git a/app/static/js/json_renderer.js b/app/static/js/json_renderer.js
new file mode 100644
index 0000000..421a88e
--- /dev/null
+++ b/app/static/js/json_renderer.js
@@ -0,0 +1,87 @@
+(function(){
+ function copyToClipboard(text){
+ try {
+ if (navigator && navigator.clipboard && navigator.clipboard.writeText){
+ return navigator.clipboard.writeText(String(text));
+ }
+ } catch(_) {
+ // ignore
+ }
+ }
+
+ function renderJsonTree(obj, container){
+ if (!container) return;
+ container.innerHTML = '';
+ const root = buildJsonNode('(root)', obj, true);
+ container.appendChild(root);
+ }
+
+ function buildJsonNode(key, value, expanded){
+ const node = document.createElement('div');
+ node.className = 'json-node';
+
+ const row = document.createElement('div');
+ row.className = 'json-row';
+
+ const toggle = document.createElement('span');
+ toggle.className = 'json-toggle';
+
+ const keyEl = document.createElement('span');
+ keyEl.className = 'json-key';
+ keyEl.textContent = key;
+
+ const typeEl = document.createElement('span');
+ typeEl.className = 'json-type';
+
+ const copyBtn = document.createElement('button');
+ copyBtn.className = 'copy-btn';
+
+ const children = document.createElement('div');
+ children.className = 'json-children';
+
+ if (value !== null && typeof value === 'object'){
+ const isArray = Array.isArray(value);
+ typeEl.textContent = isArray ? `[${value.length}]` : '{ }';
+ copyBtn.textContent = 'Copy';
+ copyBtn.addEventListener('click', () => copyToClipboard(JSON.stringify(value, null, 2)));
+
+ toggle.textContent = expanded ? '-' : '+';
+ toggle.addEventListener('click', () => {
+ const currentlyHidden = children.style.display === 'none';
+ children.style.display = currentlyHidden ? 'block' : 'none';
+ toggle.textContent = currentlyHidden ? '-' : '+';
+ });
+
+ row.appendChild(toggle);
+ row.appendChild(keyEl);
+ row.appendChild(typeEl);
+ row.appendChild(copyBtn);
+ node.appendChild(row);
+ node.appendChild(children);
+
+ for (const k of Object.keys(value)){
+ children.appendChild(buildJsonNode(k, value[k], false));
+ }
+ children.style.display = expanded ? 'block' : 'none';
+ } else {
+ const val = document.createElement('span');
+ val.className = 'json-value';
+ val.textContent = String(value);
+
+ copyBtn.textContent = 'Copy';
+ copyBtn.addEventListener('click', () => copyToClipboard(String(value)));
+
+ row.appendChild(toggle);
+ row.appendChild(keyEl);
+ row.appendChild(document.createTextNode(':'));
+ row.appendChild(val);
+ row.appendChild(copyBtn);
+ node.appendChild(row);
+ }
+ return node;
+ }
+
+ if (typeof window !== 'undefined'){
+ window.renderJsonTree = renderJsonTree;
+ }
+})();
diff --git a/app/static/js/log_viewer_controller.js b/app/static/js/log_viewer_controller.js
new file mode 100644
index 0000000..08b17b7
--- /dev/null
+++ b/app/static/js/log_viewer_controller.js
@@ -0,0 +1,158 @@
+// Controller: loads logs from API and renders via LogViewerRenderer
+(function(){
+ // Lokaler State (ohne window.LogViewerState)
+ let allLogs = [];
+ let filteredLogs = [];
+
+ async function loadLogs(){
+ const root = document.getElementById('log-entries');
+ if (root) root.textContent = 'Lade Logs...';
+
+ // Lese aktuelles Modul aus Dropdown
+ const moduleSelect = document.getElementById('logModuleSelect');
+ const limitSelect = document.getElementById('logLimitSelect');
+ const module = moduleSelect ? moduleSelect.value : 'app';
+ const limit = limitSelect ? limitSelect.value : '500';
+
+ try{
+ const res = await fetch(`/api/debug/logs?module=${module}&limit=${limit}`);
+ const data = await res.json();
+ let items = [];
+
+ // API gibt { logs: [...], count: X, module: "..." } zurück
+ if (Array.isArray(data.logs)) {
+ items = data.logs;
+ } else if (Array.isArray(data.items)) {
+ items = data.items;
+ } else if (typeof data.lines === 'string') {
+ items = data.lines.split('\n').map(m=>({level:'INFO', module:module, message:m}));
+ } else if (Array.isArray(data.lines)) {
+ items = data.lines.map(m=>({level:'INFO', module:module, message:m}));
+ }
+
+ // Store und Render via lokale Filter
+ allLogs = items;
+ populateModuleOptions(allLogs);
+ applyFilters();
+
+ // Update Last Update Zeit
+ const lastUpdateEl = document.getElementById('logLastUpdate');
+ if (lastUpdateEl) {
+ const now = new Date();
+ lastUpdateEl.textContent = now.toLocaleTimeString('de-DE');
+ }
+ } catch (err) {
+ console.error('Fehler beim Laden der Logs:', err);
+ const rootEl = document.getElementById('log-entries');
+ if (rootEl) rootEl.textContent = `Fehler beim Laden der Logs für Modul "${module}": ${err.message}`;
+ }
+ }
+
+ function populateModuleOptions(items){
+ const modSel = document.getElementById('logModuleFilter');
+ if (!modSel) return;
+ const mods = new Set();
+ (items||[]).forEach(it => {
+ const msg = (it.message ?? it.text ?? '').toString();
+ const inferred = (msg.match(/\[(.*?)\]/)?.[1] || 'app');
+ const mod = (it.module ?? inferred).toString();
+ if (mod) mods.add(mod);
+ });
+ const prev = modSel.value;
+ const options = ['','...'].slice(0,1); // ensure empty 'Alle'
+ modSel.innerHTML = '';
+ const allOpt = document.createElement('option');
+ allOpt.value = '';
+ allOpt.textContent = 'Modul: Alle';
+ modSel.appendChild(allOpt);
+ Array.from(mods).sort((a,b)=>a.localeCompare(b)).forEach(m => {
+ const o = document.createElement('option');
+ o.value = m;
+ o.textContent = m;
+ modSel.appendChild(o);
+ });
+ // restore previous selection if still present
+ if (prev && Array.from(mods).includes(prev)) {
+ modSel.value = prev;
+ } else {
+ modSel.value = '';
+ }
+ }
+
+ function applyFilters(){
+ const level = document.getElementById('logLevelFilter')?.value || '';
+ const moduleSel = document.getElementById('logModuleFilter')?.value || '';
+ const search = (document.getElementById('logSearchInput')?.value || '').toLowerCase();
+
+ filteredLogs = (allLogs || []).filter(log => {
+ // Heuristik: Level und Modul aus Text ableiten, falls Felder fehlen
+ const rawText = (log.message || log.text || '').toString();
+ const lvl = (log.level ? String(log.level) : ( /\b(ERROR|CRITICAL)\b/i.test(rawText) ? 'ERROR' : /\bWARNING|WARN\b/i.test(rawText) ? 'WARNING' : /\bDEBUG\b/i.test(rawText) ? 'DEBUG' : 'INFO'));
+ const mod = (log.module ? String(log.module) : (rawText.match(/\[(.*?)\]/)?.[1] || 'app'));
+ if (level && lvl !== level) return false;
+ if (moduleSel && mod !== moduleSel) return false;
+ if (search) {
+ const text = rawText.toLowerCase();
+ if (!text.includes(search)) return false;
+ }
+ return true;
+ });
+
+ window.LogViewerRenderer?.renderLogs(filteredLogs);
+ }
+
+ function setupToolbar(){
+ const reload = document.getElementById('logReloadBtn');
+ const pause = document.getElementById('logPauseBtn');
+ const lvl = document.getElementById('logLevelFilter');
+ const mod = document.getElementById('logModuleFilter');
+ const q = document.getElementById('logSearchInput');
+
+ // Modul-Selector (lädt neue Logs bei Änderung)
+ const moduleSelect = document.getElementById('logModuleSelect');
+ const limitSelect = document.getElementById('logLimitSelect');
+
+ if (reload) reload.onclick = loadLogs;
+
+ // Modul-Wechsel → Logs neu laden
+ if (moduleSelect) moduleSelect.onchange = loadLogs;
+ if (limitSelect) limitSelect.onchange = loadLogs;
+
+ // Pause-Button: toggelt nur Label (reine UI, da Autoscroll lokal nicht verwaltet wird)
+ if (pause) pause.onclick = () => {
+ pause.textContent = (pause.textContent === 'Pause') ? 'Fortsetzen' : 'Pause';
+ };
+ if (lvl) lvl.onchange = applyFilters;
+ if (mod) mod.onchange = applyFilters;
+ if (q) q.oninput = applyFilters;
+ }
+
+ function observePanel(){
+ const panel = document.getElementById('panel-logs');
+ if (!panel) return;
+ const obs = new MutationObserver(() => {
+ if (panel.style.display !== 'none') loadLogs();
+ });
+ obs.observe(panel, { attributes: true, attributeFilter: ['style'] });
+ }
+
+ function setupClickPause(){
+ const container = document.getElementById('log-entries');
+ if (!container) return;
+ container.addEventListener('click', (e) => {
+ if (e.target.classList.contains('log-toggle') || e.target.classList.contains('log-line')) {
+ const pause = document.getElementById('logPauseBtn');
+ if (pause) pause.textContent = 'Fortsetzen';
+ }
+ });
+ }
+
+ document.addEventListener('DOMContentLoaded', () => {
+ setupToolbar();
+ observePanel();
+ setupClickPause();
+ loadLogs();
+ });
+
+ window.LogViewerController = { loadLogs };
+})();
\ No newline at end of file
diff --git a/app/static/js/log_viewer_renderer.js b/app/static/js/log_viewer_renderer.js
new file mode 100644
index 0000000..23f33ba
--- /dev/null
+++ b/app/static/js/log_viewer_renderer.js
@@ -0,0 +1,131 @@
+// Renderer for new Log Viewer (DOM only, no state)
+// Minimal window-based renderer (no modules)
+(function(){
+ function formatTimestamp(ts){
+ if (!ts) return '';
+ try {
+ const d = new Date(ts);
+ if (isNaN(d.getTime())) return '';
+ const hh = String(d.getHours()).padStart(2,'0');
+ const mm = String(d.getMinutes()).padStart(2,'0');
+ const ss = String(d.getSeconds()).padStart(2,'0');
+ return `${hh}:${mm}:${ss}`;
+ } catch { return ''; }
+ }
+ function splitMessage(message){
+ const text = (message ?? '').toString();
+ const parts = text.split(/\r?\n/);
+ return { first: parts[0] ?? '', rest: parts.slice(1).join('\n') };
+ }
+ function normalizeLevel(level){
+ const lv = (level ?? 'INFO').toString().toUpperCase();
+ return lv === 'WARN' ? 'WARNING' : lv;
+ }
+ function levelClass(level){
+ const lv = normalizeLevel(level);
+ if (lv === 'ERROR' || lv === 'CRITICAL') return 'log-error';
+ if (lv === 'WARNING') return 'log-warning';
+ if (lv === 'DEBUG') return 'log-debug';
+ return 'log-info';
+ }
+ function renderLogs(items){
+ const root = document.getElementById('log-entries');
+ if (!root) return;
+ root.innerHTML = '';
+ (items || []).forEach(it => {
+ const lv = normalizeLevel(it.level);
+ const mod = (it.module ?? 'app').toString();
+ const ts = it.timestamp || it.time || it.created_at || null;
+ const msg = (it.message ?? it.text ?? '').toString();
+ const { first, rest } = splitMessage(msg);
+
+ const line = document.createElement('div');
+ line.className = `log-line ${levelClass(lv)}`;
+ line.dataset.level = lv;
+ line.dataset.module = mod;
+
+ const summary = document.createElement('div');
+ summary.className = 'log-summary';
+
+ const lvlEl = document.createElement('span');
+ lvlEl.className = 'log-level';
+ lvlEl.textContent = lv;
+
+ const tsText = formatTimestamp(ts);
+ let tsEl = null;
+ if (tsText) {
+ tsEl = document.createElement('span');
+ tsEl.className = 'log-timestamp';
+ tsEl.textContent = tsText;
+ }
+
+ const msgEl = document.createElement('span');
+ msgEl.className = 'log-message';
+ msgEl.textContent = first;
+
+ summary.appendChild(lvlEl);
+ if (tsEl) summary.appendChild(tsEl);
+ summary.appendChild(msgEl);
+
+ let toggleBtn = null;
+ let toggleIcon = null;
+ let stackEl = null;
+ if (rest.trim().length > 0){
+ toggleBtn = document.createElement('button');
+ toggleBtn.className = 'log-toggle';
+ toggleBtn.type = 'button';
+ toggleBtn.textContent = 'Details';
+ stackEl = document.createElement('pre');
+ stackEl.className = 'log-stacktrace';
+ stackEl.textContent = rest;
+ toggleBtn.addEventListener('click', (e) => {
+ e.preventDefault();
+ e.stopPropagation();
+ const isOpen = line.classList.toggle('expanded');
+ toggleBtn.textContent = isOpen ? 'Schließen' : 'Details';
+ if (toggleIcon) toggleIcon.textContent = isOpen ? '▾' : '▸';
+ });
+ // Klick auf gesamte Zeile/summary toggelt ebenfalls
+ line.addEventListener('click', () => {
+ const isOpen = line.classList.toggle('expanded');
+ if (toggleBtn) toggleBtn.textContent = isOpen ? 'Schließen' : 'Details';
+ if (toggleIcon) toggleIcon.textContent = isOpen ? '▾' : '▸';
+ });
+ summary.addEventListener('click', (e) => {
+ // Falls Button geklickt, ist bereits behandelt
+ if (e.target === toggleBtn) return;
+ const isOpen = line.classList.toggle('expanded');
+ if (toggleBtn) toggleBtn.textContent = isOpen ? 'Schließen' : 'Details';
+ if (toggleIcon) toggleIcon.textContent = isOpen ? '▾' : '▸';
+ });
+ // dezentes Icon rechts zur visuellen Andeutung
+ toggleIcon = document.createElement('span');
+ toggleIcon.className = 'log-toggle-icon';
+ toggleIcon.textContent = '▸';
+ summary.appendChild(toggleBtn);
+ summary.appendChild(toggleIcon);
+ } else {
+ // Einstzeilige Logs: Zeilenklick toggelt und zeigt eine dezente Detailfläche
+ stackEl = document.createElement('pre');
+ stackEl.className = 'log-stacktrace';
+ stackEl.textContent = 'Keine weiteren Details';
+ // dezentes Icon rechts
+ toggleIcon = document.createElement('span');
+ toggleIcon.className = 'log-toggle-icon';
+ toggleIcon.textContent = '▸';
+ line.addEventListener('click', () => {
+ line.classList.toggle('expanded');
+ if (toggleIcon) toggleIcon.textContent = line.classList.contains('expanded') ? '▾' : '▸';
+ });
+ summary.addEventListener('click', () => {
+ line.classList.toggle('expanded');
+ if (toggleIcon) toggleIcon.textContent = line.classList.contains('expanded') ? '▾' : '▸';
+ });
+ }
+ line.appendChild(summary);
+ if (stackEl) line.appendChild(stackEl);
+ root.appendChild(line);
+ });
+ }
+ window.LogViewerRenderer = { renderLogs };
+})();
\ No newline at end of file
diff --git a/app/static/js/mqtt-connect-handler.js b/app/static/js/mqtt-connect-handler.js
new file mode 100644
index 0000000..61fca38
--- /dev/null
+++ b/app/static/js/mqtt-connect-handler.js
@@ -0,0 +1,421 @@
+// MQTT Connection Handler - Vollständige Implementation
+// Passt zu den IDs in deinem HTML: mqttBroker, mqttPort, mqttClientId, etc.
+
+
+// Handler-Vereinheitlichung: Nur handleMQTTConnect(), Endpoints vereinheitlicht, IDs bereinigt, Logging und Payload exakt wie gefordert, UI-State nur über Status-GET
+(function() {
+ 'use strict';
+
+ // EINZIGE Connect-Funktion mit Schnellauswahl-Priorisierung
+ async function handleMQTTConnect() {
+ console.log('🔌 Connect geklickt!');
+
+ const printerId = document.getElementById('mqttPrinterDropdown')?.value;
+ let payload = {};
+ let mode = 'manual';
+ if (printerId && printerId !== '') {
+ // PRIORITÄT 1: Schnellauswahl aktiv
+ mode = 'printer';
+ payload = {
+ printer_id: printerId,
+ use_printer_config: true
+ };
+ console.log('📦 Payload (printer):', payload);
+ } else {
+ // PRIORITÄT 2: Manuelle Felder
+ const broker = document.getElementById('mqttBroker')?.value?.trim() || '';
+ const port = document.getElementById('mqttPort')?.value?.trim() || '';
+ const clientId = document.getElementById('mqttClientId')?.value?.trim() || '';
+ const username = document.getElementById('mqttUsername')?.value?.trim() || '';
+ const password = document.getElementById('mqttPassword')?.value?.trim() || '';
+ const tls = document.getElementById('mqttTls')?.checked || false;
+ const protocol = document.getElementById('mqttProtocol')?.value || '311';
+
+ // Validierung
+ if (!broker) {
+ alert('Broker-Adresse fehlt!');
+ return;
+ }
+ if (!port) {
+ alert('Port fehlt!');
+ return;
+ }
+ if (!clientId) {
+ alert('Client-ID fehlt!');
+ return;
+ }
+
+ payload = {
+ broker: broker,
+ port: Number(port),
+ client_id: clientId,
+ username: username ? username : null,
+ password: password ? password : null,
+ tls: !!tls,
+ protocol: protocol === '5' ? '5' : '311'
+ };
+ console.log('📦 Payload (manual):', payload);
+ }
+ console.log('📡 POST /api/mqtt/runtime/connect');
+
+ // POST an EINEN Endpoint
+ try {
+ const response = await fetch('/api/mqtt/runtime/connect', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload)
+ });
+ // Nach POST IMMER Status holen
+ await refreshMQTTStatus();
+ } catch (error) {
+ console.error('MQTT Connect Error:', error);
+ alert('Fehler beim Verbindungsaufbau: ' + error.message);
+ }
+ }
+
+ // Disconnect-Funktion (Endpoint vereinheitlicht)
+ async function handleMQTTDisconnect() {
+ try {
+ await fetch('/api/mqtt/runtime/disconnect', { method: 'POST' });
+ await refreshMQTTStatus();
+ } catch (error) {
+ console.error('MQTT Disconnect Error:', error);
+ alert('Fehler beim Trennen: ' + error.message);
+ }
+ }
+
+ // Status-Update Funktion (aktualisiert ALLE Status-Badges konsistent)
+ function updateStatus(state, text, detail) {
+ // Harte Normalisierung: nur zwei sichtbare Texte erlaubt.
+ // state === 'connected' -> '🟢 Verbunden'
+ // sonst -> '⚫ Nicht verbunden'
+ try {
+ const normalizedText = (state === 'connected') ? '🟢 Verbunden' : '⚫ Nicht verbunden';
+ const normalizedClass = (state === 'connected') ? 'status-ok' : 'status-error';
+
+ // Haupt-Status-Badge (System Status Tab)
+ const badge = document.getElementById('mqttStatus');
+ if (badge) {
+ badge.textContent = normalizedText;
+ badge.title = (state === 'connected') ? (detail || 'MQTT verbunden') : (detail || 'Nicht verbunden');
+ badge.classList.remove('status-ok', 'status-warn', 'status-error', 'status-idle');
+ badge.classList.add(normalizedClass);
+ }
+
+ // Pro-Mode MQTT Detail Status
+ const proMqttStatus = document.getElementById('proMqttStatus');
+ if (proMqttStatus) {
+ proMqttStatus.textContent = (state === 'connected') ? 'connected' : 'disconnected';
+ }
+
+ // Buttons: Connect/Disconnect aktivieren/deaktivieren
+ const connectBtn = document.getElementById('mqttConnectBtn');
+ const disconnectBtn = document.getElementById('mqttDisconnectBtn');
+ if (connectBtn) connectBtn.disabled = (state === 'connected');
+ if (disconnectBtn) disconnectBtn.disabled = (state !== 'connected');
+
+ console.log('🎯 Status-Badges aktualisiert:', normalizedText);
+ } catch (err) {
+ console.error('updateStatus error:', err);
+ }
+ }
+
+ // Topics anzeigen (optional)
+ function displayTopics(topics) {
+ // ...existing code...
+ }
+
+ // Drucker-Dropdown Handler (Schnellauswahl)
+ function handlePrinterSelect() {
+ // ...existing code...
+ }
+
+ // Bambu Topics Button Handler
+ async function handleBambuTopics() {
+ // ...existing code...
+ }
+
+ // Event-Listener Setup
+ function setupEventListeners() {
+ // Connect Button
+ const connectBtn = document.getElementById('mqttConnectBtn');
+ if (connectBtn) {
+ connectBtn.addEventListener('click', handleMQTTConnect);
+ }
+
+ // Disconnect Button
+ const disconnectBtn = document.getElementById('mqttDisconnectBtn');
+ if (disconnectBtn) {
+ disconnectBtn.addEventListener('click', handleMQTTDisconnect);
+ }
+
+ // Drucker-Dropdown
+ const printerDropdown = document.getElementById('mqttPrinterDropdown');
+ if (printerDropdown) {
+ printerDropdown.addEventListener('change', handlePrinterSelect);
+ }
+
+ // Bambu Topics Button
+ const topicsBtn = document.getElementById('mqttTopicsBtn');
+ if (topicsBtn) {
+ topicsBtn.addEventListener('click', handleBambuTopics);
+ }
+
+ // Enter-Taste in Passwort-Feld = Connect
+ const passwordInput = document.getElementById('mqttPassword');
+ if (passwordInput) {
+ passwordInput.addEventListener('keypress', (e) => {
+ if (e.key === 'Enter') {
+ handleMQTTConnect();
+ }
+ });
+ }
+ }
+
+ // Initialisierung
+ function init() {
+ setupEventListeners();
+ console.log('MQTT Connection Handler initialisiert');
+ // Initial-Status setzen
+ updateStatus('disconnected', 'Nicht verbunden', 'Bereit zur Verbindung');
+
+ // 🔴 DAS WAR DER FEHLENDE TEIL
+ // Status regelmäßig vom Backend holen
+ refreshMQTTStatus(); // sofort einmal
+ setInterval(refreshMQTTStatus, 2000); // dann alle 2 Sekunden
+ }
+
+ // Bei DOM-Ready initialisieren
+ if (document.readyState === 'loading') {
+ document.addEventListener('DOMContentLoaded', init);
+ } else {
+ init();
+ }
+
+ // Export für externe Nutzung
+ window.MQTTConnectionHandler = {
+ connect: handleMQTTConnect,
+ disconnect: handleMQTTDisconnect,
+ updateStatus: updateStatus,
+ refresh: refreshMQTTStatus,
+ updateOverview: updateOverviewFields // NEU: Overview-Update exportieren
+ };
+
+ // --- Hilfsfunktion: Status holen und ALLE Overview-Felder aktualisieren ---
+async function refreshMQTTStatus() {
+ try {
+ const response = await fetch('/api/mqtt/runtime/status');
+ const data = await response.json();
+
+ if (data && data.connected === true) {
+ // Status-Badge aktualisieren
+ updateStatus(
+ 'connected',
+ 'Verbunden',
+ 'MQTT verbunden'
+ );
+
+ // MQTT Overview Felder aktualisieren
+ updateOverviewFields({
+ connected: true,
+ broker: data.broker,
+ port: data.port,
+ client_id: data.client_id,
+ message_count: data.message_count,
+ last_message_time: data.last_message_time,
+ qos: data.qos,
+ uptime: data.uptime,
+ connected_since: data.connected_since,
+ subscriptions_count: data.subscriptions_count || data.topics_count
+ });
+
+ // Setze globale Variablen für Topics-Polling
+ if (typeof window._mqttLastConnected !== 'undefined') {
+ window._mqttLastConnected = true;
+ } else {
+ window._mqttLastConnected = true; // Erstelle falls nicht vorhanden
+ }
+
+ // Start Topics & Messages Polling when connected
+ if (typeof window._syncTopicsPolling === 'function') {
+ window._syncTopicsPolling();
+ }
+ if (typeof window._syncMessagesPolling === 'function') {
+ window._syncMessagesPolling();
+ }
+ if (typeof window._syncDetailsPoll === 'function') {
+ window._syncDetailsPoll();
+ }
+
+ // Trigger Topics-Refresh wenn Tab aktiv
+ if (typeof window.refreshMQTTTopics === 'function') {
+ // Prüfe ob MQTT-Tab aktiv ist
+ const mqttPanel = document.getElementById('panel-mqtt');
+ const isMqttTabActive = mqttPanel && mqttPanel.style.display !== 'none';
+
+ if (isMqttTabActive || window._mqttTabActive === true) {
+ window.refreshMQTTTopics().catch(() => {});
+ }
+ }
+ } else {
+ // Getrennt
+ updateStatus(
+ 'disconnected',
+ 'Nicht verbunden',
+ 'Bereit zur Verbindung'
+ );
+
+ // Overview-Felder zurücksetzen
+ updateOverviewFields({ connected: false });
+
+ // Setze globale Variable
+ if (typeof window._mqttLastConnected !== 'undefined') {
+ window._mqttLastConnected = false;
+ } else {
+ window._mqttLastConnected = false;
+ }
+
+ // Stop Topics & Messages Polling when disconnected
+ if (typeof window._syncTopicsPolling === 'function') {
+ window._syncTopicsPolling();
+ }
+ if (typeof window._syncMessagesPolling === 'function') {
+ window._syncMessagesPolling();
+ }
+ if (typeof window._syncDetailsPoll === 'function') {
+ window._syncDetailsPoll();
+ }
+ }
+ } catch (error) {
+ console.error('Fehler beim Status-Refresh:', error);
+ updateStatus(
+ 'disconnected',
+ 'Nicht verbunden',
+ 'Status nicht erreichbar'
+ );
+ updateOverviewFields({ connected: false });
+
+ if (typeof window._mqttLastConnected !== 'undefined') {
+ window._mqttLastConnected = false;
+ } else {
+ window._mqttLastConnected = false;
+ }
+ }
+}
+
+// --- Neue Funktion: Overview-Felder aktualisieren ---
+function updateOverviewFields({ connected, broker, port, client_id, message_count, last_message_time, qos, uptime, connected_since, subscriptions_count }) {
+ // MQTT Overview Status Badge (im MQTT Tab)
+ const statusBadge = document.getElementById('mqttStatusBadge');
+ if (statusBadge) {
+ if (connected) {
+ statusBadge.textContent = 'Verbunden';
+ statusBadge.className = 'status-badge status-ok';
+ } else {
+ statusBadge.textContent = 'Nicht verbunden';
+ statusBadge.className = 'status-badge status-error';
+ }
+ }
+
+ // Broker
+ const brokerValue = document.getElementById('mqttBrokerValue');
+ if (brokerValue) {
+ brokerValue.textContent = connected && broker && port
+ ? `${broker}:${port}`
+ : '-';
+ }
+
+ // Client ID
+ const clientIdValue = document.getElementById('mqttClientIdValue');
+ if (clientIdValue) {
+ clientIdValue.textContent = connected && client_id ? client_id : '-';
+ }
+
+ // Subscriptions Count (NEU)
+ const subsCount = document.getElementById('mqttSubscriptionsCount');
+ if (subsCount) {
+ subsCount.textContent = connected && subscriptions_count !== undefined
+ ? String(subscriptions_count)
+ : '-';
+ }
+
+ // Nachrichten empfangen
+ const msgCount = document.getElementById('mqttMsgCount');
+ if (msgCount) {
+ msgCount.textContent = connected && message_count !== undefined
+ ? String(message_count)
+ : '-';
+ }
+
+ // Letzte Nachricht
+ const lastMsgTime = document.getElementById('mqttLastMsgTime');
+ if (lastMsgTime) {
+ lastMsgTime.textContent = connected && last_message_time
+ ? last_message_time
+ : '-';
+ }
+
+ // QoS Level
+ const qosEl = document.getElementById('mqttQos');
+ if (qosEl) {
+ qosEl.textContent = connected && qos !== undefined
+ ? String(qos)
+ : '-';
+ }
+
+ // Verbindungsdauer
+ const uptimeEl = document.getElementById('mqttUptime');
+ if (uptimeEl) {
+ if (connected) {
+ if (uptime) {
+ uptimeEl.textContent = uptime;
+ } else if (connected_since) {
+ uptimeEl.textContent = `seit ${connected_since}`;
+ } else {
+ uptimeEl.textContent = '-';
+ }
+ } else {
+ uptimeEl.textContent = '-';
+ }
+ }
+
+ // Pro-Mode Connection Badge
+ const connBadge = document.getElementById('mqttConnBadge');
+ if (connBadge) {
+ if (connected) {
+ connBadge.textContent = 'Verbunden';
+ connBadge.className = 'status-badge status-ok';
+ } else {
+ connBadge.textContent = 'Nicht verbunden';
+ connBadge.className = 'status-badge status-error';
+ }
+ }
+
+ // Pro-Mode Connection Dot
+ const connDot = document.getElementById('mqttConnDot');
+ if (connDot) {
+ connDot.classList.remove('dot-ok', 'dot-error', 'dot-idle');
+ connDot.classList.add(connected ? 'dot-ok' : 'dot-idle');
+ }
+
+ // Pro-Mode Status Text
+ const connText = document.getElementById('mqttConnText');
+ if (connText) {
+ if (connected && broker && port) {
+ const detail = `Verbunden mit ${broker}:${port}`;
+ const since = connected_since ? ` (seit ${connected_since})` : '';
+ connText.textContent = detail + since;
+ } else {
+ connText.textContent = 'Nicht verbunden';
+ }
+ }
+
+ console.log('📊 Overview-Felder aktualisiert:', {
+ connected, broker, port, client_id,
+ message_count, subscriptions_count,
+ last_message_time, qos
+ });
+}
+
+
+})();
diff --git a/app/static/json_inspector_new.js b/app/static/json_inspector_new.js
new file mode 100644
index 0000000..875386e
--- /dev/null
+++ b/app/static/json_inspector_new.js
@@ -0,0 +1,199 @@
+// ============================================
+// NEUER SAUBERER JSON INSPECTOR
+// ============================================
+
+console.log('🔥 JSON Inspector NEU geladen:', new Date().toISOString());
+
+let jsonInspectorPollInterval = null;
+let jsonInspectorPaused = true;
+
+// Simple JSON Tree Renderer
+function renderJsonInspectorTree(data) {
+ const container = document.getElementById('json-inspector-tree');
+ if (!container) {
+ console.error('❌ json-inspector-tree Element nicht gefunden!');
+ return;
+ }
+
+ console.log('✅ Rendere JSON Tree, Daten-Keys:', Object.keys(data || {}).length);
+
+ container.innerHTML = '';
+ container.style.maxHeight = '500px';
+ container.style.overflow = 'auto';
+ container.style.padding = '10px';
+ container.style.fontFamily = 'monospace';
+ container.style.fontSize = '13px';
+
+ function createNode(key, value, level = 0) {
+ const div = document.createElement('div');
+ div.style.paddingLeft = (level * 20) + 'px';
+ div.style.margin = '2px 0';
+
+ if (value === null) {
+ div.innerHTML = `${key}: null `;
+ } else if (typeof value === 'object' && !Array.isArray(value)) {
+ div.innerHTML = `${key} { `;
+ container.appendChild(div);
+ Object.keys(value).forEach(k => createNode(k, value[k], level + 1));
+ const close = document.createElement('div');
+ close.style.paddingLeft = (level * 20) + 'px';
+ close.innerHTML = '} ';
+ container.appendChild(close);
+ return;
+ } else if (Array.isArray(value)) {
+ div.innerHTML = `${key} [${value.length}] `;
+ container.appendChild(div);
+ value.forEach((item, i) => createNode(`[${i}]`, item, level + 1));
+ return;
+ } else if (typeof value === 'string') {
+ div.innerHTML = `${key}: "${value}" `;
+ } else if (typeof value === 'number') {
+ div.innerHTML = `${key}: ${value} `;
+ } else if (typeof value === 'boolean') {
+ div.innerHTML = `${key}: ${value} `;
+ } else {
+ div.innerHTML = `${key}: ${value}`;
+ }
+
+ container.appendChild(div);
+ }
+
+ createNode('root', data, 0);
+ console.log('✅ JSON Tree gerendert');
+}
+
+// Polling-Funktion
+async function pollLiveState() {
+ if (jsonInspectorPaused) {
+ console.log('⏸ Polling pausiert');
+ return;
+ }
+
+ try {
+ const res = await fetch('/api/live-state/');
+ if (!res.ok) {
+ console.warn('❌ Live-State API Fehler:', res.status);
+ return;
+ }
+
+ const data = await res.json();
+ const devices = Object.keys(data);
+
+ console.log('📥 Live-State empfangen:', devices.length, 'Geräte');
+
+ if (devices.length === 0) {
+ const container = document.getElementById('json-inspector-tree');
+ if (container) {
+ container.innerHTML = 'Keine Geräte gefunden
';
+ }
+ return;
+ }
+
+ // Erstes Gerät nehmen
+ const firstDevice = data[devices[0]];
+ if (firstDevice && firstDevice.payload) {
+ renderJsonInspectorTree(firstDevice.payload);
+
+ // Status-Badge aktualisieren
+ const badge = document.getElementById('json-inspector-status');
+ if (badge) {
+ badge.className = 'status-badge status-ok';
+ badge.textContent = 'Live';
+ }
+ }
+ } catch (err) {
+ console.error('❌ Polling Fehler:', err);
+ }
+}
+
+// Start/Stop Funktionen
+function startJsonInspectorPolling() {
+ console.log('▶️ Starte JSON Inspector Polling');
+ jsonInspectorPaused = false;
+
+ const btn = document.getElementById('json-pause-btn');
+ if (btn) {
+ btn.textContent = '⏸ Pause';
+ btn.title = 'Pausieren';
+ }
+
+ pollLiveState(); // Sofort einmal aufrufen
+
+ if (jsonInspectorPollInterval) clearInterval(jsonInspectorPollInterval);
+ jsonInspectorPollInterval = setInterval(pollLiveState, 2500);
+}
+
+function stopJsonInspectorPolling() {
+ console.log('⏸ Stoppe JSON Inspector Polling');
+ jsonInspectorPaused = true;
+
+ const btn = document.getElementById('json-pause-btn');
+ if (btn) {
+ btn.textContent = '▶ Start';
+ btn.title = 'Starten';
+ }
+
+ if (jsonInspectorPollInterval) {
+ clearInterval(jsonInspectorPollInterval);
+ jsonInspectorPollInterval = null;
+ }
+}
+
+function toggleJsonInspectorPolling() {
+ if (jsonInspectorPaused) {
+ startJsonInspectorPolling();
+ } else {
+ stopJsonInspectorPolling();
+ }
+}
+
+// Auto-Start wenn MQTT verbunden
+async function initJsonInspector() {
+ console.log('🔧 Initialisiere JSON Inspector');
+
+ // Button-Event
+ const pauseBtn = document.getElementById('json-pause-btn');
+ if (pauseBtn) {
+ pauseBtn.addEventListener('click', toggleJsonInspectorPolling);
+ console.log('✅ Pause-Button registriert');
+ }
+
+ // Start-Button
+ const startBtn = document.getElementById('json-start-btn');
+ if (startBtn) {
+ startBtn.addEventListener('click', () => {
+ if (jsonInspectorPaused) {
+ startJsonInspectorPolling();
+ }
+ });
+ console.log('✅ Start-Button registriert');
+ }
+
+ // MQTT-Status prüfen
+ try {
+ const res = await fetch('/api/mqtt/runtime/status');
+ const data = await res.json();
+
+ if (data && data.connected === true) {
+ console.log('🟢 MQTT verbunden - Auto-Start');
+ startJsonInspectorPolling();
+ } else {
+ console.log('⚫ MQTT nicht verbunden');
+ const container = document.getElementById('json-inspector-tree');
+ if (container) {
+ container.innerHTML = 'MQTT nicht verbunden Klicke auf ▶ Start zum Testen
';
+ }
+ }
+ } catch (err) {
+ console.error('❌ MQTT Status Check fehlgeschlagen:', err);
+ }
+}
+
+// Warten bis DOM bereit ist
+if (document.readyState === 'loading') {
+ document.addEventListener('DOMContentLoaded', initJsonInspector);
+} else {
+ initJsonInspector();
+}
+
+console.log('✅ JSON Inspector Modul geladen');
diff --git a/app/static/logs.css b/app/static/logs.css
new file mode 100644
index 0000000..d6f1b85
--- /dev/null
+++ b/app/static/logs.css
@@ -0,0 +1,38 @@
+body {
+ margin: 0;
+ background: #121212;
+ color: #e5e5e5;
+ font-family: Consolas, monospace;
+}
+
+#container {
+ width: 95%;
+ margin: 20px auto;
+}
+
+header h1 {
+ color: #00ffae;
+ text-align: center;
+}
+
+#controls {
+ display: flex;
+ gap: 10px;
+ margin-bottom: 15px;
+ align-items: center;
+ flex-wrap: wrap;
+}
+
+#logOutput {
+ background: #1b1b1b;
+ padding: 15px;
+ border-radius: 6px;
+ height: 70vh;
+ overflow-y: auto;
+ white-space: pre-wrap;
+ font-size: 14px;
+}
+
+.error { color: #ff4e4e; }
+.warning { color: #ffbf00; }
+.info { color: #00d9ff; }
diff --git a/app/static/logs.js b/app/static/logs.js
new file mode 100644
index 0000000..6e210f6
--- /dev/null
+++ b/app/static/logs.js
@@ -0,0 +1,122 @@
+let ws = null;
+const output = document.getElementById("logOutput");
+const moduleSelect = document.getElementById("moduleSelect");
+const dateSelect = document.getElementById("dateSelect");
+const filterSelect = document.getElementById("filterSelect");
+const zoomSlider = document.getElementById("zoomSlider");
+
+
+// -------------------------------
+// Log normal laden (REST API)
+// -------------------------------
+async function loadLog() {
+ output.innerHTML = "Lade...";
+
+ const module = moduleSelect.value;
+ const date = dateSelect.value;
+
+ let lines = [];
+ if (module === "mqtt") {
+ // MQTT-Log ist zu groß - zeige Warnung und nutze Live-Stream
+ output.innerHTML = `
+
+
⚠️ MQTT-Log zu groß
+
Die MQTT-Logdatei ist zu groß zum vollständigen Laden.
+
Nutze den Live-Stream Button um neue Nachrichten zu sehen.
+
+ Oder lösche die alte Logdatei im Services-Tab und lade neu.
+
+
+ `;
+ return;
+ } else {
+ const endpoint = date
+ ? `/api/logs/date/${date}?module=${module}`
+ : `/api/logs/today?module=${module}`;
+
+ const res = await fetch(endpoint);
+ const data = await res.json();
+ // Falls lines ein String ist (statt Array), splitten
+ if (typeof data.lines === "string") {
+ lines = data.lines.split('\n');
+ } else {
+ lines = data.lines;
+ }
+ }
+
+ output.innerHTML = "";
+ applyLogLines(lines);
+}
+
+
+// -------------------------------
+// Live Log Stream (WebSocket)
+// -------------------------------
+function startLive() {
+ const module = moduleSelect.value;
+
+ if (ws) {
+ ws.close();
+ ws = null;
+ }
+
+ // Für MQTT: Zeige nur neue Zeilen (tail=0), für andere: zeige letzte 100 Zeilen
+ const tail = module === "mqtt" ? 0 : 100;
+ ws = new WebSocket(`ws://${location.host}/api/mqtt/ws/logs/${module}?tail=${tail}`);
+
+ ws.onopen = () => {
+ output.innerHTML = `✅ Live-Stream verbunden - warte auf neue Nachrichten...
`;
+ };
+
+ ws.onmessage = (event) => {
+ appendLogLine(event.data);
+ };
+
+ ws.onerror = () => {
+ output.innerHTML += `❌ Verbindungsfehler
`;
+ };
+}
+
+
+// -------------------------------
+// Log Rendering + Filter
+// -------------------------------
+function applyLogLines(lines) {
+ output.innerHTML = "";
+ // Zeige nur die letzten 50 Einträge
+ const lastLines = lines.slice(-50);
+ lastLines.forEach(appendLogLine);
+}
+
+function appendLogLine(line) {
+ const filter = filterSelect.value;
+ if (filter !== "ALL" && !line.includes(filter)) return;
+
+ // Zeilenumbruch und bessere Lesbarkeit
+ const div = document.createElement("div");
+ div.style.wordBreak = "break-all";
+ div.style.whiteSpace = "pre-wrap";
+ div.style.padding = "2px 0";
+ if (line.includes("ERROR")) div.className = "error";
+ else if (line.includes("WARNING")) div.className = "warning";
+ else div.className = "info";
+ div.textContent = line;
+ output.appendChild(div);
+ // Auto-scroll down
+ output.scrollTop = output.scrollHeight;
+}
+
+
+// -------------------------------
+// Zoom
+// -------------------------------
+zoomSlider.oninput = () => {
+ output.style.fontSize = `${zoomSlider.value}px`;
+};
+
+
+// -------------------------------
+// Events
+// -------------------------------
+document.getElementById("loadBtn").onclick = loadLog;
+document.getElementById("liveBtn").onclick = startLive;
diff --git a/app/static/uploads/printers/417ee274-2e28-46d3-8185-febab0e41e60.png b/app/static/uploads/printers/417ee274-2e28-46d3-8185-febab0e41e60.png
new file mode 100644
index 0000000..b7a94aa
Binary files /dev/null and b/app/static/uploads/printers/417ee274-2e28-46d3-8185-febab0e41e60.png differ
diff --git a/app/templates/ams_help.html b/app/templates/ams_help.html
new file mode 100644
index 0000000..8f90bd5
--- /dev/null
+++ b/app/templates/ams_help.html
@@ -0,0 +1,371 @@
+
+
+
+
+ AMS Helper
+
+
+
+
+
AMS Helper
+
Zeigt die AMS-Slots aus der letzten MQTT-Report-Nachricht (device/<serial>/report), plus Job-Zusammenfassung.
+
+
+
+ MQTT-Topic (Report)
+
+
+
+ Neu laden
+
+
+
Warte auf Laden...
+
+
+
+
+ Rohdaten der letzten Nachricht
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/templates/debug.html b/app/templates/debug.html
new file mode 100644
index 0000000..2499756
--- /dev/null
+++ b/app/templates/debug.html
@@ -0,0 +1,2804 @@
+{% extends "layout.html" %}
+
+{% block extra_styles %}
+
+
+
+
+
+
+{% endblock %}
+
+{% block content %}
+
+
INTERNE DIAGNOSE-, SYSTEM- UND LIVE-STATUS-ANSICHT
+
Debug Center
+
Zentrale Uebersicht fuer Monitoring, Analyse und Systemstatus.
+
+
+
+
+
System Status
+
Performance
+
Printer Scanner
+
MQTT
+
JSON Inspector
+
Log Viewer
+
Config Manager
+
Services
+
+
+
+ Lite
+ Pro
+
+
Mode: Lite
+
+
+
+
+
+
System Health
+
+ Warning
+ Some services require attention
+
+
+ System is operating normally
+
+
+
+
+
Application
+
FilamentHub
+
0.0.0
+
development
+
0
+
+
+
+
Systemressourcen
+
+
CPU: - n/a
+
RAM: - -
+
Disk: - -
+
+
+
+
+
Backend & Services Status
+
+
API: i offline
+
DB: i offline
+
MQTT: i offline
+
WebSocket: i offline
+
+
+
+
+
Runtime & Requests Idle
+
+
Requests/min: -
+
Avg Response: i -
+
+
+
+
+
+
Status: -
+
Response: -
+
+
+
+
+
Status: -
+
Type: -
+
+
+
+
+
WebSocket Detail
+
PRO
+
+
Status: -
+
Clients: -
+
+
+
+
+
+
+
+ OK
+ Erklaert aktive Warnhinweise im System.
+
+
+ Keine Warnungen aktiv.
+
+
+
+
+
+
AMS Detail
+
Coming soon
+
+
+
Weitere Pro-Funktionen
+
Pro-Funktion - folgt in Kuerze
+
+
+
+
+
+
Live Payload
+
Aktueller Status des ausgewählten Druckers (Live)
+
+
+
Status: -
+
Letztes Update: -
+
Job: -
+
+
AMS: -
+
+
+
+
+
+
+
+
+
+
+
Pro Mode
+
Pro-Funktionen werden hier angezeigt.
+
+
+
+
+
+
Scanner Pro
+
+
+
+
+ Deep Probe
+ IDLE
+
+
Führt eine erweiterte Verbindungs- und Erreichbarkeitsprüfung für das ausgewählte Gerät durch.
+
+ Probe starten
+
+
Noch keine Probe-Daten vorhanden.
+
Antwortzeit: -
+
Erkannt: -
+
Fehlerklasse: -
+
HTTP-Status: -
+
Hinweis: -
+
+
+
+
+ Geraete-Fingerprint
+ IDLE
+
+
Ermittelt den tatsaechlichen Geraetetyp anhand technischer Merkmale.
+
+ Fingerprint starten
+
+
Status: -
+
Erkannt: -
+
Vertrauensgrad: -
+
Ports:
+
+
+
+
+
+ Rohdaten / JSON-Ansicht
+
+
Einmalige Rohdaten-Aufnahme zur Analyse und Fehlersuche.
+
+ Snapshot erfassen
+
+
Funktion folgt in Kürze
+
Es wurde noch kein Snapshot erstellt.
+
+
+
+
+
+
+
+
+ IP-Adresse
+
+
+
+
+ Drucker-Typ
+
+ Bambu Lab
+ Klipper / Moonraker
+ Generisch / Andere
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Drucker-Typ
+
+ Bambu Lab
+ Klipper / Moonraker
+ Generisch / Andere
+
+
+
+
+
+
+
+
+
+
+
+
+
+
System Health
+
+
+ Health-Überwachung aktiv
+
+
+ Warn-Schwelle Latenz (ms)
+
+
+
+ Fehler-Schwelle Latenz (ms)
+
+
+
Steuert die Ampel-Logik im System Health Bereich.
+
+ Abbrechen
+ Speichern
+
+
+
+
Logging
+
+ Log-Level
+
+ off
+ basic
+ verbose
+
+
+
+
+ Logs in Datei schreiben
+
+
+
off = kein Logging, basic = Basis-Events, verbose = detailliertes Debug.
+
Module aktiv: App false , Bambu false , Klipper false , MQTT false
+
+
+ Abbrechen
+ Speichern
+
+
+
+
Runtime Monitoring
+
+
+ Runtime-Überwachung aktiv
+
+
+ Poll-Intervall (ms)
+
+
+
Beeinflusst Aktualisierung von Runtime & Requests.
+
+ Abbrechen
+ Speichern
+
+
+
+
JSON Inspector Limits
+
+ Max JSON Size (MB)
+
+
+
+ Max JSON Depth
+
+
+
+
+ Allow override in JSON Inspector
+
+
Schutzmechanismen für JSON-Upload und -Analyse im JSON Inspector.
+
+ Abbrechen
+ Speichern
+
+
+
+
⚠️ Hinweis: Speicherfunktion wird gerade implementiert
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Docker Status
+
Not available
+
+
+
+ Restart Backend
+ Docker Up
+ Docker Down
+ Docker Status
+
+
+
+
+
+
+
Tests
+
+ Smoke CRUD
+ DB CRUD
+ All Tests
+ Coverage
+
+
Dependencies
+
+ Install Requirements
+ Update Packages
+ List Packages
+ Outdated Packages
+
+
+
+
+
+
+
+
Log Viewer
+
+
+ Modul:
+
+ app
+ mqtt (Status)
+ 3D-Drucker
+ klipper
+ errors
+
+ Limit:
+
+ 50
+ 100
+ 200
+
+ 🔄 Neu laden
+ ⏸ Pause
+ 🗑 Löschen
+
+
+
+ Level: Alle
+ ERROR
+ WARNING
+ INFO
+ DEBUG
+
+
+
+
+
+
+
+
+
Bestätigung
+
Logs löschen?
+
Die Log-Datei des gewählten Moduls wird gelöscht. Dieser Vorgang kann nicht rückgängig gemacht werden.
+
+ Abbrechen
+ Ja, löschen
+
+
+
+
+
+
+
+
Gesamt
+
0
+
Log-Einträge
+
+
+
+
Warnings
+
0
+
Warnungen
+
+
+
Status
+
Live
+
Letzter Log: Nie aktualisiert
+
+
+
+
+
+
+
+
Log-Einträge
+
+
+ Keine Logs verfügbar. Klicke auf "Neu laden" um Logs zu laden.
+
+
+
+
+
+
+
+
MQTT – ÜBERSICHT
+
+
+
+
Status
+
Disconnected
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Nachrichten empfangen
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
MQTT Connection
+
+
+
+
+
+ Broker Address
+
+
+
+
+
+
+ Connect
+ Disconnect
+ Bambu-Topics
+
+
+ 💡
+ Tipp: Für Bambu Lab Drucker verwende Port 8883 mit Username "bblp" und dem Access Code als Passwort. Der Access Code befindet sich auf dem Drucker-Display unter Einstellungen → Netzwerk → MQTT.
+
+
+
+
+
+
Abonnierte Topics und Status
+
+
+
+
+
+
📡
+
Keine Topics (noch keine MQTT-Nachrichten empfangen).
+
+ Keine Daten
+
+
+ MQTT Status:
+ Nicht verbunden
+
+
+
+
+
+
+
+
+
+
Live Messages
+
+
Warte auf Nachrichten...
+
+
+
+
+
+
+
+
+
Weitere Pro-Funktionen
+
Pro-Funktion – folgt in Kürze
+
+
+
+
+
+
+
+
+
+
+{% endblock %}
+
+{% block scripts %}
+
+
+{% endblock %}
+{% block extra_scripts %}
+
+
+
+
+
+{% endblock %}
diff --git a/app/templates/debug_legacy.html b/app/templates/debug_legacy.html
new file mode 100644
index 0000000..10ea236
--- /dev/null
+++ b/app/templates/debug_legacy.html
@@ -0,0 +1,18 @@
+{% extends "layout.html" %}
+
+{% block content %}
+
+
+
+
+
+
+
Das Debug Center wird gerade auf das neue Layout migriert.
+
Panels folgen schrittweise aus debug_legacy.html.
+
+
+
+{% endblock %}
diff --git a/app/templates/layout.html b/app/templates/layout.html
new file mode 100644
index 0000000..4addb87
--- /dev/null
+++ b/app/templates/layout.html
@@ -0,0 +1,106 @@
+
+
+
+
+
+ {{ title or "FilamentHub" }}
+
+
+ {% block extra_styles %}{% endblock %}
+
+
+ {% include "sidebar.html" %}
+
+
+ {% block header %}
+ {% if active_page != "debug" %}
+
+ {% endif %}
+ {% endblock %}
+
+ {% block content %}{% endblock %}
+
+
+
+
+
+
+
+ {% if active_page == "debug" %}
+
+
+ {% endif %}
+
+ {% if active_page == "dashboard" %}
+
+ {% endif %}
+
+ {% if active_page == "materials" %}
+
+ {% endif %}
+
+ {% if active_page == "spools" %}
+
+ {% endif %}
+
+ {% if active_page == "printers" %}
+
+ {% endif %}
+
+ {% block scripts %}{% endblock %}
+ {% block extra_scripts %}{% endblock %}
+
+
diff --git a/app/templates/logs.html b/app/templates/logs.html
new file mode 100644
index 0000000..8e60846
--- /dev/null
+++ b/app/templates/logs.html
@@ -0,0 +1,51 @@
+
+
+
+
+ FilamentHub Log Dashboard
+
+
+
+
+
+
+
+ FilamentHub – Log Dashboard
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/templates/sidebar.html b/app/templates/sidebar.html
new file mode 100644
index 0000000..c752b76
--- /dev/null
+++ b/app/templates/sidebar.html
@@ -0,0 +1,18 @@
+
diff --git a/app/websocket/log_stream.py b/app/websocket/log_stream.py
new file mode 100644
index 0000000..b710da0
--- /dev/null
+++ b/app/websocket/log_stream.py
@@ -0,0 +1,28 @@
+import os
+import asyncio
+from fastapi import WebSocket
+from datetime import datetime
+
+LOG_ROOT = "logs"
+
+
+async def stream_log(websocket: WebSocket, module: str):
+ await websocket.accept()
+ today = datetime.now().strftime("%Y-%m-%d")
+ file_path = os.path.join(LOG_ROOT, module, f"{today}.log")
+
+ if not os.path.exists(file_path):
+ await websocket.close()
+ return
+
+ # Datei öffnen + ans Ende springen
+ with open(file_path, "r", encoding="utf-8") as f:
+ f.seek(0, os.SEEK_END)
+
+ while True:
+ line = f.readline()
+ if not line:
+ await asyncio.sleep(0.5)
+ continue
+
+ await websocket.send_text(line)
diff --git a/config.json b/config.json
new file mode 100644
index 0000000..e6ee17d
--- /dev/null
+++ b/config.json
@@ -0,0 +1,55 @@
+{
+ "name": "FilamentHub",
+ "version": "0.1.5-Beta",
+ "generated": "21.11.2025 11:53:38,62",
+ "debug": {
+ "runtime": {
+ "enabled": true,
+ "poll_interval_ms": 2000
+ }
+ },
+ "scanner": {
+ "pro": {
+ "deep_probe": false,
+ "fingerprint_enabled": false
+ }
+ },
+ "fingerprint": {
+ "enabled": false,
+ "ports": [
+ 8883,
+ 6000,
+ 7125
+ ],
+ "timeout_ms": 1500
+ },
+ "json_inspector": {
+ "max_size_mb": 5,
+ "max_depth": 50,
+ "allow_override": false
+ },
+ "logging": {
+ "enabled": true,
+ "keep_days": 14,
+ "level": "DEBUG",
+ "max_size_mb": 5,
+ "backup_count": 3,
+ "modules": {
+ "app": {
+ "enabled": true
+ },
+ "bambu": {
+ "enabled": true
+ },
+ "errors": {
+ "enabled": true
+ },
+ "klipper": {
+ "enabled": false
+ },
+ "mqtt": {
+ "enabled": true
+ }
+ }
+ }
+}
diff --git a/config.yaml b/config.yaml
index 3e14ada..1637ef4 100644
--- a/config.yaml
+++ b/config.yaml
@@ -2,12 +2,12 @@ app:
developer: Denis (d3nn3s08)
environment: development
name: FilamentHub
- version: 0.1.1
+ version: 0.1.6
logging:
enabled: true
keep_days: 14
level: DEBUG
- max_size_mb: 10 # Maximale Logdateigröße in MB
+ max_size_mb: 5 # Maximale Logdateigröße in MB
backup_count: 3 # Anzahl Backup-Dateien
modules:
app:
@@ -19,7 +19,7 @@ logging:
klipper:
enabled: false
mqtt:
- enabled: false # MQTT-Logging standardmäßig aus
+ enabled: true
paths:
logs: ./logs
integrations:
diff --git a/data/X1C.png b/data/X1C.png
new file mode 100644
index 0000000..e969da2
Binary files /dev/null and b/data/X1C.png differ
diff --git a/data/X1C.svg b/data/X1C.svg
new file mode 100644
index 0000000..3090fa5
--- /dev/null
+++ b/data/X1C.svg
@@ -0,0 +1,101 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/data/icon/64px-Klipper-logo_svg.svg.png b/data/icon/64px-Klipper-logo_svg.svg.png
new file mode 100644
index 0000000..7a1b41d
Binary files /dev/null and b/data/icon/64px-Klipper-logo_svg.svg.png differ
diff --git a/data/icon/BambuLab_logo.svg b/data/icon/BambuLab_logo.svg
new file mode 100644
index 0000000..13e0e93
--- /dev/null
+++ b/data/icon/BambuLab_logo.svg
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/data/icon/spool.svg b/data/icon/spool.svg
new file mode 100644
index 0000000..be00b78
--- /dev/null
+++ b/data/icon/spool.svg
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/data/screenshots/ChatGPT Image 7. Dez. 2025, 23_07_46.png b/data/screenshots/ChatGPT Image 7. Dez. 2025, 23_07_46.png
new file mode 100644
index 0000000..80caf1d
Binary files /dev/null and b/data/screenshots/ChatGPT Image 7. Dez. 2025, 23_07_46.png differ
diff --git a/data/screenshots/ChatGPT Image 7. Dez. 2025, 23_29_29.png b/data/screenshots/ChatGPT Image 7. Dez. 2025, 23_29_29.png
new file mode 100644
index 0000000..87848f1
Binary files /dev/null and b/data/screenshots/ChatGPT Image 7. Dez. 2025, 23_29_29.png differ
diff --git a/data/screenshots/Material.png b/data/screenshots/Material.png
new file mode 100644
index 0000000..85d27a7
Binary files /dev/null and b/data/screenshots/Material.png differ
diff --git a/data/screenshots/Spulen.png b/data/screenshots/Spulen.png
new file mode 100644
index 0000000..cdb7c17
Binary files /dev/null and b/data/screenshots/Spulen.png differ
diff --git a/data/screenshots/Statistiken.png b/data/screenshots/Statistiken.png
new file mode 100644
index 0000000..3744999
Binary files /dev/null and b/data/screenshots/Statistiken.png differ
diff --git a/data/screenshots/Statistiken02.png b/data/screenshots/Statistiken02.png
new file mode 100644
index 0000000..6c35a5b
Binary files /dev/null and b/data/screenshots/Statistiken02.png differ
diff --git a/data/screenshots/daten-achtektur.png b/data/screenshots/daten-achtektur.png
new file mode 100644
index 0000000..0c1ec00
Binary files /dev/null and b/data/screenshots/daten-achtektur.png differ
diff --git a/data/screenshots/jops.png b/data/screenshots/jops.png
new file mode 100644
index 0000000..a322269
Binary files /dev/null and b/data/screenshots/jops.png differ
diff --git a/data/screenshots/mini user menu.png b/data/screenshots/mini user menu.png
new file mode 100644
index 0000000..ec0e9a0
Binary files /dev/null and b/data/screenshots/mini user menu.png differ
diff --git a/data/test.db b/data/test.db
deleted file mode 100644
index eb244ca..0000000
Binary files a/data/test.db and /dev/null differ
diff --git a/docker-compose.yml b/docker-compose.yml
index 3fe30fd..4440b34 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,14 +1,24 @@
services:
- fillamenthub:
- container_name: fillamenthub
- build: .
+ filamenthub:
+ container_name: filamenthub
+ image: filamenthub:latest
restart: unless-stopped
- ports:
- - "8080:8080"
+ network_mode: host
+ env_file:
+ - .env
environment:
- FILAMENTHUB_DB_PATH: /data/filamenthub.db
+ FILAMENTHUB_DB_PATH: /app/data/filamenthub.db
PYTHONPATH: /app
+ # Versionsbezeichnungen (optional - überschreibt Dockerfile Defaults)
+ # APP_VERSION: "Beta v1.6 · FilamentHub"
+ # DESIGN_VERSION: "Design Beta-1.0"
volumes:
- - /mnt/user/appdata/fillamenthub/data:/data
- - /mnt/user/appdata/fillamenthub/logs:/logs
+ - /mnt/user/appdata/filamenthub/data:/app/data
+ - /mnt/user/appdata/filamenthub/logs:/app/logs
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8085/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 40s
entrypoint: ["./entrypoint.sh"]
diff --git a/entrypoint.sh b/entrypoint.sh
index 7be7e51..36d6e70 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -1,22 +1,5 @@
#!/bin/sh
set -e
-# Defaults
-export FILAMENTHUB_DB_PATH="${FILAMENTHUB_DB_PATH:-/app/data/filamenthub.db}"
-export PYTHONPATH="${PYTHONPATH:-/app}"
-
-echo "[entrypoint] DB_PATH=${FILAMENTHUB_DB_PATH}"
-
-# Ensure directories exist
-mkdir -p "$(dirname "$FILAMENTHUB_DB_PATH")" /app/logs
-
-# Run Alembic migrations (if available)
-if [ -f /app/alembic.ini ]; then
- echo "[entrypoint] Running Alembic migrations..."
- alembic upgrade head || { echo "[entrypoint] Alembic failed"; exit 1; }
-else
- echo "[entrypoint] alembic.ini not found, skipping migrations"
-fi
-
-# Start the app
-exec python run.py
+# Starte FilamentHub mit den Settings aus config.yaml
+exec python /app/run.py
diff --git a/frontend/static/admin_db_editor.js b/frontend/static/admin_db_editor.js
new file mode 100644
index 0000000..bfc146d
--- /dev/null
+++ b/frontend/static/admin_db_editor.js
@@ -0,0 +1,139 @@
+// Admin DB Editor (SQL) + Tables Overview (simpel)
+document.addEventListener('DOMContentLoaded', () => {
+ const queryInput = document.getElementById('dbEditorQuery');
+ const outputBox = document.getElementById('dbEditorOutput');
+ const executeBtn = document.getElementById('dbEditorExecute');
+ const clearBtn = document.getElementById('dbEditorClear');
+ const overviewBox = document.getElementById('dbTables');
+ const exampleButtons = document.querySelectorAll('[data-sql-example]');
+
+ // SQL-Editor
+ if (queryInput && outputBox && executeBtn && clearBtn) {
+ executeBtn.onclick = async () => {
+ const sql = queryInput.value.trim();
+ if (!sql) return;
+ outputBox.textContent = 'Wird ausgeführt...';
+ try {
+ const res = await fetch('/api/debug/db/exec', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ sql })
+ });
+ const data = await res.json();
+ outputBox.textContent = data.success ? (data.output || 'OK') : (data.error || 'Fehler');
+ } catch (e) {
+ outputBox.textContent = 'Fehler beim Ausführen!';
+ }
+ };
+ clearBtn.onclick = () => {
+ queryInput.value = '';
+ outputBox.textContent = '';
+ };
+ }
+
+ if (queryInput && exampleButtons.length) {
+ const SQL_EXAMPLES = {
+ update: "UPDATE material SET notes = 'Beispieltext' WHERE id = 'INSERT_UUID_HERE';",
+ insert: "INSERT INTO material (name, brand, density, diameter) VALUES ('Mustermaterial', 'Demo', 1.24, 1.75);",
+ delete: "DELETE FROM material WHERE id = 'INSERT_UUID_HERE';"
+ };
+ exampleButtons.forEach(btn => {
+ const key = btn.dataset.sqlExample;
+ btn.addEventListener('click', () => {
+ if (!key) return;
+ queryInput.value = SQL_EXAMPLES[key] || '';
+ queryInput.focus();
+ });
+ });
+ }
+
+ // Tables Overview
+ if (overviewBox) {
+ loadTablesOverview(overviewBox);
+ }
+});
+
+async function loadTablesOverview(box) {
+ const renderError = (msg) => {
+ box.innerHTML = `${msg}
`;
+ };
+ try {
+ const res = await fetch('/api/database/tables');
+ const data = await res.json();
+ const tables = data.tables || [];
+ if (!tables.length) {
+ box.innerHTML = 'Keine Tabellen gefunden.
';
+ return;
+ }
+ const cards = tables.map(renderTableCard).join('');
+ box.innerHTML = `${cards}
`;
+ } catch (e) {
+ renderError('Fehler beim Laden der Tabellenübersicht!');
+ }
+}
+
+function renderTableCard(t) {
+ const cols = (t.columns || []).map(col => `
+
+ ${col.name} (${col.type})
+ ${col.primary_key ? 'PK ' : ''}
+
+ `).join('');
+ const rowCount = t.row_count ?? 0;
+ const colCount = t.column_count ?? (t.columns ? t.columns.length : 0);
+ const previewHtml = renderPreviewTable(t.preview);
+ return `
+
+
+
${cols}
+ ${previewHtml}
+
+ `;
+}
+
+function renderPreviewTable(preview) {
+ if (!preview || !Array.isArray(preview.headers) || !Array.isArray(preview.rows) || preview.rows.length === 0) {
+ return '';
+ }
+ const headers = preview.headers;
+ const rows = preview.rows.slice(0, 5);
+ const totalRows = preview.rows.length;
+ const more = totalRows > 5 ? `… ${totalRows - 5} weitere Zeilen
` : '';
+
+ const tableRows = rows.map(r => {
+ const cells = headers.map((_, idx) => {
+ const value = r[idx];
+ // Wandle Werte in lesbares Format um
+ let display = value;
+ if (value === null || value === undefined) {
+ display = 'null ';
+ } else if (typeof value === 'object') {
+ display = JSON.stringify(value).substring(0, 50);
+ } else {
+ display = String(value).substring(0, 100);
+ }
+ return `${display} `;
+ }).join('');
+ return `${cells} `;
+ }).join('');
+
+ return `
+
+
Preview (${totalRows} Zeilen)
+
+
+
+ ${headers.map(h => `${h} `).join('')}
+
+
+ ${tableRows}
+
+
+
+ ${more}
+
+ `;
+}
diff --git a/frontend/static/admin_panel.css b/frontend/static/admin_panel.css
new file mode 100644
index 0000000..c58dc02
--- /dev/null
+++ b/frontend/static/admin_panel.css
@@ -0,0 +1,2241 @@
+:root {
+ --bg: #000000;
+ --bg-card: #001a1f;
+ --bg-card-2: #002833;
+ --border: #00d4ff;
+ --text: #ffffff;
+ --text-dim: #999999;
+ --accent: #00d4ff;
+ --accent-2: #0088cc;
+ --success: #00d4ff;
+ --danger: #00d4ff;
+ --shadow: 0 0 30px rgba(0,212,255,0.3);
+ --glass: rgba(0,212,255,0.05);
+ --glow-accent: 0 0 20px rgba(0,212,255,0.5), 0 0 40px rgba(0,212,255,0.3);
+ --glow-accent-strong: 0 0 30px rgba(0,212,255,0.8), 0 0 60px rgba(0,212,255,0.5), 0 0 90px rgba(0,212,255,0.2);
+}
+
+* {
+ box-sizing: border-box;
+}
+
+html {
+ scroll-behavior: smooth;
+}
+
+/* Custom Scrollbar for body */
+::-webkit-scrollbar {
+ width: 10px;
+ height: 10px;
+}
+
+::-webkit-scrollbar-track {
+ background: #000;
+ border: 1px solid #00d4ff;
+}
+
+::-webkit-scrollbar-thumb {
+ background: linear-gradient(180deg, #00d4ff, #0088cc);
+ border-radius: 0;
+ border: 1px solid #00d4ff;
+ box-shadow: 0 0 10px rgba(0,212,255,0.5);
+}
+
+::-webkit-scrollbar-thumb:hover {
+ background: #00d4ff;
+ box-shadow: 0 0 20px rgba(0,212,255,0.8);
+}
+
+::-webkit-scrollbar-corner {
+ background: #000;
+ border: 1px solid #00d4ff;
+}
+
+body {
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.02) 0px,
+ rgba(0, 212, 255, 0.02) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ radial-gradient(circle at 20% 20%, rgba(0,212,255,0.08), transparent 40%),
+ radial-gradient(circle at 80% 80%, rgba(0,100,150,0.1), transparent 40%),
+ #000000;
+ background-attachment: fixed;
+ color: var(--text);
+ font-family: 'Courier New', 'Consolas', monospace;
+ margin: 0;
+ min-height: 100vh;
+ overflow-x: hidden;
+ position: relative;
+}
+
+body::before {
+ content: '';
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 3px
+ );
+ pointer-events: none;
+ z-index: 1;
+ animation: scanlines 8s linear infinite;
+}
+
+@keyframes scanlines {
+ 0% { transform: translateY(0); }
+ 100% { transform: translateY(3px); }
+}
+
+.admin-wrapper {
+ display: flex;
+ min-height: 100vh;
+ position: relative;
+ z-index: 2;
+}
+
+.admin-sidebar {
+ width: 260px;
+ padding: 2.2rem 1.4rem;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.02) 0px,
+ rgba(0, 212, 255, 0.02) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ linear-gradient(180deg, rgba(0,20,30,0.95) 0%, rgba(0,10,20,0.98) 100%);
+ backdrop-filter: blur(5px);
+ -webkit-backdrop-filter: blur(5px);
+ border-right: 2px solid #00d4ff;
+ box-shadow:
+ 6px 0 30px rgba(0,212,255,0.4),
+ inset -2px 0 10px rgba(0,212,255,0.1);
+ position: relative;
+ z-index: 100;
+ clip-path: polygon(0 0, 100% 0, 100% calc(100% - 30px), calc(100% - 30px) 100%, 0 100%);
+}
+
+.admin-sidebar::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ right: 0;
+ width: 2px;
+ height: 100%;
+ background: linear-gradient(180deg,
+ transparent 0%,
+ #00d4ff 20%,
+ #00d4ff 80%,
+ transparent 100%
+ );
+ box-shadow: 0 0 10px #00d4ff;
+ pointer-events: none;
+}
+
+.admin-sidebar::after {
+ content: '◆ NEXUS TERMINAL';
+ position: absolute;
+ top: 10px;
+ right: 10px;
+ font-size: 8px;
+ color: #00d4ff;
+ text-shadow: 0 0 5px #00d4ff;
+ letter-spacing: 2px;
+ opacity: 0.7;
+}
+.brand {
+ display:flex;
+ align-items:center;
+ gap:14px;
+ margin-bottom:2.5rem;
+ padding: 1rem;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.05) 0px,
+ rgba(0, 212, 255, 0.05) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ rgba(0, 20, 30, 0.6);
+ border-radius: 0;
+ border: 2px solid #00d4ff;
+ border-left: 5px solid #00d4ff;
+ position: relative;
+ overflow: hidden;
+ clip-path: polygon(0 0, calc(100% - 12px) 0, 100% 12px, 100% 100%, 0 100%);
+ box-shadow:
+ 0 0 20px rgba(0,212,255,0.3),
+ inset 0 0 20px rgba(0,212,255,0.1);
+}
+
+.brand::before {
+ content: '//';
+ position: absolute;
+ top: 5px;
+ left: 5px;
+ font-size: 10px;
+ color: #00d4ff;
+ text-shadow: 0 0 5px #00d4ff;
+ font-family: 'Courier New', monospace;
+}
+
+.brand-logo {
+ height:46px;
+ width:46px;
+ border-radius:0;
+ background: linear-gradient(135deg, #00d4ff, #0088cc);
+ padding:7px;
+ box-shadow:
+ 0 0 20px rgba(0,212,255,0.6),
+ inset 0 0 10px rgba(0,212,255,0.3);
+ transition: all 0.3s ease;
+ position: relative;
+ z-index: 1;
+ clip-path: polygon(8px 0, 100% 0, 100% calc(100% - 8px), calc(100% - 8px) 100%, 0 100%, 0 8px);
+}
+
+.brand-logo:hover {
+ transform: scale(1.05);
+ box-shadow:
+ 0 0 30px rgba(0,212,255,0.8),
+ inset 0 0 15px rgba(0,212,255,0.5);
+ filter: brightness(1.2);
+}
+
+.brand-copy {
+ display:flex;
+ flex-direction:column;
+ position: relative;
+ z-index: 1;
+}
+
+.brand-name {
+ font-size:1.4rem;
+ font-weight:700;
+ letter-spacing:2px;
+ color: #00d4ff;
+ text-shadow: 0 0 10px rgba(0,212,255,0.8);
+ text-transform: uppercase;
+ font-family: 'Courier New', monospace;
+}
+
+.brand-tag {
+ color: #0088cc;
+ font-size:0.75rem;
+ letter-spacing:2px;
+ font-weight: 500;
+ text-transform: uppercase;
+ font-family: 'Courier New', monospace;
+}
+.sidebar-group {
+ margin-bottom: 2.2rem;
+}
+
+.sidebar-label {
+ color: #00d4ff;
+ font-size: 0.7rem;
+ letter-spacing: 2px;
+ margin-bottom: 0.8rem;
+ font-weight: 700;
+ text-transform: uppercase;
+ padding-left: 0.5rem;
+ text-shadow: 0 0 5px rgba(0,212,255,0.5);
+ font-family: 'Courier New', monospace;
+}
+
+.sidebar-label::before {
+ content: '> ';
+ color: #00d4ff;
+}
+
+.admin-sidebar .nav-btn {
+ width: 100%;
+ display: flex;
+ align-items: center;
+ gap: 14px;
+ background: rgba(0, 20, 30, 0.3);
+ color: #00d4ff;
+ border: 1px solid #0088cc;
+ border-left: 3px solid transparent;
+ padding: 0.95rem 1rem;
+ margin-bottom: 0.5rem;
+ border-radius: 0;
+ font-size: 0.9rem;
+ font-weight: 600;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ position: relative;
+ overflow: hidden;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 0 100%);
+}
+
+.admin-sidebar .nav-btn::before {
+ content: '';
+ position: absolute;
+ left: 0;
+ top: 0;
+ width: 0;
+ height: 100%;
+ background: linear-gradient(90deg, rgba(0,212,255,0.3), transparent);
+ transition: width 0.3s ease;
+}
+
+.admin-sidebar .nav-btn svg {
+ transition: all 0.2s ease;
+ filter: drop-shadow(0 0 0 transparent);
+}
+
+.admin-sidebar .nav-btn:hover {
+ border-color: #00d4ff;
+ border-left-color: #00d4ff;
+ background: rgba(0, 212, 255, 0.15);
+ transform: translateX(5px);
+ box-shadow: 0 0 15px rgba(0,212,255,0.3);
+}
+
+.admin-sidebar .nav-btn:hover svg {
+ transform: scale(1.1);
+ filter: drop-shadow(0 0 8px rgba(0,212,255,0.8));
+}
+
+.admin-sidebar .nav-btn:hover::before {
+ width: 100%;
+}
+
+.admin-sidebar .nav-btn.active {
+ background: rgba(0, 212, 255, 0.2);
+ border-color: #00d4ff;
+ border-left-color: #00d4ff;
+ border-left-width: 5px;
+ color: #00d4ff;
+ box-shadow:
+ 0 0 20px rgba(0,212,255,0.4),
+ inset 0 0 10px rgba(0,212,255,0.2);
+ transform: translateX(5px);
+}
+
+.admin-sidebar .nav-btn.active::before {
+ width: 100%;
+}
+
+.admin-sidebar .nav-btn.active svg {
+ filter: drop-shadow(0 0 10px rgba(0,212,255,1));
+}
+
+.admin-content {
+ flex: 1;
+ padding: 2.4rem 5vw 3rem;
+ max-width: 1280px;
+ margin: 0 auto;
+}
+
+.hero {
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ linear-gradient(135deg, rgba(0,40,60,0.8), rgba(0,20,30,0.9));
+ backdrop-filter: blur(5px);
+ -webkit-backdrop-filter: blur(5px);
+ border: 2px solid #00d4ff;
+ border-top: 5px solid #00d4ff;
+ border-radius: 0;
+ padding: 2.2rem 2rem;
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ gap: 1.8rem;
+ box-shadow:
+ 0 0 40px rgba(0,212,255,0.4),
+ inset 0 0 30px rgba(0,212,255,0.1);
+ margin-bottom: 2rem;
+ position: relative;
+ overflow: hidden;
+ clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 0 100%);
+}
+
+.hero::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 5px;
+ background: linear-gradient(90deg, transparent, #00d4ff, transparent);
+ box-shadow: 0 0 10px #00d4ff;
+ animation: terminalLoad 3s ease-in-out infinite;
+ pointer-events: none;
+}
+
+.hero::after {
+ content: '// COMMAND CENTER ACTIVE';
+ position: absolute;
+ top: 10px;
+ right: 15px;
+ font-size: 8px;
+ color: #00d4ff;
+ text-shadow: 0 0 5px #00d4ff;
+ letter-spacing: 2px;
+ font-family: 'Courier New', monospace;
+ opacity: 0.7;
+}
+
+@keyframes float {
+ 0%, 100% { transform: translate(0, 0) rotate(0deg); }
+ 33% { transform: translate(-30px, -30px) rotate(120deg); }
+ 66% { transform: translate(30px, 30px) rotate(240deg); }
+}
+
+.hero-text h1 {
+ margin: 0 0 0.5rem 0;
+ font-size: 2.2rem;
+ letter-spacing: 3px;
+ font-weight: 700;
+ color: #00d4ff;
+ text-shadow: 0 0 15px rgba(0,212,255,0.8);
+ text-transform: uppercase;
+ font-family: 'Courier New', monospace;
+}
+
+.lead {
+ margin: 0 0 1rem 0;
+ color: #0088cc;
+ max-width: 760px;
+ line-height: 1.6;
+ font-size: 1rem;
+ font-family: 'Courier New', monospace;
+}
+
+.eyebrow {
+ margin: 0 0 0.5rem 0;
+ color: #00d4ff;
+ font-weight: 700;
+ letter-spacing: 2px;
+ text-transform: uppercase;
+ font-size: 0.7rem;
+ display: inline-block;
+ position: relative;
+ font-family: 'Courier New', monospace;
+ text-shadow: 0 0 5px rgba(0,212,255,0.5);
+}
+
+.eyebrow::before {
+ content: '> ';
+ color: #00d4ff;
+ margin-right: 5px;
+}
+
+.hero-pills {
+ display:flex;
+ gap:10px;
+ flex-wrap:wrap;
+}
+
+.pill {
+ background: rgba(0, 20, 30, 0.6);
+ backdrop-filter: blur(5px);
+ border: 1px solid #00d4ff;
+ color: #00d4ff;
+ padding: 7px 14px;
+ border-radius: 0;
+ font-size: 0.75rem;
+ font-weight: 600;
+ transition: all 0.2s ease;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ clip-path: polygon(4px 0, 100% 0, calc(100% - 4px) 100%, 0 100%);
+ box-shadow: 0 0 10px rgba(0,212,255,0.2);
+}
+
+.pill:hover {
+ background: rgba(0, 212, 255, 0.2);
+ border-color: #00d4ff;
+ transform: translateY(-2px);
+ box-shadow: 0 0 15px rgba(0,212,255,0.5);
+}
+
+.hero-actions {
+ display:flex;
+ gap:12px;
+ flex-wrap:wrap;
+}
+
+.btn-ghost {
+ background: rgba(0, 20, 30, 0.8);
+ backdrop-filter: blur(5px);
+ color: #00d4ff;
+ border: 2px solid #00d4ff;
+ padding: 0.85rem 1.3rem;
+ border-radius: 0;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ font-weight: 700;
+ font-size: 0.85rem;
+ position: relative;
+ overflow: hidden;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 2px;
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 0 100%);
+ box-shadow: 0 0 15px rgba(0,212,255,0.3);
+}
+
+.btn-ghost::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: -100%;
+ width: 100%;
+ height: 100%;
+ background: linear-gradient(90deg, transparent, rgba(0,212,255,0.3), transparent);
+ transition: left 0.4s ease;
+}
+
+.btn-ghost:hover::before {
+ left: 100%;
+}
+
+.btn-ghost:hover {
+ border-color: #00d4ff;
+ color: #00d4ff;
+ background: rgba(0, 212, 255, 0.2);
+ box-shadow: 0 0 25px rgba(0,212,255,0.6);
+ transform: translateY(-2px) translateX(2px);
+}
+
+.card-header { display:flex; justify-content: space-between; align-items: flex-start; gap: 1rem; margin-bottom: 1rem; }
+.notif-grid { display:grid; grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); gap: 1rem; }
+.panel-light { background: var(--glass); border: 1px solid var(--border); border-radius: 14px; padding: 1rem 1.1rem; box-shadow: var(--shadow); }
+.field-row { display: grid; grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); gap: 10px; }
+.field { display:flex; flex-direction:column; gap:6px; }
+.toggle { display:flex; align-items:center; gap:8px; color: var(--text); }
+.card-grid { display:grid; grid-template-columns: repeat(auto-fit, minmax(220px, 1fr)); gap: 10px; }
+
+.notif-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ gap: 1.4rem;
+ margin-bottom: 1.4rem;
+}
+.notif-header h2 {
+ margin: 0.5rem 0 0 0;
+ font-size: 1.6rem;
+}
+.btn-group-compact {
+ display: flex;
+ gap: 0.6rem;
+ flex-wrap: wrap;
+}
+.btn-compact {
+ background: rgba(0,20,30,0.8);
+ color: #00d4ff;
+ border: 2px solid #00d4ff;
+ border-radius: 0;
+ padding: 0.75rem 1.3rem;
+ font-weight: 700;
+ font-size: 0.8rem;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ box-shadow: 0 0 20px rgba(0,212,255,0.3);
+ letter-spacing: 2px;
+ text-transform: uppercase;
+ position: relative;
+ overflow: hidden;
+ font-family: 'Courier New', monospace;
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 0 100%);
+}
+
+.btn-compact::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: -100%;
+ width: 100%;
+ height: 100%;
+ background: linear-gradient(90deg, transparent, rgba(0,212,255,0.3), transparent);
+ transition: left 0.4s ease;
+}
+
+.btn-compact:hover::before {
+ left: 100%;
+}
+
+.btn-compact:hover {
+ transform: translateY(-2px) translateX(2px);
+ background: rgba(0, 212, 255, 0.2);
+ box-shadow: 0 0 30px rgba(0,212,255,0.6);
+}
+
+.btn-compact:active {
+ transform: translateY(0) scale(0.98);
+}
+
+.notif-container {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 1.4rem;
+}
+.notif-form,
+.notif-list {
+ background: linear-gradient(135deg, rgba(17,24,38,0.6), rgba(5,10,18,0.5));
+ backdrop-filter: blur(10px);
+ -webkit-backdrop-filter: blur(10px);
+ border: 1px solid rgba(92,211,255,0.25);
+ border-radius: 18px;
+ padding: 1.8rem;
+ box-shadow: 0 8px 30px rgba(0,0,0,0.3), 0 0 0 1px rgba(255,255,255,0.03) inset;
+}
+.form-title {
+ color: var(--accent);
+ margin: 0 0 1.2rem 0;
+ font-size: 1.1rem;
+ font-weight: 700;
+ letter-spacing: 0.3px;
+}
+.form-group {
+ display: flex;
+ flex-direction: column;
+ gap: 0.5rem;
+ margin-bottom: 1rem;
+}
+.form-group label {
+ color: var(--accent);
+ font-weight: 600;
+ font-size: 0.9rem;
+ letter-spacing: 0.2px;
+}
+.required {
+ color: #ff6b6b;
+}
+.form-row {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 1rem;
+}
+.form-checkboxes {
+ display: flex;
+ gap: 1.2rem;
+ margin-top: 1rem;
+ padding-top: 1rem;
+ border-top: 1px solid rgba(92,211,255,0.15);
+}
+.checkbox-label {
+ display: flex;
+ align-items: center;
+ gap: 0.6rem;
+ color: var(--text);
+ font-weight: 500;
+ cursor: pointer;
+}
+.checkbox-label input {
+ cursor: pointer;
+ width: 18px;
+ height: 18px;
+ accent-color: var(--accent);
+}
+
+.notif-items {
+ display: flex;
+ flex-direction: column;
+ gap: 0.8rem;
+}
+.notif-item {
+ background: linear-gradient(135deg, rgba(92,211,255,0.08), rgba(92,211,255,0.05));
+ backdrop-filter: blur(5px);
+ border: 1px solid rgba(92,211,255,0.25);
+ border-radius: 14px;
+ padding: 1.1rem 1.2rem;
+ transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
+ cursor: pointer;
+ position: relative;
+ overflow: hidden;
+}
+
+.notif-item::before {
+ content: '';
+ position: absolute;
+ left: 0;
+ top: 0;
+ height: 100%;
+ width: 3px;
+ background: linear-gradient(180deg, var(--accent), var(--accent-2));
+ opacity: 0;
+ transition: opacity 0.3s ease;
+}
+
+.notif-item:hover::before {
+ opacity: 1;
+}
+
+.notif-item:hover {
+ background: linear-gradient(135deg, rgba(92,211,255,0.15), rgba(92,211,255,0.1));
+ border-color: rgba(92,211,255,0.5);
+ transform: translateX(5px);
+ box-shadow: 0 8px 25px rgba(92,211,255,0.2);
+}
+.notif-item-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 0.5rem;
+}
+.notif-item-id {
+ color: var(--accent);
+ font-weight: 700;
+ font-size: 0.9rem;
+}
+.notif-item-type {
+ background: rgba(92,211,255,0.15);
+ border: 1px solid rgba(92,211,255,0.3);
+ color: var(--accent);
+ padding: 2px 8px;
+ border-radius: 4px;
+ font-size: 0.75rem;
+ font-weight: 600;
+}
+.notif-item-label {
+ color: var(--text);
+ font-weight: 600;
+ font-size: 0.95rem;
+ margin-bottom: 0.3rem;
+}
+.notif-item-msg {
+ color: var(--text-dim);
+ font-size: 0.85rem;
+ line-height: 1.4;
+ margin-bottom: 0.5rem;
+}
+.notif-item-meta {
+ color: var(--text-dim);
+ font-size: 0.8rem;
+ margin-bottom: 0.7rem;
+}
+.notif-item-actions {
+ display: flex;
+ gap: 0.5rem;
+}
+.notif-item-actions button {
+ flex: 1;
+ background: transparent;
+ color: var(--accent);
+ border: 1px solid rgba(92,211,255,0.3);
+ border-radius: 6px;
+ padding: 0.4rem 0.6rem;
+ font-size: 0.75rem;
+ font-weight: 600;
+ cursor: pointer;
+ transition: all 0.2s ease;
+}
+.notif-item-actions button:hover {
+ background: rgba(92,211,255,0.1);
+ border-color: rgba(92,211,255,0.6);
+}
+
+@media (max-width: 1100px) {
+ .notif-container {
+ grid-template-columns: 1fr;
+ }
+}
+
+.quick-grid {
+ display:grid;
+ grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
+ gap: 1.2rem;
+ margin-bottom: 2rem;
+}
+
+.quick-card {
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ linear-gradient(135deg, rgba(0,30,45,0.9), rgba(0,15,23,0.95));
+ backdrop-filter: blur(5px);
+ -webkit-backdrop-filter: blur(5px);
+ border: 2px solid #00d4ff;
+ border-left: 5px solid #00d4ff;
+ border-radius: 0;
+ padding: 1.4rem 1.3rem;
+ box-shadow:
+ 0 0 25px rgba(0,212,255,0.3),
+ inset 0 0 20px rgba(0,212,255,0.05);
+ position: relative;
+ overflow: hidden;
+ transition: all 0.2s ease;
+ clip-path: polygon(0 0, calc(100% - 12px) 0, 100% 12px, 100% 100%, 0 100%);
+}
+
+.quick-card::before {
+ content: '//';
+ position: absolute;
+ top: 8px;
+ left: 8px;
+ font-size: 10px;
+ color: #00d4ff;
+ text-shadow: 0 0 5px #00d4ff;
+ font-family: 'Courier New', monospace;
+ opacity: 0.7;
+}
+
+.quick-card::after {
+ content:'';
+ position:absolute;
+ top: 0;
+ right: 0;
+ width: 0;
+ height: 3px;
+ background: #00d4ff;
+ box-shadow: 0 0 10px #00d4ff;
+ transition: width 0.3s ease;
+}
+
+.quick-card:hover {
+ transform: translateY(-8px) translateX(3px);
+ border-color: #00d4ff;
+ box-shadow:
+ 0 0 40px rgba(0,212,255,0.6),
+ inset 0 0 30px rgba(0,212,255,0.1);
+}
+
+.quick-card:hover::after {
+ width: 100%;
+}
+
+.quick-label {
+ color: #0088cc;
+ font-size: 0.7rem;
+ letter-spacing: 2px;
+ text-transform: uppercase;
+ font-weight: 700;
+ position: relative;
+ z-index: 1;
+ font-family: 'Courier New', monospace;
+}
+
+.quick-label::before {
+ content: '> ';
+ color: #00d4ff;
+}
+
+.quick-value {
+ color: #00d4ff;
+ font-size: 1.8rem;
+ font-weight: 700;
+ margin: 8px 0;
+ text-shadow: 0 0 15px rgba(0,212,255,0.8);
+ position: relative;
+ z-index: 1;
+ font-family: 'Courier New', monospace;
+}
+
+.quick-meta {
+ color: #0088cc;
+ font-size: 0.75rem;
+ margin-bottom: 12px;
+ position: relative;
+ z-index: 1;
+ font-family: 'Courier New', monospace;
+ letter-spacing: 1px;
+}
+
+
+.admin-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(360px, 1fr));
+ gap: 1.8rem;
+ margin-bottom: 2rem;
+}
+
+.admin-card {
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ linear-gradient(135deg, rgba(0,25,38,0.95), rgba(0,15,23,0.9));
+ backdrop-filter: blur(5px);
+ -webkit-backdrop-filter: blur(5px);
+ border-radius: 0;
+ border: 2px solid #00d4ff;
+ border-top: 5px solid #00d4ff;
+ box-shadow:
+ 0 0 30px rgba(0,212,255,0.4),
+ inset 0 0 25px rgba(0,212,255,0.05);
+ padding: 2rem;
+ transition: all 0.2s ease;
+ position: relative;
+ overflow: hidden;
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 0 100%);
+}
+
+.admin-card::before {
+ content: '// TERMINAL';
+ position: absolute;
+ top: 10px;
+ left: 15px;
+ font-size: 8px;
+ color: #00d4ff;
+ text-shadow: 0 0 5px #00d4ff;
+ font-family: 'Courier New', monospace;
+ letter-spacing: 2px;
+ opacity: 0.7;
+}
+
+.admin-card::after {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ height: 5px;
+ background: linear-gradient(90deg, transparent, #00d4ff, transparent);
+ box-shadow: 0 0 10px #00d4ff;
+ animation: terminalLoad 3s ease-in-out infinite;
+}
+
+.admin-card:hover {
+ transform: translateY(-6px) translateX(3px);
+ border-color: #00d4ff;
+ box-shadow:
+ 0 0 50px rgba(0,212,255,0.6),
+ inset 0 0 35px rgba(0,212,255,0.1);
+}
+.admin-card h2 {
+ color: #00d4ff;
+ margin: 0 0 1.2rem 0;
+ font-size: 1.3rem;
+ letter-spacing: 2px;
+ font-weight: 700;
+ position: relative;
+ z-index: 1;
+ text-transform: uppercase;
+ font-family: 'Courier New', monospace;
+ text-shadow: 0 0 10px rgba(0,212,255,0.6);
+}
+.admin-card label {
+ color: #00d4ff;
+ font-weight: 700;
+ margin: 1rem 0 0.5rem 0;
+ display: block;
+ font-size: 0.8rem;
+ letter-spacing: 2px;
+ text-transform: uppercase;
+ font-family: 'Courier New', monospace;
+}
+
+.admin-card label::before {
+ content: '> ';
+ color: #00d4ff;
+}
+.admin-card input,
+.admin-card select,
+.admin-card textarea {
+ width: 100%;
+ background: rgba(0,10,20,0.8);
+ color: #00d4ff;
+ border: 2px solid #0088cc;
+ border-radius: 0;
+ padding: 1rem 1.2rem;
+ margin-bottom: 1.2rem;
+ font-size: 0.9rem;
+ font-family: 'Courier New', monospace;
+ font-weight: 500;
+ transition: all 0.2s ease;
+ position: relative;
+ letter-spacing: 1px;
+}
+
+.admin-card input::placeholder,
+.admin-card select::placeholder,
+.admin-card textarea::placeholder {
+ color: #0088cc;
+ opacity: 0.7;
+}
+
+.admin-card input:hover,
+.admin-card select:hover,
+.admin-card textarea:hover {
+ border-color: #00d4ff;
+ background: rgba(0,20,30,0.85);
+ box-shadow: 0 0 10px rgba(0,212,255,0.2);
+}
+
+.admin-card input:focus,
+.admin-card select:focus,
+.admin-card textarea:focus {
+ border-color: #00d4ff;
+ background: rgba(0,20,30,0.9);
+ box-shadow:
+ 0 0 0 3px rgba(0,212,255,0.2),
+ inset 0 0 15px rgba(0,212,255,0.1),
+ 0 0 20px rgba(0,212,255,0.3);
+ outline: none;
+ transform: translateX(2px);
+}
+
+.admin-card button {
+ background: rgba(0,20,30,0.8);
+ color: #00d4ff;
+ border: 2px solid #00d4ff;
+ border-radius: 0;
+ padding: 1rem 1.5rem;
+ font-weight: 700;
+ font-size: 0.85rem;
+ cursor: pointer;
+ margin-right: 0.8rem;
+ margin-bottom: 0.8rem;
+ transition: all 0.2s ease;
+ box-shadow: 0 0 20px rgba(0,212,255,0.3);
+ text-transform: uppercase;
+ letter-spacing: 2px;
+ position: relative;
+ overflow: hidden;
+ z-index: 1;
+ font-family: 'Courier New', monospace;
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 0 100%);
+}
+
+.admin-card button::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: -100%;
+ width: 100%;
+ height: 100%;
+ background: linear-gradient(90deg, transparent, rgba(0,212,255,0.3), transparent);
+ transition: left 0.4s ease;
+ z-index: -1;
+}
+
+.admin-card button::after {
+ content: '';
+ position: absolute;
+ inset: 0;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.05) 0px,
+ rgba(0, 212, 255, 0.05) 1px,
+ transparent 1px,
+ transparent 2px
+ );
+ pointer-events: none;
+}
+
+.admin-card button:hover::before {
+ left: 100%;
+}
+
+.admin-card button:hover {
+ transform: translateY(-3px) translateX(2px);
+ background: rgba(0, 212, 255, 0.15);
+ box-shadow: 0 0 30px rgba(0,212,255,0.6);
+}
+
+.admin-card button:active {
+ transform: translateY(-1px) scale(0.98);
+}
+
+.admin-card button.danger {
+ background: rgba(0,20,30,0.8);
+ color: #00d4ff;
+ border-color: #00d4ff;
+ box-shadow: 0 0 20px rgba(0,212,255,0.3);
+}
+
+.admin-card button.danger::before {
+ background: linear-gradient(90deg, transparent, rgba(0,212,255,0.3), transparent);
+}
+
+.admin-card button.danger:hover {
+ background: rgba(0, 212, 255, 0.15);
+ box-shadow: 0 0 30px rgba(0,212,255,0.6);
+}
+.admin-card .status {
+ color: var(--accent);
+ margin: 0.8rem 0;
+ font-size: 0.95rem;
+ font-weight: 500;
+ padding: 0.6rem 0.8rem;
+ background: rgba(92,211,255,0.08);
+ border-left: 3px solid var(--accent);
+ border-radius: 6px;
+}
+.admin-card .note {
+ background: rgba(92,211,255,0.08);
+ color: var(--accent-2);
+ padding: 0.85rem 1rem;
+ border-radius: 10px;
+ margin: 0 0 1rem 0;
+ font-size: 0.95rem;
+ border: 1px solid rgba(255,179,71,0.3);
+ line-height: 1.5;
+}
+
+.note {
+ background: rgba(92,211,255,0.08);
+ color: var(--accent-2);
+ padding: 0.85rem 1rem;
+ border-radius: 10px;
+ margin: 0 0 1rem 0;
+ font-size: 0.95rem;
+ border: 1px solid rgba(255,179,71,0.3);
+ line-height: 1.5;
+}
+
+.sql-example {
+ background: linear-gradient(135deg, rgba(92,211,255,0.15), rgba(77,216,196,0.12));
+ color: var(--accent);
+ border: 1px solid rgba(92,211,255,0.35);
+ padding: 0.7rem 1.2rem;
+ border-radius: 12px;
+ cursor: pointer;
+ font-size: 0.85rem;
+ font-weight: 700;
+ transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
+ font-family: 'Manrope', sans-serif;
+ text-transform: uppercase;
+ letter-spacing: 0.5px;
+ position: relative;
+ overflow: hidden;
+}
+
+.sql-example::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: -100%;
+ width: 100%;
+ height: 100%;
+ background: linear-gradient(90deg, transparent, rgba(92,211,255,0.3), transparent);
+ transition: left 0.5s ease;
+}
+
+.sql-example:hover::before {
+ left: 100%;
+}
+
+.sql-example:hover {
+ background: linear-gradient(135deg, rgba(92,211,255,0.25), rgba(77,216,196,0.2));
+ border-color: var(--accent);
+ box-shadow: 0 6px 18px rgba(92,211,255,0.3);
+ transform: translateY(-2px);
+}
+
+.sql-example:active {
+ transform: translateY(0);
+}
+
+.admin-table-wrapper {
+ background: var(--bg-card);
+ border: 1px solid var(--border);
+ border-radius: 14px;
+ padding: 1.2rem;
+ box-shadow: var(--shadow);
+}
+
+.table-list {
+ display: flex;
+ flex-direction: column;
+ gap: 12px;
+}
+.table-card {
+ background: linear-gradient(135deg, rgba(255,255,255,0.03), rgba(255,255,255,0.01));
+ border: 1px solid rgba(92,211,255,0.2);
+ border-radius: 14px;
+ padding: 16px 18px;
+ transition: all 0.3s ease;
+}
+
+.table-card:hover {
+ background: linear-gradient(135deg, rgba(255,255,255,0.05), rgba(92,211,255,0.03));
+ border-color: rgba(92,211,255,0.35);
+ transform: translateX(5px);
+ box-shadow: 0 4px 20px rgba(92,211,255,0.15);
+}
+.table-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 10px;
+ color: var(--text);
+ font-weight: 700;
+}
+.table-meta {
+ color: var(--text-dim);
+ font-size: 0.9rem;
+}
+.column-pills {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 8px;
+}
+.column-pill {
+ background: #1c222c;
+ border: 1px solid var(--border);
+ color: var(--text);
+ padding: 6px 10px;
+ border-radius: 10px;
+ font-size: 0.92rem;
+ display: inline-flex;
+ align-items: center;
+ gap: 6px;
+}
+.column-pill .type {
+ color: var(--text-dim);
+ font-size: 0.85rem;
+}
+.pk-pill {
+ background: rgba(79,195,247,0.15);
+ border: 1px solid rgba(79,195,247,0.4);
+ color: var(--accent);
+ padding: 2px 6px;
+ border-radius: 6px;
+ font-size: 0.78rem;
+ font-weight: 700;
+}
+.preview-block {
+ margin-top: 12px;
+ border-top: 1px solid var(--border);
+ padding-top: 8px;
+ display: flex;
+ flex-direction: column;
+ gap: 6px;
+}
+.preview-title {
+ color: var(--text-dim);
+ font-size: 0.92rem;
+ letter-spacing: 0.2px;
+}
+.preview-row {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 8px;
+}
+.preview-table-wrapper { overflow-x: auto; }
+.preview-table {
+ width: 100%;
+ border-collapse: collapse;
+ font-size: 0.92rem;
+}
+.preview-table th,
+.preview-table td {
+ padding: 4px 6px;
+ border-bottom: 1px solid var(--border);
+ color: var(--text);
+ text-align: left;
+}
+.preview-table th { color: var(--text-dim); }
+.preview-more {
+ color: var(--text-dim);
+ font-size: 0.85rem;
+}
+
+.panel-light select,
+.panel-light input,
+.panel-light textarea {
+ background: rgba(5,10,18,0.8) !important;
+ border: 1px solid rgba(92,211,255,0.3) !important;
+ color: var(--text) !important;
+ font-size: 1rem !important;
+ transition: all 0.2s ease !important;
+}
+.panel-light select:focus,
+.panel-light input:focus,
+.panel-light textarea:focus {
+ border-color: var(--accent) !important;
+ background: rgba(5,10,18,0.9) !important;
+ box-shadow: 0 0 0 3px rgba(92,211,255,0.15) !important;
+ outline: none !important;
+}
+
+/* Modal Styles */
+.admin-modal {
+ display: none;
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ z-index: 2000;
+ align-items: center;
+ justify-content: center;
+ background: transparent;
+}
+
+.admin-modal.active {
+ display: flex;
+}
+
+.modal-overlay {
+ position: absolute;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background: rgba(0, 0, 0, 0.9);
+ backdrop-filter: blur(8px);
+ -webkit-backdrop-filter: blur(8px);
+ cursor: pointer;
+ z-index: 2000;
+ animation: fadeIn 0.3s ease;
+}
+
+.modal-box {
+ position: relative;
+ z-index: 2001;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ linear-gradient(135deg, rgba(0,25,38,0.98) 0%, rgba(0,15,23,0.98) 100%);
+ backdrop-filter: blur(10px);
+ -webkit-backdrop-filter: blur(10px);
+ border: 2px solid #00d4ff;
+ border-top: 5px solid #00d4ff;
+ border-radius: 0;
+ max-width: 650px;
+ width: 90%;
+ max-height: 85vh;
+ overflow: hidden;
+ display: flex;
+ flex-direction: column;
+ box-shadow:
+ 0 0 60px rgba(0,212,255,0.6),
+ inset 0 0 40px rgba(0,212,255,0.1);
+ animation: slideUp 0.4s cubic-bezier(0.4, 0, 0.2, 1);
+ clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 0 100%);
+}
+
+.modal-box::before {
+ content: '// MODAL TERMINAL';
+ position: absolute;
+ top: 10px;
+ left: 15px;
+ font-size: 8px;
+ color: #00d4ff;
+ text-shadow: 0 0 5px #00d4ff;
+ font-family: 'Courier New', monospace;
+ letter-spacing: 2px;
+ z-index: 10;
+}
+
+.modal-box.modal-large {
+ max-width: 800px;
+}
+
+.modal-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 2.2rem 2.5rem;
+ border-bottom: 2px solid #00d4ff;
+ background: linear-gradient(135deg, rgba(0, 40, 60, 0.5) 0%, transparent 100%);
+ position: relative;
+ z-index: 1;
+}
+
+.modal-header::after {
+ content: '';
+ position: absolute;
+ bottom: -2px;
+ left: 0;
+ right: 0;
+ height: 2px;
+ background: linear-gradient(90deg,
+ transparent,
+ #00d4ff 50%,
+ transparent
+ );
+ box-shadow: 0 0 10px #00d4ff;
+}
+
+.modal-header h2 {
+ margin: 0;
+ font-size: 1.5rem;
+ font-weight: 700;
+ letter-spacing: 2px;
+ color: #00d4ff;
+ text-shadow: 0 0 10px rgba(0,212,255,0.8);
+ text-transform: uppercase;
+ font-family: 'Courier New', monospace;
+}
+
+.modal-close {
+ background: rgba(0, 20, 30, 0.8);
+ border: 2px solid #00d4ff;
+ color: #00d4ff;
+ font-size: 1.5rem;
+ cursor: pointer;
+ padding: 0;
+ width: 40px;
+ height: 40px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: 0;
+ transition: all 0.2s ease;
+ clip-path: polygon(6px 0, 100% 0, calc(100% - 6px) 100%, 0 100%);
+}
+
+.modal-close:hover {
+ background: rgba(0, 212, 255, 0.2);
+ border-color: #00d4ff;
+ color: #00d4ff;
+ transform: scale(1.1);
+ box-shadow: 0 0 15px rgba(0,212,255,0.5);
+}
+
+.modal-body {
+ padding: 2.5rem;
+ overflow-y: auto;
+ flex: 1;
+ position: relative;
+ z-index: 1;
+}
+
+.modal-body::-webkit-scrollbar {
+ width: 8px;
+}
+
+.modal-body::-webkit-scrollbar-track {
+ background: #000;
+ border: 1px solid #00d4ff;
+ border-radius: 0;
+}
+
+.modal-body::-webkit-scrollbar-thumb {
+ background: linear-gradient(180deg, #00d4ff, #0088cc);
+ border-radius: 0;
+ border: 1px solid #00d4ff;
+ box-shadow: 0 0 10px rgba(0,212,255,0.5);
+}
+
+.modal-body::-webkit-scrollbar-thumb:hover {
+ background: #00d4ff;
+ box-shadow: 0 0 20px rgba(0,212,255,0.8);
+}
+
+.modal-body > *:first-child {
+ margin-top: 0;
+}
+
+.modal-body > *:last-child {
+ margin-bottom: 0;
+}
+
+@keyframes fadeIn {
+ from {
+ opacity: 0;
+ }
+ to {
+ opacity: 1;
+ }
+}
+
+@keyframes slideUp {
+ from {
+ opacity: 0;
+ transform: translateY(50px) scale(0.95);
+ }
+ to {
+ opacity: 1;
+ transform: translateY(0) scale(1);
+ }
+}
+
+@keyframes pulse {
+ 0%, 100% {
+ opacity: 1;
+ }
+ 50% {
+ opacity: 0.7;
+ }
+}
+
+@keyframes shimmer {
+ 0% {
+ background-position: -1000px 0;
+ }
+ 100% {
+ background-position: 1000px 0;
+ }
+}
+
+
+@media (max-width: 900px) {
+ .admin-wrapper { flex-direction: column; }
+ .admin-sidebar {
+ width: 100%;
+ flex-direction: row;
+ align-items: center;
+ gap: 10px;
+ border-right: none;
+ border-bottom: 1px solid var(--border);
+ overflow-x: auto;
+ }
+ .admin-content { padding: 1.4rem 6vw 2rem; max-width: 100%; }
+ .hero { flex-direction: column; align-items: flex-start; }
+ .hero-actions { width: 100%; }
+ .hero-actions .btn-ghost { width: 100%; text-align: center; }
+ .admin-grid { grid-template-columns: 1fr; }
+ .admin-card { padding: 1.4rem; }
+ .admin-card h2 { font-size: 1.2rem; margin-bottom: 1rem; }
+
+ /* Modal responsive */
+ .modal-box {
+ max-width: 95%;
+ width: 95%;
+ max-height: 90vh;
+ }
+ .modal-box.modal-large {
+ max-width: 95%;
+ width: 95%;
+ }
+ .modal-header {
+ padding: 1.5rem;
+ }
+ .modal-body {
+ padding: 1.5rem;
+ }
+}
+
+
+/* ===== COMMAND & CONQUER MATRIX NOTIFICATIONS SYSTEM ===== */
+
+.notif-header-modern {
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ margin-bottom: 32px;
+ padding-bottom: 24px;
+ border-bottom: 2px solid #00d4ff;
+ position: relative;
+ background: linear-gradient(90deg, rgba(0,212,255,0.05) 0%, transparent 100%);
+ clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 0 100%);
+}
+
+.notif-header-modern::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ height: 100%;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 2px
+ );
+ pointer-events: none;
+ animation: scanlines 8s linear infinite;
+}
+
+.notif-header-modern::after {
+ content: '';
+ position: absolute;
+ bottom: -2px;
+ left: 0;
+ width: 0;
+ height: 2px;
+ background: #00d4ff;
+ box-shadow: 0 0 10px #00d4ff, 0 0 20px #00d4ff;
+ animation: terminalLoad 3s ease-in-out infinite;
+}
+
+@keyframes terminalLoad {
+ 0%, 100% { width: 0%; opacity: 0.5; }
+ 50% { width: 100%; opacity: 1; }
+}
+
+@keyframes scanlines {
+ 0% { transform: translateY(0); }
+ 100% { transform: translateY(2px); }
+}
+
+.btn-add-notif {
+ background: #000;
+ border: 2px solid #00d4ff;
+ border-radius: 0;
+ color: #00d4ff;
+ font-weight: 700;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 2px;
+ padding: 14px 28px;
+ display: flex;
+ align-items: center;
+ gap: 10px;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ box-shadow:
+ 0 0 10px rgba(0, 212, 255, 0.3),
+ inset 0 0 10px rgba(0, 212, 255, 0.1);
+ position: relative;
+ overflow: hidden;
+ clip-path: polygon(0 0, calc(100% - 12px) 0, 100% 12px, 100% 100%, 0 100%);
+}
+
+.btn-add-notif::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: -100%;
+ width: 100%;
+ height: 100%;
+ background: linear-gradient(90deg, transparent, rgba(0, 212, 255, 0.3), transparent);
+ transition: left 0.4s ease;
+}
+
+.btn-add-notif::after {
+ content: '';
+ position: absolute;
+ inset: 0;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.05) 0px,
+ rgba(0, 212, 255, 0.05) 1px,
+ transparent 1px,
+ transparent 2px
+ );
+ pointer-events: none;
+}
+
+.btn-add-notif:hover::before {
+ left: 100%;
+}
+
+.btn-add-notif:hover {
+ background: rgba(0, 212, 255, 0.15);
+ box-shadow:
+ 0 0 20px rgba(0, 212, 255, 0.6),
+ inset 0 0 20px rgba(0, 212, 255, 0.2);
+ transform: translateX(2px);
+}
+
+.btn-add-notif:active {
+ transform: translateX(0);
+ box-shadow:
+ 0 0 10px rgba(0, 212, 255, 0.4),
+ inset 0 0 10px rgba(0, 212, 255, 0.3);
+}
+
+.notif-cards-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fill, minmax(360px, 1fr));
+ gap: 20px;
+}
+
+.notif-card {
+ background:
+ linear-gradient(135deg, rgba(0, 20, 30, 0.95) 0%, rgba(0, 10, 20, 0.98) 100%),
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.02) 0px,
+ rgba(0, 212, 255, 0.02) 1px,
+ transparent 1px,
+ transparent 2px
+ );
+ border: 2px solid #00d4ff;
+ border-radius: 0;
+ padding: 20px;
+ transition: all 0.2s ease;
+ position: relative;
+ overflow: hidden;
+ box-shadow:
+ 0 0 20px rgba(0, 212, 255, 0.2),
+ inset 0 0 20px rgba(0, 212, 255, 0.05);
+ clip-path: polygon(
+ 0 0,
+ calc(100% - 15px) 0,
+ 100% 15px,
+ 100% 100%,
+ 15px 100%,
+ 0 calc(100% - 15px)
+ );
+}
+
+.notif-card::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ width: 3px;
+ height: 0;
+ background: #00d4ff;
+ box-shadow: 0 0 10px #00d4ff;
+ transition: height 0.3s ease;
+}
+
+.notif-card::after {
+ content: '';
+ position: absolute;
+ top: 0;
+ right: 0;
+ width: 0;
+ height: 3px;
+ background: #00d4ff;
+ box-shadow: 0 0 10px #00d4ff;
+ transition: width 0.3s ease;
+}
+
+.notif-card:hover {
+ border-color: #00d4ff;
+ box-shadow:
+ 0 0 30px rgba(0, 212, 255, 0.5),
+ inset 0 0 30px rgba(0, 212, 255, 0.1);
+ transform: translateX(3px);
+}
+
+.notif-card:hover::before {
+ height: 100%;
+}
+
+.notif-card:hover::after {
+ width: 100%;
+}
+
+.notif-card.inactive {
+ opacity: 0.4;
+ border-color: #006620;
+ box-shadow:
+ 0 0 10px rgba(0, 212, 255, 0.1),
+ inset 0 0 10px rgba(0, 212, 255, 0.02);
+}
+
+.notif-card.inactive:hover {
+ opacity: 0.6;
+ border-color: #0088cc;
+}
+
+.notif-card-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ margin-bottom: 12px;
+}
+
+.notif-card-id {
+ font-size: 13px;
+ font-weight: 700;
+ color: #00d4ff;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ text-shadow: 0 0 5px #00d4ff;
+}
+
+.notif-card-type-badge {
+ padding: 4px 10px;
+ border-radius: 0;
+ font-size: 10px;
+ font-weight: 700;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 1.5px;
+ clip-path: polygon(5px 0, 100% 0, calc(100% - 5px) 100%, 0 100%);
+}
+
+.notif-card-type-badge.success {
+ background: rgba(0, 212, 255, 0.15);
+ color: #00d4ff;
+ border: 1px solid #00d4ff;
+ box-shadow: 0 0 5px rgba(0, 212, 255, 0.3);
+}
+
+.notif-card-type-badge.warn {
+ background: rgba(255, 165, 0, 0.15);
+ color: #ffa500;
+ border: 1px solid #ffa500;
+ box-shadow: 0 0 5px rgba(255, 165, 0, 0.3);
+}
+
+.notif-card-type-badge.error {
+ background: rgba(0, 212, 255, 0.15);
+ color: #00d4ff;
+ border: 1px solid #00d4ff;
+ box-shadow: 0 0 5px rgba(0, 212, 255, 0.3);
+}
+
+.notif-card-type-badge.info {
+ background: rgba(0, 200, 255, 0.15);
+ color: #00c8ff;
+ border: 1px solid #00c8ff;
+ box-shadow: 0 0 5px rgba(0, 200, 255, 0.3);
+}
+
+.notif-card-label {
+ font-size: 16px;
+ font-weight: 600;
+ color: var(--text);
+ margin-bottom: 8px;
+}
+
+.notif-card-message {
+ font-size: 14px;
+ color: var(--text-dim);
+ line-height: 1.5;
+ margin-bottom: 16px;
+ display: -webkit-box;
+ -webkit-line-clamp: 2;
+ -webkit-box-orient: vertical;
+ overflow: hidden;
+}
+
+.notif-card-meta {
+ display: flex;
+ gap: 12px;
+ font-size: 12px;
+ color: var(--text-dim);
+ margin-bottom: 16px;
+}
+
+.notif-card-meta-item {
+ display: flex;
+ align-items: center;
+ gap: 4px;
+}
+
+.notif-card-actions {
+ display: flex;
+ gap: 8px;
+ flex-wrap: wrap;
+}
+
+.notif-card-btn {
+ background: rgba(0, 0, 0, 0.8);
+ border: 1px solid #00d4ff;
+ border-radius: 0;
+ padding: 6px 14px;
+ font-size: 11px;
+ font-weight: 700;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ color: #00d4ff;
+ cursor: pointer;
+ transition: all 0.15s ease;
+ position: relative;
+ z-index: 10;
+ clip-path: polygon(4px 0, 100% 0, calc(100% - 4px) 100%, 0 100%);
+}
+
+.notif-card-btn:hover {
+ background: rgba(0, 212, 255, 0.2);
+ border-color: #00d4ff;
+ color: #00d4ff;
+ transform: translateX(2px);
+ box-shadow: 0 0 10px rgba(0, 212, 255, 0.4);
+}
+
+.notif-card-btn:active {
+ transform: translateX(0);
+ box-shadow: 0 0 5px rgba(0, 212, 255, 0.3);
+}
+
+.notif-card-btn.danger {
+ border-color: #00d4ff;
+ color: #00d4ff;
+}
+
+.notif-card-btn.danger:hover {
+ background: rgba(0, 212, 255, 0.2);
+ border-color: #00d4ff;
+ color: #00d4ff;
+ box-shadow: 0 0 10px rgba(0, 212, 255, 0.4);
+}
+
+/* Modal Styles */
+.notif-modal {
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ z-index: 10000;
+ display: none;
+ align-items: center;
+ justify-content: center;
+}
+
+.notif-modal.active {
+ display: flex;
+}
+
+.notif-modal-overlay {
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background: rgba(0, 0, 0, 0.85);
+ backdrop-filter: blur(8px);
+ -webkit-backdrop-filter: blur(8px);
+}
+
+.notif-modal-dialog {
+ position: relative;
+ background:
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 2px
+ ),
+ linear-gradient(135deg, rgba(0,25,38,0.98) 0%, rgba(0,15,23,0.98) 100%);
+ backdrop-filter: blur(10px);
+ -webkit-backdrop-filter: blur(10px);
+ border: 2px solid #00d4ff;
+ border-top: 5px solid #00d4ff;
+ border-radius: 0;
+ box-shadow:
+ 0 0 60px rgba(0,212,255,0.6),
+ inset 0 0 40px rgba(0,212,255,0.1);
+ max-width: 900px;
+ width: 95%;
+ max-height: 95vh;
+ display: flex;
+ flex-direction: column;
+ z-index: 1;
+ clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 0 100%);
+}
+
+.notif-modal-dialog::before {
+ content: '// NOTIFICATION TERMINAL';
+ position: absolute;
+ top: 10px;
+ left: 15px;
+ font-size: 8px;
+ color: #00d4ff;
+ text-shadow: 0 0 5px #00d4ff;
+ font-family: 'Courier New', monospace;
+ letter-spacing: 2px;
+ z-index: 10;
+}
+
+.notif-modal-header {
+ padding: 2.2rem 2rem 1.5rem 2rem;
+ border-bottom: 2px solid #00d4ff;
+ background: linear-gradient(135deg, rgba(0, 40, 60, 0.5) 0%, transparent 100%);
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ position: relative;
+ flex-shrink: 0;
+}
+
+.notif-modal-header::after {
+ content: '';
+ position: absolute;
+ bottom: -2px;
+ left: 0;
+ right: 0;
+ height: 2px;
+ background: linear-gradient(90deg,
+ transparent,
+ #00d4ff 50%,
+ transparent
+ );
+ box-shadow: 0 0 10px #00d4ff;
+}
+
+.notif-modal-header h3 {
+ margin: 0;
+ font-size: 1.3rem;
+ font-weight: 700;
+ letter-spacing: 2px;
+ color: #00d4ff;
+ text-shadow: 0 0 10px rgba(0,212,255,0.8);
+ text-transform: uppercase;
+ font-family: 'Courier New', monospace;
+}
+
+.notif-modal-close {
+ background: rgba(0, 20, 30, 0.8);
+ border: 2px solid #00d4ff;
+ color: #00d4ff;
+ font-size: 1.5rem;
+ cursor: pointer;
+ width: 35px;
+ height: 35px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: 0;
+ transition: all 0.2s ease;
+ clip-path: polygon(5px 0, 100% 0, calc(100% - 5px) 100%, 0 100%);
+}
+
+.notif-modal-close:hover {
+ background: rgba(0, 212, 255, 0.2);
+ border-color: #00d4ff;
+ color: #00d4ff;
+ transform: scale(1.1);
+ box-shadow: 0 0 15px rgba(0,212,255,0.5);
+}
+
+.notif-modal-body {
+ padding: 1.5rem 2rem 2rem 2rem;
+ overflow-y: auto;
+ flex: 1;
+}
+
+.notif-modal-body::-webkit-scrollbar {
+ width: 8px;
+}
+
+.notif-modal-body::-webkit-scrollbar-track {
+ background: #000;
+ border: 1px solid #00d4ff;
+ border-radius: 0;
+}
+
+.notif-modal-body::-webkit-scrollbar-thumb {
+ background: linear-gradient(180deg, #00d4ff, #0088cc);
+ border-radius: 0;
+ border: 1px solid #00d4ff;
+ box-shadow: 0 0 10px rgba(0,212,255,0.5);
+}
+
+.notif-modal-body::-webkit-scrollbar-thumb:hover {
+ background: #00d4ff;
+ box-shadow: 0 0 20px rgba(0,212,255,0.8);
+}
+
+.notif-modal-footer {
+ padding: 1.5rem 2rem;
+ border-top: 2px solid #00d4ff;
+ background: linear-gradient(135deg, rgba(0, 40, 60, 0.5) 0%, transparent 100%);
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ flex-shrink: 0;
+ position: relative;
+}
+
+.notif-modal-footer::before {
+ content: '';
+ position: absolute;
+ top: -2px;
+ left: 0;
+ right: 0;
+ height: 2px;
+ background: linear-gradient(90deg,
+ transparent,
+ #00d4ff 50%,
+ transparent
+ );
+ box-shadow: 0 0 10px #00d4ff;
+}
+
+.btn-primary {
+ background: rgba(0,20,30,0.8);
+ border: 2px solid #00d4ff;
+ border-radius: 0;
+ color: #00d4ff;
+ font-weight: 700;
+ font-size: 0.85rem;
+ padding: 0.75rem 1.5rem;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 2px;
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 0 100%);
+ box-shadow: 0 0 15px rgba(0,212,255,0.3);
+}
+
+.btn-primary:hover {
+ transform: translateY(-2px) translateX(2px);
+ background: rgba(0, 212, 255, 0.2);
+ box-shadow: 0 0 25px rgba(0,212,255,0.6);
+}
+
+.btn-secondary {
+ background: rgba(0, 10, 20, 0.6);
+ border: 1px solid #0088cc;
+ border-radius: 0;
+ color: #0088cc;
+ font-weight: 600;
+ font-size: 0.85rem;
+ padding: 0.75rem 1.5rem;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+}
+
+.btn-secondary:hover {
+ background: rgba(0, 212, 255, 0.1);
+ border-color: #00d4ff;
+ color: #00d4ff;
+ box-shadow: 0 0 10px rgba(0,212,255,0.3);
+}
+
+.btn-test {
+ background: rgba(255, 128, 0, 0.1);
+ border: 2px solid #ff8000;
+ border-radius: 0;
+ color: #ff8000;
+ font-weight: 700;
+ font-size: 0.85rem;
+ padding: 0.75rem 1.2rem;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ clip-path: polygon(0 0, calc(100% - 6px) 0, 100% 6px, 100% 100%, 0 100%);
+}
+
+.btn-test:hover {
+ background: rgba(255, 128, 0, 0.2);
+ border-color: #ff8000;
+ box-shadow: 0 0 15px rgba(255, 128, 0, 0.4);
+ transform: translateY(-2px);
+}
+
+/* Empty State */
+.notif-empty-state {
+ text-align: center;
+ padding: 60px 20px;
+ color: var(--text-dim);
+}
+
+.notif-empty-state svg {
+ width: 80px;
+ height: 80px;
+ margin-bottom: 20px;
+ opacity: 0.3;
+}
+
+.notif-empty-state h3 {
+ font-size: 18px;
+ margin-bottom: 8px;
+ color: var(--text);
+}
+
+.notif-empty-state p {
+ font-size: 14px;
+ margin-bottom: 24px;
+}
+
+/* Form Section Styling - Command Terminal */
+.form-section {
+ background:
+ rgba(0, 20, 30, 0.5),
+ repeating-linear-gradient(
+ 0deg,
+ rgba(0, 212, 255, 0.03) 0px,
+ rgba(0, 212, 255, 0.03) 1px,
+ transparent 1px,
+ transparent 2px
+ );
+ border-radius: 0;
+ padding: 20px;
+ border: 1px solid #00d4ff;
+ border-left: 3px solid #00d4ff;
+ box-shadow:
+ 0 0 15px rgba(0, 212, 255, 0.2),
+ inset 0 0 15px rgba(0, 212, 255, 0.05);
+ position: relative;
+}
+
+.form-section::before {
+ content: '// SYSTEM PARAMETERS';
+ position: absolute;
+ top: -10px;
+ left: 10px;
+ background: #000;
+ padding: 0 8px;
+ color: #00d4ff;
+ font-size: 10px;
+ font-family: 'Courier New', monospace;
+ letter-spacing: 2px;
+ text-shadow: 0 0 5px #00d4ff;
+}
+
+.form-section h4 {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ color: #00d4ff;
+ font-family: 'Courier New', monospace;
+ text-transform: uppercase;
+ letter-spacing: 2px;
+ text-shadow: 0 0 10px rgba(0, 212, 255, 0.5);
+ font-size: 14px;
+}
+
+.condition-config {
+ animation: terminalBoot 0.3s ease;
+}
+
+@keyframes terminalBoot {
+ from {
+ opacity: 0;
+ transform: translateX(-10px);
+ filter: blur(2px);
+ }
+ to {
+ opacity: 1;
+ transform: translateX(0);
+ filter: blur(0);
+ }
+}
+
+/* Responsive improvements */
+@media (max-width: 768px) {
+ .notif-cards-grid {
+ grid-template-columns: 1fr;
+ }
+
+ .notif-header-modern {
+ flex-direction: column;
+ gap: 16px;
+ }
+
+ .btn-add-notif {
+ width: 100%;
+ justify-content: center;
+ }
+}
diff --git a/frontend/static/admin_table.css b/frontend/static/admin_table.css
new file mode 100644
index 0000000..df077cd
--- /dev/null
+++ b/frontend/static/admin_table.css
@@ -0,0 +1,66 @@
+/* Modernes, dunkles Tabellen-Design wie im Screenshot */
+.admin-table {
+ width: 100%;
+ border-collapse: separate;
+ border-spacing: 0;
+ background: #181a20;
+ color: #e0e0e0;
+ font-family: 'Inter', 'Segoe UI', Arial, sans-serif;
+ font-size: 1.05rem;
+ margin-bottom: 1.5rem;
+ box-shadow: 0 2px 16px #000a;
+ border-radius: 12px;
+ overflow: hidden;
+}
+.admin-table th {
+ background: #23242a;
+ color: #4fc3f7;
+ font-weight: 600;
+ padding: 0.85rem 0.7rem;
+ border-bottom: 2px solid #23242a;
+ text-align: left;
+ font-size: 1.07rem;
+}
+.admin-table td {
+ padding: 0.7rem 0.7rem;
+ border-bottom: 1px solid #23242a;
+ vertical-align: middle;
+ font-size: 1.05rem;
+}
+.admin-table tr:last-child td {
+ border-bottom: none;
+}
+.admin-table tr:hover td {
+ background: #22232b;
+}
+.admin-table th:first-child,
+.admin-table td:first-child {
+ border-left: none;
+}
+.admin-table th:last-child,
+.admin-table td:last-child {
+ border-right: none;
+}
+/* Scrollbar für große Tabellen */
+.admin-table-wrapper {
+ max-width: 100%;
+ overflow-x: auto;
+}
+/* Farben für Status, Prozent, etc. */
+.status-badge {
+ display: inline-block;
+ padding: 0.2em 0.7em;
+ border-radius: 8px;
+ font-size: 0.98em;
+ font-weight: 500;
+ background: #23242a;
+ color: #ffb300;
+ margin-right: 0.3em;
+}
+.percent-bar {
+ display: inline-block;
+ height: 8px;
+ border-radius: 4px;
+ background: linear-gradient(90deg,#4fc3f7,#ffb300);
+ margin-right: 0.5em;
+}
diff --git a/frontend/static/ams.css b/frontend/static/ams.css
new file mode 100644
index 0000000..cfd6f53
--- /dev/null
+++ b/frontend/static/ams.css
@@ -0,0 +1,870 @@
+/* AMS Page Styles */
+
+/* Stats Section */
+.ams-stats {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
+ gap: 1.5rem;
+ margin-bottom: 2rem;
+}
+
+@media (min-width: 1400px) {
+ .ams-stats {
+ grid-template-columns: repeat(4, 1fr);
+ }
+}
+
+.stat-card {
+ background: linear-gradient(135deg, rgba(255, 255, 255, 0.03) 0%, rgba(255, 255, 255, 0.01) 100%);
+ backdrop-filter: blur(10px);
+ -webkit-backdrop-filter: blur(10px);
+ border: 1px solid rgba(255, 255, 255, 0.1);
+ border-radius: 16px;
+ padding: 1.75rem;
+ display: flex;
+ align-items: center;
+ gap: 1.25rem;
+ transition: all 0.4s cubic-bezier(0.4, 0, 0.2, 1);
+ box-shadow: 0 8px 24px rgba(0, 0, 0, 0.15), inset 0 1px 0 rgba(255, 255, 255, 0.05);
+ position: relative;
+ overflow: hidden;
+}
+
+.stat-card::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ height: 3px;
+ background: linear-gradient(90deg, var(--accent) 0%, #e67e22 100%);
+ opacity: 0;
+ transition: opacity 0.4s ease;
+}
+
+.stat-card:hover {
+ transform: translateY(-4px) scale(1.02);
+ box-shadow: 0 12px 32px rgba(0, 0, 0, 0.25),
+ 0 0 40px rgba(243, 156, 18, 0.15),
+ inset 0 1px 0 rgba(255, 255, 255, 0.1);
+ border-color: rgba(243, 156, 18, 0.3);
+}
+
+.stat-card:hover::before {
+ opacity: 1;
+}
+
+.stat-icon {
+ font-size: 2.75rem;
+ filter: drop-shadow(0 4px 12px rgba(243, 156, 18, 0.3));
+ transition: all 0.4s ease;
+}
+
+.stat-card:hover .stat-icon {
+ transform: scale(1.1) rotate(5deg);
+ filter: drop-shadow(0 6px 16px rgba(243, 156, 18, 0.5));
+}
+
+.stat-content {
+ flex: 1;
+}
+
+.stat-label {
+ font-size: 0.8125rem;
+ color: var(--text-dim);
+ margin-bottom: 0.5rem;
+ text-transform: uppercase;
+ letter-spacing: 1.2px;
+ font-weight: 600;
+ opacity: 0.8;
+}
+
+.stat-value {
+ font-size: 2.25rem;
+ font-weight: 800;
+ color: var(--text);
+ background: linear-gradient(135deg, var(--accent) 0%, #e67e22 50%, #d68910 100%);
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ background-clip: text;
+ letter-spacing: -0.5px;
+ line-height: 1;
+}
+
+/* Alerts KPI Card - Special Styling */
+.stat-card-alerts {
+ position: relative;
+ cursor: pointer;
+ overflow: visible;
+ z-index: 10;
+}
+
+.stat-card-alerts .stat-icon {
+ animation: warningPulse 2s ease-in-out infinite;
+}
+
+.stat-card-alerts .stat-value {
+ background: linear-gradient(135deg, #ffa726 0%, #ff6b6b 50%, #ee5a6f 100%);
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ background-clip: text;
+}
+
+.stat-card-alerts::before {
+ background: linear-gradient(90deg, #ffa726 0%, #ff6b6b 100%);
+}
+
+.stat-card-alerts:hover {
+ border-color: rgba(255, 167, 38, 0.5);
+ box-shadow: 0 12px 32px rgba(0, 0, 0, 0.25),
+ 0 0 40px rgba(255, 167, 38, 0.25),
+ inset 0 1px 0 rgba(255, 255, 255, 0.1);
+}
+
+@keyframes warningPulse {
+ 0%, 100% {
+ transform: scale(1);
+ filter: drop-shadow(0 4px 12px rgba(255, 167, 38, 0.3));
+ }
+ 50% {
+ transform: scale(1.1);
+ filter: drop-shadow(0 6px 20px rgba(255, 167, 38, 0.6));
+ }
+}
+
+/* Alerts Dropdown (Hidden by default, shown on hover/click) */
+.alerts-dropdown {
+ position: absolute;
+ top: calc(100% + 12px);
+ left: 0;
+ right: 0;
+ background: linear-gradient(135deg, rgba(255, 255, 255, 0.05) 0%, rgba(255, 255, 255, 0.02) 100%);
+ backdrop-filter: blur(16px);
+ -webkit-backdrop-filter: blur(16px);
+ border: 1px solid rgba(255, 167, 38, 0.3);
+ border-radius: 16px;
+ padding: 1rem;
+ box-shadow: 0 12px 40px rgba(0, 0, 0, 0.3), 0 0 60px rgba(255, 167, 38, 0.2);
+ z-index: 50;
+ opacity: 0;
+ pointer-events: none;
+ transform: translateY(-10px);
+ transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
+ max-height: 300px;
+ overflow-y: auto;
+}
+
+.stat-card-alerts:hover .alerts-dropdown,
+.stat-card-alerts.active .alerts-dropdown {
+ opacity: 1;
+ pointer-events: all;
+ transform: translateY(0);
+}
+
+.alerts-dropdown-header {
+ font-size: 0.75rem;
+ color: var(--text-dim);
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ font-weight: 700;
+ margin-bottom: 0.75rem;
+ padding-bottom: 0.5rem;
+ border-bottom: 1px solid rgba(255, 167, 38, 0.2);
+}
+
+.alerts-dropdown-empty {
+ text-align: center;
+ padding: 1.5rem;
+ color: var(--text-dim);
+ font-size: 0.875rem;
+}
+
+.alerts-dropdown-list {
+ display: flex;
+ flex-direction: column;
+ gap: 8px;
+}
+
+/* Warning Card Special Style */
+.stat-card-warning {
+ background: linear-gradient(135deg, rgba(255, 193, 7, 0.15) 0%, rgba(255, 152, 0, 0.1) 100%);
+ border-color: rgba(255, 193, 7, 0.3);
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ flex-direction: column;
+ text-align: center;
+}
+
+.stat-card-warning:hover {
+ border-color: #ffc107;
+ box-shadow: 0 6px 12px rgba(0, 0, 0, 0.15),
+ 0 0 20px rgba(255, 193, 7, 0.2);
+}
+
+.warning-icon {
+ font-size: 3.5rem;
+ margin-bottom: 0.5rem;
+ filter: drop-shadow(0 4px 8px rgba(255, 193, 7, 0.3));
+}
+
+.warning-content {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ gap: 0.25rem;
+}
+
+.warning-label {
+ font-size: 0.75rem;
+ color: var(--text-dim);
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ font-weight: 600;
+}
+
+.warning-value {
+ font-size: 3rem;
+ font-weight: 700;
+ color: #ffc107;
+ text-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
+}
+
+/* AMS Grid */
+.ams-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
+ gap: 1.5rem;
+}
+
+/* AMS Unit Card */
+.ams-unit {
+ background: linear-gradient(135deg, rgba(255, 255, 255, 0.04) 0%, rgba(255, 255, 255, 0.01) 100%);
+ backdrop-filter: blur(12px);
+ -webkit-backdrop-filter: blur(12px);
+ border: 1px solid rgba(255, 255, 255, 0.08);
+ border-radius: 20px;
+ overflow: hidden;
+ transition: all 0.4s cubic-bezier(0.4, 0, 0.2, 1);
+ box-shadow: 0 8px 32px rgba(0, 0, 0, 0.2), inset 0 1px 0 rgba(255, 255, 255, 0.05);
+ position: relative;
+}
+
+.ams-unit::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background: radial-gradient(circle at top left, rgba(243, 156, 18, 0.05) 0%, transparent 50%);
+ opacity: 0;
+ transition: opacity 0.4s ease;
+ pointer-events: none;
+}
+
+.ams-unit:hover {
+ transform: translateY(-6px) scale(1.01);
+ box-shadow: 0 16px 48px rgba(0, 0, 0, 0.3),
+ 0 0 60px rgba(243, 156, 18, 0.15),
+ inset 0 1px 0 rgba(255, 255, 255, 0.1);
+ border-color: rgba(243, 156, 18, 0.3);
+}
+
+.ams-unit:hover::before {
+ opacity: 1;
+}
+
+/* AMS Header */
+.ams-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 1.5rem;
+ background: linear-gradient(135deg, rgba(243, 156, 18, 0.12) 0%, rgba(230, 126, 34, 0.08) 100%);
+ border-bottom: 2px solid rgba(243, 156, 18, 0.3);
+ position: relative;
+ overflow: hidden;
+}
+
+.ams-header::after {
+ content: '';
+ position: absolute;
+ bottom: 0;
+ left: 0;
+ right: 0;
+ height: 1px;
+ background: linear-gradient(90deg, transparent 0%, rgba(243, 156, 18, 0.5) 50%, transparent 100%);
+}
+
+.ams-title {
+ display: flex;
+ align-items: flex-start;
+ gap: 0.75rem;
+}
+
+.ams-icon {
+ font-size: 1.5rem;
+}
+
+.ams-title h3 {
+ margin: 0 0 4px 0;
+ color: var(--text);
+ font-size: 1.25rem;
+ font-weight: 600;
+}
+
+.ams-meta {
+ display: flex;
+ gap: 12px;
+ font-size: 11px;
+ color: var(--text-dim);
+ margin-top: 2px;
+}
+
+.ams-status-group {
+ display: flex;
+ flex-direction: column;
+ align-items: flex-end;
+ gap: 6px;
+}
+
+.ams-status {
+ display: flex;
+ align-items: center;
+}
+
+.ams-connection {
+ display: flex;
+ gap: 6px;
+ font-size: 11px;
+ color: var(--text-dim);
+ align-items: center;
+}
+
+.connection-type {
+ background: rgba(255, 255, 255, 0.05);
+ backdrop-filter: blur(8px);
+ padding: 3px 8px;
+ border-radius: 8px;
+ font-weight: 700;
+ border: 1px solid rgba(255, 255, 255, 0.1);
+ transition: all 0.3s ease;
+}
+
+.connection-type:hover {
+ background: rgba(255, 255, 255, 0.08);
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
+}
+
+.connection-quality {
+ color: var(--accent-2);
+}
+
+.ams-printer {
+ font-size: 11px;
+}
+
+.printer-badge {
+ background: rgba(255, 255, 255, 0.05);
+ backdrop-filter: blur(8px);
+ padding: 4px 10px;
+ border-radius: 10px;
+ color: var(--text);
+ font-weight: 600;
+ border: 1px solid rgba(255, 255, 255, 0.1);
+ transition: all 0.3s ease;
+}
+
+.printer-badge:hover {
+ background: rgba(255, 255, 255, 0.08);
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
+}
+
+/* Status Badges */
+.status-badge {
+ display: inline-block;
+ padding: 0.375rem 0.875rem;
+ border-radius: 20px;
+ font-size: 0.6875rem;
+ font-weight: 700;
+ text-transform: uppercase;
+ letter-spacing: 0.8px;
+ transition: all 0.3s ease;
+ border: 1px solid transparent;
+}
+
+.status-badge.status-online {
+ background: rgba(46, 213, 115, 0.15);
+ color: var(--success);
+ border-color: rgba(46, 213, 115, 0.3);
+ box-shadow: 0 4px 12px rgba(46, 213, 115, 0.15), inset 0 1px 0 rgba(255, 255, 255, 0.1);
+}
+
+.status-badge.status-online:hover {
+ background: rgba(46, 213, 115, 0.25);
+ box-shadow: 0 6px 16px rgba(46, 213, 115, 0.3);
+}
+
+.status-badge.status-offline {
+ background: rgba(231, 76, 60, 0.15);
+ color: var(--error);
+ border-color: rgba(231, 76, 60, 0.3);
+ box-shadow: 0 4px 12px rgba(231, 76, 60, 0.15), inset 0 1px 0 rgba(255, 255, 255, 0.1);
+}
+
+.status-badge.status-offline:hover {
+ background: rgba(231, 76, 60, 0.25);
+ box-shadow: 0 6px 16px rgba(231, 76, 60, 0.3);
+}
+
+.status-badge.status-warning {
+ background: rgba(255, 167, 38, 0.15);
+ color: var(--warning);
+ border-color: rgba(255, 167, 38, 0.3);
+ box-shadow: 0 4px 12px rgba(255, 167, 38, 0.15), inset 0 1px 0 rgba(255, 255, 255, 0.1);
+}
+
+.status-badge.status-warning:hover {
+ background: rgba(255, 167, 38, 0.25);
+ box-shadow: 0 6px 16px rgba(255, 167, 38, 0.3);
+}
+
+/* AMS Slots Grid */
+.ams-slots {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 0.75rem;
+ padding: 1.25rem;
+}
+
+/* Individual Slot */
+.slot {
+ background: linear-gradient(135deg, rgba(255, 255, 255, 0.05) 0%, rgba(255, 255, 255, 0.02) 100%);
+ backdrop-filter: blur(8px);
+ -webkit-backdrop-filter: blur(8px);
+ border: 2px solid rgba(255, 255, 255, 0.08);
+ border-radius: 14px;
+ padding: 1.125rem;
+ transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
+ cursor: pointer;
+ min-height: 100px;
+ display: flex;
+ flex-direction: column;
+ position: relative;
+ overflow: hidden;
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15), inset 0 1px 0 rgba(255, 255, 255, 0.05);
+}
+
+.slot::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ height: 4px;
+ background: linear-gradient(90deg, var(--accent) 0%, #e67e22 100%);
+ opacity: 0;
+ transition: opacity 0.3s ease;
+}
+
+.slot::after {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background: radial-gradient(circle at bottom right, rgba(243, 156, 18, 0.08) 0%, transparent 70%);
+ opacity: 0;
+ transition: opacity 0.3s ease;
+ pointer-events: none;
+}
+
+.slot:hover {
+ border-color: rgba(243, 156, 18, 0.4);
+ transform: translateY(-3px) scale(1.02);
+ box-shadow: 0 8px 24px rgba(0, 0, 0, 0.25),
+ 0 0 30px rgba(243, 156, 18, 0.15),
+ inset 0 1px 0 rgba(255, 255, 255, 0.1);
+}
+
+.slot:hover::before,
+.slot:hover::after {
+ opacity: 1;
+}
+
+.slot-empty {
+ background: rgba(231, 76, 60, 0.03);
+ backdrop-filter: blur(6px);
+ border: 2px dashed rgba(255, 255, 255, 0.15);
+ border-radius: 14px;
+ padding: 1.125rem;
+ min-height: 100px;
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ justify-content: center;
+ text-align: center;
+ opacity: 0.6;
+ transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
+}
+
+.slot-empty:hover {
+ opacity: 0.9;
+ border-color: rgba(243, 156, 18, 0.4);
+ background: rgba(231, 76, 60, 0.05);
+ transform: scale(1.02);
+ box-shadow: 0 4px 16px rgba(0, 0, 0, 0.15);
+}
+
+.slot-number {
+ font-size: 0.75rem;
+ color: var(--text-dim);
+ text-transform: uppercase;
+ font-weight: 600;
+ letter-spacing: 0.5px;
+ margin-bottom: 0.5rem;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+}
+
+.slot-icon {
+ font-size: 14px;
+ opacity: 0.8;
+}
+
+.slot-label {
+ font-size: 0.875rem;
+ color: var(--text-dim);
+ font-weight: 500;
+}
+
+/* Slot with Material */
+.slot-color {
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ height: 6px;
+ border-radius: 10px 10px 0 0;
+}
+
+.slot-material {
+ font-size: 0.9375rem;
+ color: var(--text);
+ font-weight: 600;
+ margin-bottom: 0.25rem;
+ line-height: 1.3;
+}
+
+.slot-brand {
+ font-size: 0.8125rem;
+ color: var(--text-dim);
+ margin-bottom: 0.5rem;
+}
+
+.slot-weight {
+ font-size: 0.875rem;
+ color: var(--accent);
+ font-weight: 600;
+ margin-top: auto;
+}
+
+.slot-progress {
+ width: 100%;
+ height: 12px;
+ background: rgba(255, 255, 255, 0.08);
+ border: 1px solid rgba(255, 255, 255, 0.16);
+ border-radius: 12px;
+ overflow: hidden;
+ margin-top: 0.65rem;
+ box-shadow: inset 0 2px 6px rgba(0, 0, 0, 0.35), 0 2px 8px rgba(0, 0, 0, 0.25);
+}
+
+.slot-progress-bar {
+ height: 100%;
+ background: linear-gradient(90deg, #ff5f6d 0%, #ffa726 40%, #2ed573 100%);
+ transition: width 0.45s cubic-bezier(0.4, 0, 0.2, 1);
+ border-radius: 12px;
+ box-shadow: 0 0 14px rgba(46, 213, 115, 0.55), 0 0 18px rgba(255, 167, 38, 0.35);
+ position: relative;
+ overflow: hidden;
+}
+
+.slot-progress-bar::after {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background: linear-gradient(90deg, transparent 0%, rgba(255, 255, 255, 0.35) 50%, transparent 100%);
+ animation: shimmer 1.6s infinite;
+}
+
+@keyframes shimmer {
+ 0% { transform: translateX(-100%); }
+ 100% { transform: translateX(100%); }
+}
+
+.slot-progress-bar.low {
+ background: linear-gradient(90deg, #ff5f6d 0%, #ff9f43 100%);
+ box-shadow: 0 0 14px rgba(231, 76, 60, 0.6), 0 0 22px rgba(255, 159, 67, 0.4);
+}
+
+/* Notification Toast */
+.notification {
+ position: fixed;
+ bottom: 2rem;
+ right: 2rem;
+ padding: 1rem 1.5rem;
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
+ opacity: 0;
+ transform: translateY(20px);
+ transition: all 0.3s ease;
+ z-index: 2000;
+}
+
+.notification.show {
+ opacity: 1;
+ transform: translateY(0);
+}
+
+.notification-success {
+ border-left: 4px solid var(--success);
+}
+
+.notification-error {
+ border-left: 4px solid var(--error);
+}
+
+.notification-warning {
+ border-left: 4px solid var(--warning);
+}
+
+.notification-info {
+ border-left: 4px solid var(--accent);
+}
+
+/* Loading Overlay */
+.loading-overlay {
+ position: fixed;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background: rgba(0, 0, 0, 0.85);
+ backdrop-filter: blur(12px);
+ -webkit-backdrop-filter: blur(12px);
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ justify-content: center;
+ z-index: 9999;
+ color: var(--text);
+}
+
+.loading-spinner {
+ width: 60px;
+ height: 60px;
+ border: 5px solid rgba(255, 255, 255, 0.1);
+ border-top-color: var(--accent);
+ border-right-color: #e67e22;
+ border-radius: 50%;
+ animation: spin 1s cubic-bezier(0.68, -0.55, 0.265, 1.55) infinite;
+ margin-bottom: 1.5rem;
+ box-shadow: 0 0 30px rgba(243, 156, 18, 0.4);
+}
+
+@keyframes spin {
+ 0% { transform: rotate(0deg); }
+ 100% { transform: rotate(360deg); }
+}
+
+/* Responsive */
+@media (max-width: 1200px) {
+ .ams-grid {
+ grid-template-columns: repeat(2, 1fr);
+ }
+}
+
+@media (max-width: 768px) {
+ .ams-stats {
+ grid-template-columns: 1fr;
+ }
+
+ .ams-grid {
+ grid-template-columns: 1fr;
+ }
+
+ .ams-slots {
+ grid-template-columns: 1fr;
+ }
+}
+
+/* Loading Animation */
+@keyframes pulse {
+ 0%, 100% {
+ opacity: 1;
+ }
+ 50% {
+ opacity: 0.5;
+ }
+}
+
+.ams-unit.loading {
+ animation: pulse 2s ease-in-out infinite;
+}
+
+/* Active Slot Indicator */
+.slot.active {
+ border-color: var(--accent);
+ box-shadow: 0 0 0 2px rgba(243, 156, 18, 0.2);
+}
+
+.slot.active::before {
+ opacity: 1;
+}
+
+/* Alerts */
+.alerts-list {
+ display: grid;
+ gap: 10px;
+}
+
+.alert-item {
+ display: grid;
+ grid-template-columns: auto 1fr auto;
+ align-items: center;
+ gap: 12px;
+ background: rgba(255, 255, 255, 0.03);
+ backdrop-filter: blur(10px);
+ border: 1px solid rgba(255, 167, 38, 0.2);
+ border-radius: 12px;
+ padding: 12px 14px;
+ cursor: pointer;
+ transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1), inset 0 1px 0 rgba(255, 255, 255, 0.05);
+}
+
+.alert-item:hover {
+ border-color: rgba(255, 167, 38, 0.5);
+ background: rgba(255, 167, 38, 0.08);
+ transform: translateX(4px) scale(1.01);
+ box-shadow: 0 6px 20px rgba(0, 0, 0, 0.15), 0 0 30px rgba(255, 167, 38, 0.15);
+}
+
+.alert-dot {
+ width: 12px;
+ height: 12px;
+ border-radius: 50%;
+ background: var(--warning);
+ box-shadow: 0 0 12px var(--warning), inset 0 1px 2px rgba(255, 255, 255, 0.3);
+ animation: pulse 2s infinite;
+}
+
+@keyframes pulse {
+ 0%, 100% { opacity: 1; transform: scale(1); }
+ 50% { opacity: 0.7; transform: scale(0.95); }
+}
+
+.alert-text {
+ font-size: 13px;
+ color: var(--text);
+}
+
+.alert-meta {
+ font-size: 12px;
+ color: var(--text-dim);
+}
+
+/* Slot Actions */
+.slot-actions {
+ display: flex;
+ gap: 6px;
+ margin-top: 10px;
+ padding-top: 10px;
+ border-top: 1px solid rgba(255, 255, 255, 0.08);
+}
+
+.slot-action-btn {
+ flex: 1;
+ background: rgba(255, 255, 255, 0.05);
+ backdrop-filter: blur(8px);
+ border: 1px solid rgba(255, 255, 255, 0.15);
+ border-radius: 10px;
+ padding: 8px 6px;
+ color: var(--text);
+ font-size: 18px;
+ cursor: pointer;
+ transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1), inset 0 1px 0 rgba(255, 255, 255, 0.05);
+}
+
+.slot-action-btn:hover {
+ background: rgba(243, 156, 18, 0.15);
+ border-color: rgba(243, 156, 18, 0.4);
+ transform: translateY(-2px) scale(1.08);
+ box-shadow: 0 6px 16px rgba(0, 0, 0, 0.2), 0 0 20px rgba(243, 156, 18, 0.3);
+}
+
+.slot-action-btn:active {
+ transform: translateY(0) scale(1);
+}
+
+/* Smooth Scroll Animations */
+@keyframes fadeInUp {
+ from {
+ opacity: 0;
+ transform: translateY(20px);
+ }
+ to {
+ opacity: 1;
+ transform: translateY(0);
+ }
+}
+
+.ams-stats, .ams-grid, .panel {
+ animation: fadeInUp 0.6s ease-out;
+}
+
+.ams-unit:nth-child(1) { animation-delay: 0.1s; }
+.ams-unit:nth-child(2) { animation-delay: 0.2s; }
+.ams-unit:nth-child(3) { animation-delay: 0.3s; }
+.ams-unit:nth-child(4) { animation-delay: 0.4s; }
+
+/* Single AMS Mode - Hide AMS 2-4 */
+.ams-grid.single-mode .ams-unit[data-ams="2"],
+.ams-grid.single-mode .ams-unit[data-ams="3"],
+.ams-grid.single-mode .ams-unit[data-ams="4"] {
+ display: none;
+}
+
+.ams-grid.single-mode {
+ grid-template-columns: 1fr;
+ max-width: 800px;
+ margin: 0 auto;
+}
+
+/* Panel Improvements */
+section.panel {
+ background: linear-gradient(135deg, rgba(255, 255, 255, 0.04) 0%, rgba(255, 255, 255, 0.01) 100%);
+ backdrop-filter: blur(12px);
+ -webkit-backdrop-filter: blur(12px);
+ border: 1px solid rgba(255, 255, 255, 0.08);
+ border-radius: 16px;
+ box-shadow: 0 8px 32px rgba(0, 0, 0, 0.2), inset 0 1px 0 rgba(255, 255, 255, 0.05);
+}
+
+.slot-action-btn span {
+ display: inline-block;
+}
diff --git a/frontend/static/ams.js b/frontend/static/ams.js
new file mode 100644
index 0000000..f99b8e9
--- /dev/null
+++ b/frontend/static/ams.js
@@ -0,0 +1,802 @@
+// AMS Monitoring JavaScript
+
+let amsData = [];
+let spools = [];
+let materials = [];
+let liveState = {}; // Live-State von MQTT
+
+let isLoading = false;
+let singleAmsMode = true; // Single AMS Mode aktiviert
+
+// === INIT ===
+document.addEventListener('DOMContentLoaded', () => {
+ // Check if multi-AMS mode was requested
+ const urlParams = new URLSearchParams(window.location.search);
+ if (urlParams.get('mode') === 'multi') {
+ singleAmsMode = false;
+ localStorage.setItem('amsMode', 'multi');
+ } else if (urlParams.get('mode') === 'single') {
+ singleAmsMode = true;
+ localStorage.setItem('amsMode', 'single');
+ } else {
+ // Load from localStorage or default to single
+ const savedMode = localStorage.getItem('amsMode');
+ singleAmsMode = savedMode !== 'multi';
+ }
+
+ applySingleAmsMode();
+ loadData();
+
+ // Auto-refresh every 15 seconds (reduziert Server-Last)
+ setInterval(() => {
+ if (!isLoading) {
+ loadData();
+ }
+ }, 15000);
+
+ // Setup AMS mode toggle listeners
+ setupAmsModeToogle();
+});
+
+// Apply Single AMS Mode styling
+function applySingleAmsMode() {
+ const amsGrid = document.querySelector('.ams-grid');
+ if (amsGrid && singleAmsMode) {
+ amsGrid.classList.add('single-mode');
+ } else if (amsGrid) {
+ amsGrid.classList.remove('single-mode');
+ }
+}
+
+// Setup AMS Mode Toggle
+function setupAmsModeToogle() {
+ // Find Single AMS and Multi AMS radio buttons by name
+ const amsRadios = document.querySelectorAll('input[name="ams_mode"]');
+
+ amsRadios.forEach(radio => {
+ // Set initial checked state
+ if (radio.value === 'single' && singleAmsMode) {
+ radio.checked = true;
+ } else if (radio.value === 'multi' && !singleAmsMode) {
+ radio.checked = true;
+ }
+
+ // Add change listener
+ radio.addEventListener('change', () => {
+ if (radio.checked) {
+ const newMode = radio.value;
+ localStorage.setItem('amsMode', newMode);
+ // Reload page with new mode
+ window.location.href = `/ams?mode=${newMode}`;
+ }
+ });
+ });
+}
+
+// === LOAD DATA ===
+async function loadData() {
+ if (isLoading) return;
+
+ isLoading = true;
+ const loadingOverlay = document.getElementById('loadingOverlay');
+
+ // Zeige Loading nur beim ersten Mal
+ if (spools.length === 0 && loadingOverlay) {
+ loadingOverlay.style.display = 'flex';
+ }
+
+ try {
+ // Lade Spools, Materials und Live-State parallel (schneller)
+ await Promise.all([
+ loadSpools(),
+ loadMaterials(),
+ loadLiveState()
+ ]);
+
+ // Verarbeite AMS-Daten (basiert auf spools + live-state)
+ loadAMSData();
+
+ updateStats();
+ renderAMSUnits();
+ renderAlerts();
+
+ } catch (error) {
+ console.error('Fehler beim Laden der AMS-Daten:', error);
+ showNotification('Fehler beim Laden der AMS-Daten', 'error');
+ } finally {
+ isLoading = false;
+ if (loadingOverlay) {
+ loadingOverlay.style.display = 'none';
+ }
+ }
+}
+
+async function loadSpools() {
+ try {
+ const response = await fetch('/api/spools/');
+ spools = await response.json();
+ } catch (error) {
+ console.error('Fehler beim Laden der Spulen:', error);
+ }
+}
+
+async function loadMaterials() {
+ try {
+ const response = await fetch('/api/materials/');
+ materials = await response.json();
+ } catch (error) {
+ console.error('Fehler beim Laden der Materialien:', error);
+ }
+}
+
+let amsDevices = []; // normalized devices from /api/ams/
+
+async function loadLiveState() {
+ try {
+ const response = await fetch('/api/ams/');
+ if (response.ok) {
+ const data = await response.json();
+ amsDevices = Array.isArray(data.devices) ? data.devices : [];
+ console.log('[AMS] Normalized AMS geladen:', amsDevices.length, 'Geräte');
+ } else {
+ amsDevices = [];
+ }
+ } catch (error) {
+ console.error('Fehler beim Laden des AMS-Endpunkts:', error);
+ amsDevices = [];
+ }
+}
+
+function loadAMSData() {
+ // Generate AMS structure based on spools with ams_slot
+ // Single AMS Mode: Only create 1 AMS unit
+ // Multi AMS Mode: Create 4 AMS units
+
+ // Build AMS data from normalized /api/ams/ response (amsDevices)
+ let printerName = 'Kein Drucker';
+
+ if (amsDevices.length === 0) {
+ // No normalized devices — fallback to empty single AMS
+ amsData = [
+ { id: 1, online: false, slots: [], serial: 'AMS-001', firmware: '–', signal: '–', printer: null, temp: null, humidity: null }
+ ];
+ } else {
+ if (singleAmsMode) {
+ // Use first device + first AMS unit
+ const dev = amsDevices[0];
+ const amsUnit = (dev.ams_units && dev.ams_units[0]) || null;
+ printerName = dev.device_serial ? `Bambu ${dev.device_serial.substring(0,8)}...` : printerName;
+
+ amsData = [
+ {
+ id: 1,
+ online: !!dev?.online,
+ slots: [],
+ serial: dev.device_serial || (amsUnit ? `AMS-${amsUnit.ams_id}` : 'AMS-001'),
+ firmware: dev.firmware || '–',
+ signal: dev.signal || '–',
+ printer: printerName,
+ temp: amsUnit ? amsUnit.temp : null,
+ humidity: amsUnit ? amsUnit.humidity : null
+ }
+ ];
+
+ if (amsUnit && Array.isArray(amsUnit.trays)) {
+ amsUnit.trays.forEach((tray, index) => {
+ const ams = amsData[0];
+ const matchingSpool = spools.find(s => (s.ams_slot == index) || (s.rfid_uid && s.rfid_uid === tray.tag_uid));
+ const material = matchingSpool ? materials.find(m => m.id === matchingSpool.material_id) : null;
+
+ ams.slots[index] = {
+ slot: index + 1,
+ spool: matchingSpool || {
+ id: null,
+ ams_slot: index,
+ material_type: tray.material || null,
+ color: tray.color || '#000000',
+ weight_remaining: tray.remain_percent != null ? Math.round((tray.remain_percent || 0) * 1000) : 0,
+ weight_total: tray.total_len || 1000,
+ rfid_uid: tray.tag_uid,
+ tray_uuid: tray.tray_uuid,
+ from_live_state: true
+ },
+ material: material,
+ liveData: tray
+ };
+ });
+ }
+ } else {
+ // Multi mode: create one amsData entry per device/ams_unit
+ amsData = [];
+ amsDevices.forEach((dev) => {
+ (dev.ams_units || []).forEach((u, unitIndex) => {
+ const idx = amsData.length + 1;
+ const amsEntry = {
+ id: idx,
+ online: !!dev?.online,
+ slots: [],
+ serial: dev.device_serial || `AMS-${u.ams_id}`,
+ firmware: dev.firmware || '–',
+ signal: dev.signal || '–',
+ printer: dev.device_serial ? `Bambu ${dev.device_serial.substring(0,8)}...` : null,
+ temp: u.temp || null,
+ humidity: u.humidity || null
+ };
+
+ if (Array.isArray(u.trays)) {
+ u.trays.forEach((tray, index) => {
+ const matchingSpool = spools.find(s => (s.ams_slot == index) || (s.rfid_uid && s.rfid_uid === tray.tag_uid));
+ const material = matchingSpool ? materials.find(m => m.id === matchingSpool.material_id) : null;
+
+ amsEntry.slots[index] = {
+ slot: index + 1,
+ spool: matchingSpool || {
+ id: null,
+ ams_slot: index,
+ material_type: tray.material || null,
+ color: tray.color || '#000000',
+ weight_remaining: tray.remain_percent != null ? Math.round((tray.remain_percent || 0) * 1000) : 0,
+ weight_total: tray.total_len || 1000,
+ rfid_uid: tray.tag_uid,
+ tray_uuid: tray.tray_uuid,
+ from_live_state: true
+ },
+ material: material,
+ liveData: tray
+ };
+ });
+ }
+
+ amsData.push(amsEntry);
+ });
+ });
+ }
+ }
+}
+
+// === UPDATE STATS ===
+function updateStats() {
+ const onlineCount = amsData.filter(a => a.online).length;
+ const activeSlots = amsData.reduce((sum, ams) => sum + ams.slots.filter(s => s).length, 0);
+
+ // Calculate available filament (kg)
+ let totalFilament = 0;
+ amsData.forEach(ams => {
+ ams.slots.forEach(slot => {
+ if (slot && slot.spool) {
+ totalFilament += getRemaining(slot.spool);
+ }
+ });
+ });
+
+ // Count warnings (low spools in AMS)
+ let warnings = 0;
+ amsData.forEach(ams => {
+ ams.slots.forEach(slot => {
+ if (slot && slot.spool) {
+ const remaining = getRemaining(slot.spool);
+ const percentage = getPercentage(slot.spool);
+ if (percentage <= 20 || remaining < 200) {
+ warnings++;
+ }
+ }
+ });
+ });
+
+ document.getElementById('amsOnlineCount').textContent = onlineCount;
+ document.getElementById('amsActiveSlots').textContent = activeSlots;
+ document.getElementById('amsAvailableFilament').textContent = (totalFilament / 1000).toFixed(2) + 'kg';
+
+ // Update warning count in KPI card
+ updateWarningCount();
+}
+
+// === RENDER AMS UNITS ===
+function renderAMSUnits() {
+ amsData.forEach((ams, index) => {
+ const amsId = index + 1;
+ const statusElement = document.getElementById(`amsStatus${amsId}`);
+ const slotsContainer = document.getElementById(`amsSlots${amsId}`);
+
+ // Update AMS device info
+ const serialEl = document.getElementById(`amsSerial${amsId}`);
+ const firmwareEl = document.getElementById(`amsFirmware${amsId}`);
+ const connectionEl = document.getElementById(`amsConnection${amsId}`);
+ const signalEl = document.getElementById(`amsSignal${amsId}`);
+ const printerEl = document.getElementById(`amsPrinter${amsId}`);
+
+ if (serialEl) serialEl.textContent = `SN: ${ams.serial || '–'}`;
+ if (firmwareEl) firmwareEl.textContent = `FW: ${ams.firmware || '–'}`;
+ if (signalEl) signalEl.textContent = ams.signal || '–';
+ if (printerEl) {
+ printerEl.innerHTML = ams.printer
+ ? `${ams.printer} `
+ : `Kein Drucker `;
+ }
+
+ // Update status badge
+ if (ams.online) {
+ statusElement.innerHTML = 'Online ';
+ } else {
+ statusElement.innerHTML = 'Offline ';
+ }
+
+ // Render slots
+ let slotsHTML = '';
+ for (let i = 0; i < 4; i++) {
+ const slotNumber = i + 1;
+ const slot = ams.slots[i];
+
+ if (slot && slot.spool && slot.material) {
+ const spool = slot.spool;
+ const material = slot.material;
+ const remaining = getRemaining(spool);
+ const percentage = getPercentage(spool);
+ const color = spool.tray_color ? `#${spool.tray_color.substring(0, 6)}` : '#999';
+
+ const isLow = percentage <= 20 || remaining < 200;
+ const progressClass = isLow ? 'low' : '';
+
+ const spoolNumberDisplay = spool.spool_number ? `#${spool.spool_number} ` : '';
+
+ slotsHTML += `
+
+
+
+ Slot ${slotNumber}
+ ${spool.tray_uuid ? '🏷️ ' : ''}
+
+ ${spoolNumberDisplay ? `
${spoolNumberDisplay}
` : ''}
+
${material.name}
+ ${material.brand ? `
${material.brand}
` : ''}
+
${Math.round(remaining)}g (${Math.round(percentage)}%)
+
+
+
+ 📋
+
+
+ ✖️
+
+
+ 🔄
+
+
+
+ `;
+ } else {
+ slotsHTML += `
+
+
Slot ${slotNumber}
+
Leer
+
+ + Zuweisen
+
+
+ `;
+ }
+ }
+
+ slotsContainer.innerHTML = slotsHTML;
+ });
+}
+
+// === HELPER FUNCTIONS ===
+function updateWarningCount() {
+ const warningCountEl = document.getElementById('amsWarningCount');
+ if (!warningCountEl) return;
+
+ // Count warnings
+ let warningCount = 0;
+ amsData.forEach((ams) => {
+ ams.slots.forEach((slot) => {
+ if (slot && slot.spool) {
+ const remaining = getRemaining(slot.spool);
+ const percentage = getPercentage(slot.spool);
+ if (percentage <= 20 || remaining < 200) {
+ warningCount++;
+ }
+ }
+ });
+ });
+
+ warningCountEl.textContent = warningCount;
+
+ // Create/update alerts dropdown
+ createAlertsDropdown(warningCount);
+}
+
+function createAlertsDropdown(warningCount) {
+ const alertCard = document.querySelector('.stat-card-alerts');
+ if (!alertCard) return;
+
+ // Remove existing dropdown
+ let dropdown = alertCard.querySelector('.alerts-dropdown');
+ if (!dropdown) {
+ dropdown = document.createElement('div');
+ dropdown.className = 'alerts-dropdown';
+ alertCard.appendChild(dropdown);
+ }
+
+ if (warningCount === 0) {
+ dropdown.innerHTML = `
+
+
+
✅
+ Alle Slots haben ausreichend Filament
+
+ `;
+ return;
+ }
+
+ // Collect warning items
+ const items = [];
+ amsData.forEach((ams, i) => {
+ ams.slots.forEach((slot, idx) => {
+ if (slot && slot.spool) {
+ const remaining = getRemaining(slot.spool);
+ const percentage = getPercentage(slot.spool);
+ if (percentage <= 20 || remaining < 200) {
+ items.push({
+ ams: i + 1,
+ slot: idx + 1,
+ material: slot.material?.name || 'Unbekannt',
+ remaining: Math.round(remaining),
+ percent: Math.round(percentage),
+ spoolId: slot.spool.id,
+ });
+ }
+ }
+ });
+ });
+
+ dropdown.innerHTML = `
+
+
+ ${items.map(it => `
+
+
+
AMS #${it.ams} · Slot ${it.slot} · ${it.material}
+
${it.remaining}g (${it.percent}%)
+
+ `).join('')}
+
+ `;
+}
+
+function renderAlerts() {
+ // This function is now integrated into updateWarningCount
+ // Keep it for compatibility but it does nothing
+}
+
+function getRemaining(spool) {
+ const wf = parseFloat(spool.weight_full) || 0;
+ const rp = parseFloat(spool.remain_percent);
+
+ // Priorität: weight > weight_current > weight_remaining > berechnet aus remain_percent
+ const remaining = parseFloat(spool.weight) ??
+ parseFloat(spool.weight_current) ??
+ parseFloat(spool.weight_remaining) ??
+ (rp != null && wf ? (rp / 100) * wf : wf);
+
+ return remaining || 0;
+}
+
+function getPercentage(spool) {
+ const wf = parseFloat(spool.weight_full) || 0;
+ const rp = parseFloat(spool.remain_percent);
+
+ if (rp != null) {
+ return Math.max(0, Math.min(100, rp));
+ }
+
+ const remaining = getRemaining(spool);
+ return wf ? Math.max(0, Math.min(100, (remaining / wf) * 100)) : 0;
+}
+
+function goToSpool(spoolId) {
+ window.location.href = `/spools?highlight=${spoolId}`;
+}
+
+function showNotification(message, type = 'info') {
+ const notification = document.getElementById('notification');
+ notification.textContent = message;
+ notification.className = `notification notification-${type} show`;
+
+ setTimeout(() => {
+ notification.classList.remove('show');
+ }, 3000);
+}
+
+// === SLOT ACTIONS ===
+function refreshRFID(amsId, slotNumber) {
+ showNotification(`RFID-Scan für AMS #${amsId} Slot ${slotNumber} wird ausgeführt...`, 'info');
+ // TODO: Backend-Call für RFID-Refresh
+ setTimeout(() => {
+ showNotification(`RFID erfolgreich aktualisiert`, 'success');
+ loadData(); // Refresh data
+ }, 1500);
+}
+
+function changeSpoolDialog(amsId, slotNumber) {
+ const confirmed = confirm(`Spule in AMS #${amsId} Slot ${slotNumber} wechseln?\n\nDies öffnet die Spulenverwaltung.`);
+ if (confirmed) {
+ window.location.href = '/spools';
+ }
+}
+
+// === QUICK-ASSIGN SYSTEM ===
+let currentAssignPrinter = null;
+let currentAssignSlot = null;
+let searchTimeout = null;
+
+function openAssignModal(printerId, slotNumber) {
+ currentAssignPrinter = printerId;
+ currentAssignSlot = slotNumber;
+
+ document.getElementById('assignModalTitle').textContent = `Spule zuweisen - Slot ${slotNumber}`;
+ document.getElementById('assignModal').classList.add('show');
+ document.getElementById('spoolSearch').value = '';
+ document.getElementById('spoolSearch').focus();
+
+ // Initial load: Zeige alle freien Spulen
+ searchSpools('');
+
+ // Live-Suche
+ document.getElementById('spoolSearch').oninput = (e) => {
+ clearTimeout(searchTimeout);
+ searchTimeout = setTimeout(() => {
+ searchSpools(e.target.value);
+ }, 300);
+ };
+}
+
+function closeAssignModal() {
+ document.getElementById('assignModal').classList.remove('show');
+ currentAssignPrinter = null;
+ currentAssignSlot = null;
+}
+
+async function searchSpools(searchTerm) {
+ try {
+ const response = await fetch('/api/spools/');
+ if (!response.ok) throw new Error('Fehler beim Laden der Spulen');
+
+ let allSpools = await response.json();
+
+ // Filtere nur freie Spulen (nicht zugewiesen)
+ let availableSpools = allSpools.filter(s => s.printer_id == null && s.ams_slot == null);
+
+ // Suche filtern
+ if (searchTerm) {
+ const term = searchTerm.toLowerCase();
+ availableSpools = availableSpools.filter(s => {
+ const num = s.spool_number ? s.spool_number.toString() : '';
+ const name = s.name || '';
+ const vendor = s.vendor || '';
+ const color = s.color || '';
+
+ return num.includes(term) ||
+ name.toLowerCase().includes(term) ||
+ vendor.toLowerCase().includes(term) ||
+ color.toLowerCase().includes(term);
+ });
+ }
+
+ renderSpoolSearchResults(availableSpools);
+ } catch (error) {
+ console.error('Fehler bei Spulen-Suche:', error);
+ document.getElementById('spoolSearchResults').innerHTML = `
+
+ Fehler beim Laden der Spulen
+
+ `;
+ }
+}
+
+function renderSpoolSearchResults(spools) {
+ const container = document.getElementById('spoolSearchResults');
+
+ if (spools.length === 0) {
+ container.innerHTML = `
+
+
📭
+
+ Keine freien Spulen gefunden
+
+
+ Alle Spulen sind bereits zugewiesen
+
+
+ `;
+ return;
+ }
+
+ container.innerHTML = `
+
+ 🎯 ${spools.length} ${spools.length === 1 ? 'Spule' : 'Spulen'} verfügbar
+
+ ` + spools.map(s => {
+ const numberDisplay = s.spool_number ? `#${s.spool_number}` : '–';
+ const nameDisplay = s.name || 'Unbekannt';
+ const vendorDisplay = s.vendor || '';
+ const colorDisplay = s.color && s.color !== 'unknown' ? s.color : '';
+ const weightDisplay = s.weight_current ? `${Math.round(s.weight_current)}g` : 'N/A';
+ const percentDisplay = s.remain_percent != null ? Math.round(s.remain_percent) + '%' : '';
+
+ // Farb-Badge falls Farbe bekannt
+ const colorBadge = colorDisplay ? ` ` : '';
+
+ return `
+
+
+
+
+ ${numberDisplay}
+ ${s.tray_uuid ? '🏷️ ' : ''}
+
+
+ ${nameDisplay}
+
+ ${vendorDisplay || colorDisplay ? `
+
+ ${vendorDisplay}
+ ${colorDisplay ? `${colorDisplay}${colorBadge} ` : ''}
+
+ ` : ''}
+
+
+
+ ${weightDisplay}
+
+ ${percentDisplay ? `
+
+ ${percentDisplay}
+
+ ` : ''}
+
+
+
+ `;
+ }).join('');
+}
+
+async function assignSpool(spoolId) {
+ try {
+ const response = await fetch(
+ `/api/spools/${spoolId}/assign?printer_id=${currentAssignPrinter}&slot_number=${currentAssignSlot}`,
+ { method: 'POST' }
+ );
+
+ if (!response.ok) {
+ const error = await response.json();
+ throw new Error(error.detail || 'Zuweisung fehlgeschlagen');
+ }
+
+ const result = await response.json();
+
+ showNotification(
+ `Spule ${result.spool_number ? '#' + result.spool_number : ''} zu Slot ${currentAssignSlot} zugewiesen`,
+ 'success'
+ );
+
+ closeAssignModal();
+ loadData(); // Refresh AMS view
+ } catch (error) {
+ console.error('Fehler bei Zuweisung:', error);
+ showNotification(error.message, 'error');
+ }
+}
+
+// Confirm Unassign Modal
+let pendingUnassignSpoolId = null;
+
+function unassignSpool(spoolId) {
+ // Finde Spule für Anzeige im Modal
+ const spool = spools.find(s => s.id === spoolId);
+ const spoolName = spool ? (spool.spool_number ? `#${spool.spool_number}` : spool.name || 'diese Spule') : 'diese Spule';
+
+ document.getElementById('confirmSpoolName').textContent = spoolName;
+ pendingUnassignSpoolId = spoolId;
+ document.getElementById('confirmUnassignModal').classList.add('show');
+}
+
+function closeConfirmUnassignModal() {
+ document.getElementById('confirmUnassignModal').classList.remove('show');
+ pendingUnassignSpoolId = null;
+}
+
+async function confirmUnassignSpool() {
+ if (!pendingUnassignSpoolId) return;
+
+ const spoolId = pendingUnassignSpoolId;
+ closeConfirmUnassignModal();
+
+ try {
+ const response = await fetch(`/api/spools/${spoolId}/unassign`, {
+ method: 'POST'
+ });
+
+ if (!response.ok) {
+ const error = await response.json();
+ throw new Error(error.detail || 'Entfernen fehlgeschlagen');
+ }
+
+ const result = await response.json();
+
+ showNotification(
+ `Spule ${result.spool_number ? '#' + result.spool_number : ''} entfernt`,
+ 'success'
+ );
+
+ loadData(); // Refresh AMS view
+ } catch (error) {
+ console.error('Fehler beim Entfernen:', error);
+ showNotification(error.message, 'error');
+ }
+}
+
+// Close modal on ESC
+document.addEventListener('keydown', (e) => {
+ if (e.key === 'Escape') {
+ closeAssignModal();
+ closeConfirmUnassignModal();
+ }
+});
+
+// Close modal on background click
+document.getElementById('assignModal').addEventListener('click', (e) => {
+ if (e.target.id === 'assignModal') {
+ closeAssignModal();
+ }
+});
+
+document.getElementById('confirmUnassignModal').addEventListener('click', (e) => {
+ if (e.target.id === 'confirmUnassignModal') {
+ closeConfirmUnassignModal();
+ }
+});
diff --git a/frontend/static/css/activePrintCard.css b/frontend/static/css/activePrintCard.css
new file mode 100644
index 0000000..fc64ae9
--- /dev/null
+++ b/frontend/static/css/activePrintCard.css
@@ -0,0 +1,30 @@
+/* Styles for Active Print Card - use theme variables where possible */
+.active-print-card {
+ background: linear-gradient(135deg, var(--panel), var(--panel-2));
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ padding: 12px;
+}
+.print-card-inner { display:flex; flex-direction:column; gap:10px; }
+.print-card-header { display:flex; justify-content:space-between; align-items:center; }
+.print-card-title { display:flex; gap:8px; align-items:center; }
+.print-dot { width:8px; height:8px; background:var(--accent); border-radius:50%; box-shadow:0 0 8px rgba(74,222,128,0.2); display:inline-block; }
+.print-name { font-weight:700; color:var(--text); font-size:14px; }
+.print-sub { color:var(--text-dim); font-size:12px; }
+.print-updated { color:var(--text-dim); font-size:11px; }
+.print-progress-row { display:flex; align-items:center; gap:16px; }
+.print-remaining { display:flex; flex-direction:column; align-items:flex-start; min-width:80px; }
+.remaining-num { color:var(--accent); font-size:30px; font-weight:700; }
+.remaining-label { color:var(--text-dim); font-size:12px; }
+.print-progress-bar { flex:1; }
+.progress-bar-track { background:rgba(255,255,255,0.03); border-radius:8px; height:14px; overflow:hidden; border:1px solid rgba(255,255,255,0.02); }
+.progress-bar-fill { height:100%; background:linear-gradient(90deg,var(--accent-2),var(--accent)); transition:width 0.8s ease; }
+.print-time { color:var(--text); font-size:14px; min-width:60px; text-align:right; font-weight:600; }
+.print-time-footer { display:flex; justify-content:space-between; font-size:13px; color:var(--text-dim); }
+
+/* small job-card tweaks */
+.job-card.small { padding:10px; border-radius:10px; }
+
+/* Animations */
+@keyframes pulseSmall { 0%{transform:scale(1)}50%{transform:scale(1.05)}100%{transform:scale(1)} }
+.print-dot { animation: pulseSmall 2s infinite; }
diff --git a/frontend/static/css/debug_ams.css b/frontend/static/css/debug_ams.css
new file mode 100644
index 0000000..3811d70
--- /dev/null
+++ b/frontend/static/css/debug_ams.css
@@ -0,0 +1,80 @@
+.ams-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
+ gap: 16px;
+}
+.ams-card {
+ background: radial-gradient(circle at 20% 20%, rgba(255, 255, 255, 0.03), transparent 40%), var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 14px;
+ padding: 14px;
+ box-shadow: var(--shadow);
+}
+.ams-card__header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 10px;
+}
+.badge {
+ padding: 6px 10px;
+ border-radius: 999px;
+ border: 1px solid var(--border);
+ background: #1f2835;
+ color: var(--text);
+ font-weight: 700;
+ font-size: 0.9rem;
+}
+.badge.active {
+ background: rgba(46, 204, 113, 0.12);
+ color: #2ecc71;
+ border-color: rgba(46, 204, 113, 0.3);
+}
+.tray-grid {
+ display: grid;
+ grid-template-columns: repeat(2, minmax(140px, 1fr));
+ gap: 10px;
+}
+.tray {
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 10px;
+ background: var(--panel-2);
+}
+.tray.active {
+ box-shadow: 0 0 0 2px rgba(46, 204, 113, 0.35);
+}
+.tray.empty {
+ opacity: 0.6;
+ border-style: dashed;
+}
+.tray h4 {
+ margin: 0 0 6px 0;
+ font-size: 1rem;
+}
+.tray .meta {
+ color: var(--text-dim);
+ font-size: 0.9rem;
+}
+.raw-toggle {
+ margin-top: 14px;
+ display: flex;
+ gap: 8px;
+}
+.raw-block {
+ margin-top: 8px;
+ display: none;
+}
+.raw-block.show {
+ display: block;
+}
+.raw-block pre {
+ background: #0f141c;
+ color: #e8ecf2;
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 10px;
+ max-height: 320px;
+ overflow: auto;
+ font-size: 0.9rem;
+}
diff --git a/frontend/static/css/global_alerts.css b/frontend/static/css/global_alerts.css
new file mode 100644
index 0000000..b4517e5
--- /dev/null
+++ b/frontend/static/css/global_alerts.css
@@ -0,0 +1,93 @@
+.alert-container {
+ position: fixed;
+ top: 20px;
+ left: 50%;
+ transform: translateX(-50%);
+ right: auto;
+ display: flex;
+ flex-direction: column;
+ gap: 10px;
+ z-index: 1200;
+ pointer-events: none;
+ align-items: center;
+}
+
+.alert {
+ min-width: 260px;
+ max-width: 360px;
+ background: linear-gradient(135deg, rgba(255, 255, 255, 0.02), rgba(255, 255, 255, 0.04));
+ border: 1px solid rgba(255, 255, 255, 0.07);
+ border-radius: 12px;
+ padding: 12px 14px 12px 14px;
+ color: #e8ecf2;
+ box-shadow: 0 20px 40px rgba(0, 0, 0, 0.35);
+ display: grid;
+ grid-template-columns: 1fr auto;
+ gap: 8px;
+ pointer-events: all;
+ animation: alert-in 140ms ease-out forwards;
+}
+
+.alert__content {
+ display: flex;
+ flex-direction: column;
+ gap: 4px;
+}
+
+.alert__title {
+ font-weight: 700;
+ letter-spacing: 0.2px;
+}
+
+.alert__message {
+ color: #cbd3e1;
+ font-size: 0.95rem;
+ line-height: 1.4;
+}
+
+.alert__close {
+ background: none;
+ border: none;
+ color: inherit;
+ font-size: 18px;
+ cursor: pointer;
+ align-self: flex-start;
+ padding: 2px 6px;
+ border-radius: 8px;
+ transition: background 0.12s ease, color 0.12s ease;
+}
+.alert__close:hover {
+ background: rgba(255, 255, 255, 0.08);
+ color: #fff;
+}
+
+.alert--success {
+ border-color: rgba(46, 204, 113, 0.45);
+ background: linear-gradient(135deg, rgba(46, 204, 113, 0.12), rgba(46, 204, 113, 0.18));
+}
+
+.alert--warn {
+ border-color: rgba(243, 156, 18, 0.5);
+ background: linear-gradient(135deg, rgba(243, 156, 18, 0.12), rgba(243, 156, 18, 0.2));
+}
+
+.alert--error {
+ border-color: rgba(231, 76, 60, 0.6);
+ background: linear-gradient(135deg, rgba(231, 76, 60, 0.12), rgba(231, 76, 60, 0.22));
+}
+
+.alert--info {
+ border-color: rgba(52, 152, 219, 0.6);
+ background: linear-gradient(135deg, rgba(52, 152, 219, 0.12), rgba(52, 152, 219, 0.18));
+}
+
+.alert--closing {
+ opacity: 0;
+ transform: translateY(-6px);
+ transition: all 120ms ease-in;
+}
+
+@keyframes alert-in {
+ from { opacity: 0; transform: translateY(-8px); }
+ to { opacity: 1; transform: translateY(0); }
+}
diff --git a/frontend/static/css/main.css b/frontend/static/css/main.css
new file mode 100644
index 0000000..bc1fabe
--- /dev/null
+++ b/frontend/static/css/main.css
@@ -0,0 +1,449 @@
+@import url("https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&display=swap");
+
+:root {
+ --bg: #0e1116;
+ --panel: #141a22;
+ --panel-2: #1a212c;
+ --border: #222a35;
+ --text: #e8ecf2;
+ --text-dim: #a7b2c3;
+ --accent: #f39c12;
+ --accent-2: #2ecc71;
+ --accent-3: #3498db;
+ --error: #e74c3c;
+ --shadow: 0 20px 50px rgba(0, 0, 0, 0.45);
+ --theme-icon: #e8ecf2;
+}
+
+body.theme-light {
+ --bg: #f4f6fb;
+ --panel: #ffffff;
+ --panel-2: #f1f4f9;
+ --border: #d9e0ea;
+ --text: #1e2430;
+ --text-dim: #5c6777;
+ --accent: #e67e22;
+ --accent-2: #27ae60;
+ --accent-3: #2980b9;
+ --error: #c0392b;
+ --shadow: 0 12px 30px rgba(0, 0, 0, 0.1);
+ --theme-icon: #1e2430;
+}
+
+* { box-sizing: border-box; }
+
+/* Verhindert weißen Flash beim Seitenwechsel */
+html {
+ background-color: #0e1116;
+}
+
+body.page {
+ margin: 0;
+ display: grid;
+ grid-template-columns: 260px 1fr;
+ min-height: 100vh;
+ background:
+ radial-gradient(circle at 20% 20%, rgba(52, 152, 219, 0.08), transparent 35%),
+ radial-gradient(circle at 80% 0%, rgba(243, 156, 18, 0.08), transparent 30%),
+ var(--bg);
+ color: var(--text);
+ font-family: "Inter", "Segoe UI", system-ui, -apple-system, sans-serif;
+}
+
+.sidebar {
+ background: linear-gradient(180deg, #0c0f14 0%, #0b0d12 100%);
+ border-right: 1px solid var(--border);
+ padding: 24px 20px;
+ display: flex;
+ flex-direction: column;
+ gap: 24px;
+ box-shadow: var(--shadow);
+}
+.sidebar__brand {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+ font-weight: 700;
+ letter-spacing: 0.4px;
+}
+.brand__icon {
+ width: 38px;
+ height: 38px;
+ border-radius: 10px;
+ background: linear-gradient(135deg, #f39c12, #ff6b35);
+ display: grid;
+ place-items: center;
+ font-size: 1.1rem;
+ box-shadow: 0 10px 25px rgba(243, 156, 18, 0.35);
+}
+.brand__name { color: var(--text); }
+
+.sidebar__nav {
+ display: flex;
+ flex-direction: column;
+ gap: 10px;
+}
+.nav__item {
+ color: var(--text-dim);
+ text-decoration: none;
+ padding: 12px 14px;
+ border-radius: 12px;
+ transition: all 0.15s ease;
+ border: 1px solid transparent;
+}
+.nav__item:hover {
+ color: var(--text);
+ background: #11161f;
+ border-color: var(--border);
+}
+.nav__item--active {
+ background: #1f2835;
+ color: var(--text);
+ border-color: #2f3a4d;
+}
+
+.sidebar__footer {
+ margin-top: auto;
+ font-size: 0.85rem;
+ color: var(--text-dim);
+}
+.foot-label { font-weight: 700; color: var(--accent-3); }
+
+.content {
+ padding: 28px 36px 48px;
+}
+
+.content__header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ gap: 24px;
+ margin-bottom: 24px;
+}
+.header__right {
+ display: flex;
+ align-items: center;
+ gap: 14px;
+}
+.eyebrow {
+ text-transform: uppercase;
+ letter-spacing: 0.12em;
+ font-size: 0.75rem;
+ color: var(--text-dim);
+ margin: 0 0 4px 0;
+}
+.title {
+ margin: 0;
+ font-size: 1.8rem;
+}
+.subtitle {
+ margin: 6px 0 0 0;
+ color: var(--text-dim);
+}
+
+.actions {
+ display: flex;
+ gap: 10px;
+ flex-wrap: wrap;
+}
+.btn {
+ border: none;
+ border-radius: 12px;
+ padding: 12px 16px;
+ font-weight: 700;
+ cursor: pointer;
+ transition: transform 0.1s ease, box-shadow 0.1s ease, background 0.15s ease, color 0.15s ease;
+}
+.btn.primary {
+ background: linear-gradient(135deg, #f39c12, #ff6b35);
+ color: #0c0f14;
+ border: 1px solid #ff8f3a;
+ box-shadow: 0 12px 30px rgba(243, 156, 18, 0.4);
+}
+.btn.success {
+ background: #2ecc71;
+ color: #0c0f14;
+ border: 1px solid #2ecc71;
+ box-shadow: 0 12px 30px rgba(46, 204, 113, 0.4);
+}
+.btn.ghost {
+ background: #1b2330;
+ color: #e8ecf2;
+ border: 1px solid #2f3a4d;
+ box-shadow: none;
+}
+.btn:hover { transform: translateY(-1px); }
+.btn.primary:hover {
+ background: linear-gradient(135deg, #ff9132, #ff6b35);
+ border-color: #ff8f3a;
+}
+.btn.success:hover {
+ background: #27ae60;
+ color: #0c0f14;
+ border-color: #27ae60;
+ box-shadow: 0 12px 30px rgba(39, 174, 96, 0.5);
+}
+.btn.ghost:hover {
+ background: #e74c3c;
+ color: #0c0f14;
+ border-color: #e74c3c;
+ box-shadow: 0 10px 24px rgba(231, 76, 60, 0.45);
+}
+
+/* User menu */
+.user-menu {
+ position: relative;
+ min-width: 180px;
+}
+.user-menu__trigger {
+ display: inline-flex;
+ align-items: center;
+ gap: 10px;
+ background: var(--panel-2);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 8px 10px;
+ color: var(--text);
+ cursor: pointer;
+ box-shadow: var(--shadow);
+}
+.user-menu__trigger:focus-visible {
+ outline: 2px solid var(--accent-3);
+ outline-offset: 2px;
+}
+.user-menu__avatar {
+ width: 36px;
+ height: 36px;
+ border-radius: 50%;
+ display: grid;
+ place-items: center;
+ background: linear-gradient(135deg, var(--accent), #ff6b35);
+ color: #0c0f14;
+ font-weight: 800;
+ letter-spacing: 0.5px;
+}
+.user-menu__info {
+ display: flex;
+ flex-direction: column;
+ gap: 2px;
+ line-height: 1.2;
+}
+.user-menu__name { font-weight: 700; }
+.user-menu__meta { font-size: 0.85rem; color: var(--text-dim); }
+.user-menu__chevron { color: var(--text-dim); font-size: 0.85rem; }
+.user-menu__dropdown {
+ position: absolute;
+ right: 0;
+ top: calc(100% + 6px);
+ min-width: 240px;
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ box-shadow: var(--shadow);
+ padding: 8px 8px 10px 8px;
+ display: none;
+ z-index: 20;
+}
+.user-menu__dropdown.open { display: block; }
+.user-menu__section { padding: 6px; border-bottom: 1px solid var(--border); }
+.user-menu__section:last-child { border-bottom: none; }
+.user-menu__section-title { font-size: 0.85rem; color: var(--text-dim); margin-bottom: 6px; font-weight: 700; }
+.user-menu__section-item { color: var(--text); font-weight: 600; }
+.user-menu__option {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ padding: 6px 4px;
+ border-radius: 10px;
+ cursor: pointer;
+ color: var(--text);
+}
+.user-menu__option:hover { background: var(--panel-2); }
+.user-menu__option input { accent-color: var(--accent-3); }
+.user-menu__link {
+ width: 100%;
+ text-align: left;
+ padding: 8px 6px;
+ border: none;
+ background: transparent;
+ color: var(--text);
+ border-radius: 10px;
+ cursor: pointer;
+ font-weight: 600;
+}
+.user-menu__link:hover { background: var(--panel-2); }
+
+/* Tooltips */
+.tooltip-container {
+ position: relative;
+ display: inline-flex;
+ align-items: center;
+}
+.tooltip-icon {
+ display: inline-flex;
+ align-items: center;
+ justify-content: center;
+ width: 18px;
+ height: 18px;
+ border-radius: 50%;
+ background: var(--panel-2);
+ border: 1px solid var(--border);
+ color: var(--text-dim);
+ font-size: 0.75rem;
+ margin-left: 6px;
+ cursor: pointer;
+}
+.tooltip-box {
+ position: absolute;
+ top: 125%;
+ right: 0;
+ background: #0f141c;
+ color: var(--text);
+ border: 1px solid var(--border);
+ border-radius: 10px;
+ padding: 8px 10px;
+ font-size: 0.85rem;
+ min-width: 220px;
+ box-shadow: var(--shadow);
+ display: none;
+ z-index: 50;
+}
+.tooltip-container:hover .tooltip-box,
+.tooltip-icon:focus + .tooltip-box {
+ display: block;
+}
+
+.card-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
+ gap: 16px;
+ width: 100%;
+}
+.card-grid .empty {
+ padding: 18px;
+ background: var(--panel);
+ border: 1px dashed var(--border);
+ border-radius: 14px;
+ text-align: center;
+ color: var(--text-dim);
+}
+
+.panel {
+ background: radial-gradient(circle at 20% 20%, rgba(255, 255, 255, 0.03), transparent 40%), var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 14px;
+ padding: 16px;
+ box-shadow: var(--shadow);
+}
+
+.modal {
+ position: fixed;
+ inset: 0;
+ background: #000000;
+ display: none;
+ align-items: center;
+ justify-content: center;
+ z-index: 100;
+}
+.modal.show {
+ display: flex;
+ animation: modalFadeIn 0.2s ease;
+}
+@keyframes modalFadeIn {
+ from { opacity: 0; }
+ to { opacity: 1; }
+}
+.modal-content {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 16px;
+ min-width: 400px;
+ max-width: 650px;
+ width: 100%;
+ box-shadow: 0 24px 60px rgba(0, 0, 0, 0.8);
+ animation: modalSlideIn 0.25s ease;
+}
+@keyframes modalSlideIn {
+ from {
+ transform: translateY(-20px);
+ opacity: 0;
+ }
+ to {
+ transform: translateY(0);
+ opacity: 1;
+ }
+}
+.modal__dialog {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 16px;
+ min-width: 320px;
+ max-width: 480px;
+ width: 100%;
+ padding: 18px 20px;
+ box-shadow: 0 24px 60px rgba(0, 0, 0, 0.8);
+}
+.modal-header, .modal__header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 24px 24px 16px 24px;
+ border-bottom: 1px solid var(--border);
+}
+.modal-header h2, .modal__header h2 {
+ margin: 0;
+ font-size: 1.4rem;
+ font-weight: 700;
+ color: var(--text);
+}
+.modal-close, .modal__close {
+ background: none;
+ border: none;
+ color: var(--text-dim);
+ font-size: 28px;
+ cursor: pointer;
+ width: 36px;
+ height: 36px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: 8px;
+ transition: all 0.15s ease;
+}
+.modal-close:hover, .modal__close:hover {
+ background: var(--panel-2);
+ color: var(--text);
+}
+.modal-body, .modal__body {
+ padding: 24px;
+ display: flex;
+ flex-direction: column;
+ gap: 20px;
+}
+.field { display: flex; flex-direction: column; gap: 4px; color: var(--text-dim); }
+.field input, .field select {
+ background: #0f141c;
+ border: 1px solid var(--border);
+ border-radius: 10px;
+ padding: 10px 12px;
+ color: var(--text);
+}
+.field-row { display: grid; grid-template-columns: 1fr 1fr; gap: 10px; }
+.toggle { display: flex; align-items: center; gap: 10px; color: var(--text); }
+.modal__actions { display: flex; justify-content: flex-end; gap: 10px; margin-top: 10px; }
+
+.info-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(240px, 1fr));
+ gap: 16px;
+}
+
+@media (max-width: 960px) {
+ body.page {
+ grid-template-columns: 1fr;
+ }
+ .sidebar { flex-direction: row; align-items: center; }
+ .sidebar__nav { flex-direction: row; flex-wrap: wrap; }
+ .content { padding: 18px; }
+ .content__header { flex-direction: column; align-items: flex-start; }
+}
diff --git a/frontend/static/css/printers.css b/frontend/static/css/printers.css
new file mode 100644
index 0000000..4b1fd1f
--- /dev/null
+++ b/frontend/static/css/printers.css
@@ -0,0 +1,181 @@
+.card {
+ background: radial-gradient(circle at 20% 20%, rgba(255, 255, 255, 0.03), transparent 40%), var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 14px;
+ padding: 14px;
+ box-shadow: var(--shadow);
+ display: grid;
+ grid-template-areas:
+ "head head status"
+ "body body body"
+ "progress progress progress";
+ gap: 10px;
+ min-height: 170px;
+ max-width: 360px;
+}
+.card__head {
+ grid-area: head;
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ gap: 10px;
+ flex-wrap: wrap;
+ position: relative;
+ padding-right: 48px;
+}
+.card__title {
+ font-weight: 700;
+ font-size: 1.1rem;
+}
+.card__badges {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+}
+.card__status {
+ justify-self: end;
+ align-self: start;
+ display: inline-flex;
+ align-items: center;
+ gap: 6px;
+ padding: 6px 9px;
+ border-radius: 999px;
+ font-weight: 700;
+ background: rgba(46, 204, 113, 0.12);
+ color: #2ecc71;
+ border: 1px solid rgba(46, 204, 113, 0.3);
+ font-size: 0.9rem;
+}
+.card__status.offline {
+ background: rgba(231, 76, 60, 0.12);
+ color: #e74c3c;
+ border-color: rgba(231, 76, 60, 0.35);
+}
+.dot {
+ width: 10px;
+ height: 10px;
+ border-radius: 50%;
+ background: currentColor;
+}
+.kebab {
+ position: absolute;
+ right: 4px;
+ top: 4px;
+}
+.kebab button {
+ background: none;
+ border: none;
+ color: var(--text);
+ font-size: 18px;
+ cursor: pointer;
+ padding: 4px 6px;
+ border-radius: 8px;
+ transition: background 0.15s ease;
+}
+.kebab button:hover { background: #1c2430; }
+.kebab-menu {
+ position: absolute;
+ top: 26px;
+ right: 0;
+ background: #121821;
+ border: 1px solid var(--border);
+ border-radius: 10px;
+ min-width: 170px;
+ box-shadow: var(--shadow);
+ display: none;
+ z-index: 10;
+}
+.kebab-menu.open { display: block; }
+.kebab-menu button {
+ width: 100%;
+ padding: 10px 12px;
+ background: transparent;
+ border: none;
+ color: var(--text);
+ text-align: left;
+ cursor: pointer;
+}
+.kebab-menu button:hover { background: #1c2430; }
+
+.card__body {
+ grid-area: body;
+ display: grid;
+ grid-template-columns: auto 1fr;
+ gap: 12px;
+ align-items: center;
+}
+.card__image {
+ width: 90px;
+ height: 90px;
+ border-radius: 12px;
+ background: linear-gradient(145deg, #1c2532, #0f141c);
+ display: grid;
+ place-items: center;
+ border: 1px solid var(--border);
+}
+.card__image img {
+ width: 70px;
+ height: 70px;
+ object-fit: cover;
+ border-radius: 10px;
+ filter: drop-shadow(0 8px 18px rgba(0,0,0,0.35));
+}
+.card__meta {
+ display: grid;
+ grid-template-columns: 1fr;
+ gap: 4px;
+ color: var(--text);
+ font-size: 0.95rem;
+}
+.card__meta .label { color: var(--text-dim); display: inline-block; min-width: 72px; }
+.card__meta .value { color: var(--text); font-weight: 700; }
+
+.temp {
+ display: flex;
+ gap: 6px;
+}
+.temp .label { color: var(--text-dim); font-size: 0.9rem; }
+.temp .value { color: var(--text); font-weight: 600; }
+
+.card__progress {
+ grid-area: progress;
+ display: flex;
+ align-items: center;
+ gap: 10px;
+}
+.progress-bar {
+ flex: 1;
+ height: 6px;
+ background: #0d1117;
+ border-radius: 999px;
+ overflow: hidden;
+ border: 1px solid var(--border);
+}
+.progress-bar__fill {
+ height: 100%;
+ width: 0%;
+ border-radius: inherit;
+ background: linear-gradient(90deg, #f39c12, #e67e22);
+ transition: width 0.25s ease;
+}
+.progress-value { color: var(--text-dim); font-weight: 700; }
+
+.card__foot {
+ grid-area: foot;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ color: var(--text-dim);
+ font-size: 0.9rem;
+}
+.card__actions {
+ display: flex;
+ gap: 8px;
+}
+.pill {
+ border-radius: 999px;
+ padding: 6px 10px;
+ border: 1px solid var(--border);
+ color: var(--text-dim);
+ font-weight: 600;
+}
diff --git a/frontend/static/dashboard.css b/frontend/static/dashboard.css
new file mode 100644
index 0000000..3c87d6a
--- /dev/null
+++ b/frontend/static/dashboard.css
@@ -0,0 +1,417 @@
+/* FilamentHub Dashboard Styling */
+
+:root {
+ --bg-dark: #0a0a0a;
+ --bg-card: #1a1a1a;
+ --bg-card-hover: #222;
+ --border: #2a2a2a;
+ --text: #f0f0f0;
+ --text-dim: #888;
+ --accent: #4fc3f7;
+ --accent-hover: #29b6f6;
+ --success: #66bb6a;
+ --warning: #ffa726;
+ --error: #ef5350;
+ --gradient: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+}
+
+* {
+ margin: 0;
+ padding: 0;
+ box-sizing: border-box;
+}
+
+html {
+ background: #0a0a0a;
+}
+
+body {
+ background: #0a0a0a;
+ color: var(--text);
+ font-family: 'Segoe UI', system-ui, -apple-system, sans-serif;
+ line-height: 1.6;
+ min-height: 100vh;
+}
+
+.container {
+ max-width: 1400px;
+ margin: 0 auto;
+ padding: 20px;
+}
+
+/* HEADER */
+.header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 20px 0;
+ margin-bottom: 30px;
+ border-bottom: 2px solid var(--border);
+}
+
+.header h1 {
+ font-size: 2rem;
+ background: var(--gradient);
+ -webkit-background-clip: text;
+ background-clip: text;
+ -webkit-text-fill-color: transparent;
+}
+
+.header-actions {
+ display: flex;
+ gap: 10px;
+}
+
+/* NAVIGATION */
+.nav {
+ display: flex;
+ gap: 10px;
+ margin-bottom: 30px;
+ flex-wrap: wrap;
+}
+
+.nav-btn {
+ padding: 12px 24px;
+ background: var(--bg-card);
+ border: 1px solid var(--border);
+ color: var(--text-dim);
+ cursor: pointer;
+ border-radius: 8px;
+ font-size: 1rem;
+ transition: all 0.3s ease;
+ text-decoration: none;
+ display: inline-block;
+}
+
+.nav-btn:hover {
+ background: var(--bg-card-hover);
+ color: var(--text);
+ border-color: var(--accent);
+}
+
+.nav-btn.active {
+ background: var(--accent);
+ color: white;
+ border-color: var(--accent);
+}
+
+/* CARDS */
+.card {
+ background: var(--bg-card);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ padding: 20px;
+ transition: all 0.3s ease;
+}
+
+.card:hover {
+ border-color: var(--accent);
+ box-shadow: 0 4px 12px rgba(79, 195, 247, 0.1);
+}
+
+.card h3 {
+ margin-bottom: 15px;
+ color: var(--accent);
+ font-size: 1.2rem;
+}
+
+/* GRID */
+.grid {
+ display: grid;
+ gap: 20px;
+ margin-bottom: 30px;
+}
+
+/* Abstände zwischen Sektionen */
+.grid + .card,
+.card + .card,
+.card + .grid {
+ margin-top: 20px;
+}
+
+/* Quick actions spacing */
+.quick-actions .btn {
+ padding: 8px 14px;
+ font-size: 0.95rem;
+}
+
+.quick-actions .btn + .btn {
+ margin-top: 8px;
+}
+
+/* Untere 3er-Row gleich hoch */
+.row-equal {
+ align-items: stretch;
+}
+.row-equal > .card.equal-height {
+ height: 100%;
+}
+
+.grid-2col {
+ grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
+}
+
+.grid-3col {
+ grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
+ align-items: start;
+}
+
+.card.equal-height {
+ display: flex;
+ flex-direction: column;
+ height: auto;
+}
+
+.grid-3col > .card.equal-height {
+ height: auto;
+}
+
+.grid-4col {
+ grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
+}
+
+/* STATS */
+.stat-card {
+ text-align: center;
+ padding: 30px 20px;
+}
+
+.stat-value {
+ font-size: 3rem;
+ font-weight: 700;
+ color: var(--accent);
+ line-height: 1;
+ margin-bottom: 10px;
+}
+
+.stat-label {
+ color: var(--text-dim);
+ font-size: 0.95rem;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+}
+
+.stat-icon {
+ font-size: 2rem;
+ margin-bottom: 10px;
+ opacity: 0.8;
+ width: 2.5rem;
+ height: 2.5rem;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ margin-left: auto;
+ margin-right: auto;
+ font-family: 'Segoe UI Emoji', 'Apple Color Emoji', 'Noto Color Emoji', sans-serif;
+ overflow: hidden;
+ line-height: 1;
+ vertical-align: middle;
+}
+
+/* BUTTONS */
+.btn {
+ padding: 10px 20px;
+ border: none;
+ border-radius: 6px;
+ cursor: pointer;
+ font-size: 1rem;
+ transition: all 0.3s ease;
+ display: inline-flex;
+ align-items: center;
+ gap: 8px;
+}
+
+.btn-primary {
+ background: var(--accent);
+ color: white;
+}
+
+.btn-primary:hover {
+ background: var(--accent-hover);
+ transform: translateY(-2px);
+}
+
+.btn-success {
+ background: var(--success);
+ color: white;
+}
+
+.btn-success:hover {
+ background: #57a85a;
+}
+
+.btn-secondary {
+ background: var(--bg-card);
+ color: var(--text);
+ border: 1px solid var(--border);
+}
+
+.btn-secondary:hover {
+ background: var(--bg-card-hover);
+ border-color: var(--accent);
+}
+
+/* TABLE */
+.table-container {
+ overflow-x: auto;
+ margin-top: 15px;
+}
+
+table {
+ width: 100%;
+ border-collapse: collapse;
+}
+
+thead {
+ background: var(--bg-dark);
+}
+
+th, td {
+ padding: 12px;
+ text-align: left;
+ border-bottom: 1px solid var(--border);
+}
+
+th {
+ color: var(--accent);
+ font-weight: 600;
+ text-transform: uppercase;
+ font-size: 0.85rem;
+ letter-spacing: 0.5px;
+}
+
+tbody tr {
+ transition: background 0.2s ease;
+}
+
+tbody tr:hover {
+ background: var(--bg-card-hover);
+}
+
+/* STATUS BADGE */
+.status-badge {
+ display: inline-block;
+ padding: 4px 12px;
+ border-radius: 12px;
+ font-size: 0.85rem;
+ font-weight: 600;
+}
+
+.status-online {
+ background: rgba(102, 187, 106, 0.2);
+ color: var(--success);
+}
+
+.status-offline {
+ background: rgba(136, 136, 136, 0.2);
+ color: var(--text-dim);
+}
+
+.status-printing {
+ background: rgba(79, 195, 247, 0.2);
+ color: var(--accent);
+}
+
+.status-error {
+ background: rgba(239, 83, 80, 0.2);
+ color: var(--error);
+}
+
+/* PROGRESS BAR */
+.progress-bar {
+ width: 100%;
+ height: 8px;
+ background: rgba(79, 195, 247, 0.1);
+ border-radius: 4px;
+ overflow: hidden;
+ margin-top: 8px;
+}
+
+.progress-fill {
+ height: 100%;
+ background: var(--accent);
+ border-radius: 4px;
+ transition: width 0.3s ease;
+}
+
+/* INFO GROUP */
+.info-group {
+ display: flex;
+ flex-direction: column;
+ gap: 10px;
+}
+
+.info-item {
+ display: flex;
+ justify-content: space-between;
+ padding: 8px 0;
+ border-bottom: 1px solid var(--border);
+}
+
+.info-item:last-child {
+ border-bottom: none;
+}
+
+.info-label {
+ color: var(--text-dim);
+ font-size: 0.9rem;
+}
+
+.info-value {
+ color: var(--text);
+ font-weight: 600;
+}
+
+/* LOADER */
+.loader {
+ text-align: center;
+ padding: 40px;
+ color: var(--text-dim);
+}
+
+.loader::after {
+ content: '...';
+ animation: dots 1.5s infinite;
+}
+
+@keyframes dots {
+ 0%, 20% { content: '.'; }
+ 40% { content: '..'; }
+ 60%, 100% { content: '...'; }
+}
+
+/* EMPTY STATE */
+.empty-state {
+ text-align: center;
+ padding: 20px 20px 30px 20px;
+ margin-top: 10px;
+ color: var(--text-dim);
+}
+
+.empty-state-icon {
+ font-size: 4rem;
+ margin-bottom: 20px;
+ opacity: 0.3;
+}
+
+.empty-state h3 {
+ color: var(--text);
+ margin-bottom: 10px;
+}
+
+/* RESPONSIVE */
+@media (max-width: 768px) {
+ .header {
+ flex-direction: column;
+ align-items: flex-start;
+ gap: 15px;
+ }
+
+ .grid {
+ grid-template-columns: 1fr;
+ }
+
+ .stat-value {
+ font-size: 2.5rem;
+ }
+}
diff --git a/frontend/static/dashboard.js b/frontend/static/dashboard.js
new file mode 100644
index 0000000..7ed0588
--- /dev/null
+++ b/frontend/static/dashboard.js
@@ -0,0 +1,275 @@
+// FilamentHub Dashboard JavaScript (clean icons + compact cards)
+
+// === STATE ===
+let dashboardData = {
+ stats: {},
+ materials: [],
+ spools: [],
+ printers: [],
+ recentJobs: []
+};
+
+// === INIT ===
+document.addEventListener('DOMContentLoaded', () => {
+ console.log('[Dashboard] DOMContentLoaded - starting data load');
+ loadDashboardData();
+ setupNotificationWebSocket();
+ // Auto-refresh every 10 seconds
+ setInterval(loadDashboardData, 10000);
+});
+
+// === NOTIFICATION WEBSOCKET ===
+function setupNotificationWebSocket() {
+ const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
+ const wsUrl = `${protocol}//${window.location.host}/api/notifications/ws`;
+
+ let ws = null;
+ let reconnectTimeout = null;
+
+ function connect() {
+ try {
+ ws = new WebSocket(wsUrl);
+
+ ws.onopen = () => {
+ console.log('[Notifications] WebSocket connected');
+ };
+
+ ws.onmessage = (event) => {
+ try {
+ const data = JSON.parse(event.data);
+ console.log('[Notifications] Received:', data);
+
+ // Backend sendet {event: "notification_trigger", payload: {...}}
+ if (data.event === 'notification_trigger' && data.payload) {
+ const notification = data.payload;
+ showNotification(
+ notification.message || 'Benachrichtigung',
+ notification.type || 'info'
+ );
+ }
+ } catch (err) {
+ console.error('[Notifications] Parse error:', err);
+ }
+ };
+
+ ws.onerror = (error) => {
+ console.error('[Notifications] WebSocket error:', error);
+ };
+
+ ws.onclose = () => {
+ console.log('[Notifications] WebSocket closed, reconnecting in 5s...');
+ if (reconnectTimeout) clearTimeout(reconnectTimeout);
+ reconnectTimeout = setTimeout(connect, 5000);
+ };
+ } catch (err) {
+ console.error('[Notifications] Connection error:', err);
+ if (reconnectTimeout) clearTimeout(reconnectTimeout);
+ reconnectTimeout = setTimeout(connect, 5000);
+ }
+ }
+
+ connect();
+}
+
+// === LOAD DATA ===
+async function loadDashboardData() {
+ try {
+ console.log('[Dashboard] loadDashboardData started');
+ // Alle Daten parallel laden für bessere Performance
+ const [materials, spools, printers] = await Promise.all([
+ fetch('/api/materials/').then(r => r.json()),
+ fetch('/api/spools/').then(r => r.json()),
+ fetch('/api/printers/').then(r => r.json())
+ ]);
+
+ dashboardData.materials = materials;
+ dashboardData.spools = spools;
+ dashboardData.printers = printers;
+
+ // Jetzt alle UI-Updates mit den Daten
+ updateStatsCards(materials, spools, printers);
+ renderMaterialsList(materials.slice(0, 5));
+ renderLowSpoolsList(spools.filter(s => {
+ if (s.is_empty) return false;
+ const remaining = s.weight_remaining || s.weight_full || 0;
+ return remaining < 200;
+ }).slice(0, 5));
+ renderPrintersList(printers);
+
+ console.log('[Dashboard] All data loaded successfully');
+ } catch (error) {
+ console.error('Fehler beim Laden der Dashboard-Daten:', error);
+ }
+}
+
+function updateStatsCards(materials, spools, printers) {
+ console.log('[Stats] Updating stat cards');
+
+ // Materials
+ const statMaterials = document.getElementById('statMaterials');
+ if (statMaterials) statMaterials.textContent = materials.length;
+
+ // Spools
+ const statSpools = document.getElementById('statSpools');
+ if (statSpools) statSpools.textContent = spools.length;
+
+ // Active Spools
+ const activeSpools = spools.filter(s => !s.is_empty);
+ const statActiveSpools = document.getElementById('statActiveSpools');
+ if (statActiveSpools) statActiveSpools.textContent = activeSpools.length;
+
+ // Total Weight
+ const totalWeight = spools.reduce((sum, s) => {
+ return sum + (s.weight_remaining || s.weight_full || 0);
+ }, 0);
+ const statTotalWeight = document.getElementById('statTotalWeight');
+ if (statTotalWeight) statTotalWeight.textContent = Math.round(totalWeight);
+
+ // Printers
+ const onlineCount = printers.filter(p => p.online).length;
+ const totalCount = printers.length;
+ const statPrinters = document.getElementById('statPrinters');
+ if (statPrinters) statPrinters.textContent = `${onlineCount} / ${totalCount}`;
+}
+
+function renderMaterialsList(materials) {
+ const container = document.getElementById('materialsList');
+ if (!container) return;
+
+ if (materials.length === 0) {
+ container.innerHTML = `
+
+
📦
+
Keine Materialien vorhanden
+
+ 📦 Material hinzufügen
+
+
+ `;
+ return;
+ }
+
+ container.innerHTML = `
+
+
+
+
+ Name
+ Marke
+ Farbe
+ Durchmesser
+
+
+
+ ${materials.map(m => `
+
+ ${m.name}
+ ${m.brand || '-'}
+
+ ${m.color ? `
+
+ ${m.color}
+ ` : '-'}
+
+ ${m.diameter}mm
+
+ `).join('')}
+
+
+
+ ${materials.length >= 5 ? `
+
+ ` : ''}
+ `;
+}
+
+function renderLowSpoolsList(spools) {
+ const container = document.getElementById('lowSpoolsList');
+ if (!container) return;
+
+ if (spools.length === 0) {
+ container.innerHTML = `
+
+ ✔️ Alle Spulen haben ausreichend Filament
+
+ `;
+ return;
+ }
+
+ container.innerHTML = `
+
+ ${spools.map(s => {
+ const remaining = s.weight_remaining || s.weight_full || 0;
+ const percentage = s.weight_full ? (remaining / s.weight_full) * 100 : 0;
+
+ return `
+
+
+ ${s.label || 'Spule #' + s.id.substring(0, 8)}
+ ${Math.round(remaining)}g
+
+
+
+ `;
+ }).join('')}
+
+ `;
+}
+
+function renderPrintersList(printers) {
+ const container = document.getElementById('printersList');
+ if (!container) return;
+ if (!printers || printers.length === 0) {
+ container.innerHTML = `🖨️
Keine Drucker konfiguriert
`;
+ return;
+ }
+ container.innerHTML = `${printers.map(p => `
+
+
${p.online ? 'Online ' : 'Offline '}
+
+
${p.printer_type === 'bambu' || p.printer_type === 'bambu_lab' ? '🎯' : p.printer_type === 'klipper' ? '🛠️' : '🖨️'}
+
${p.name}
+
+
Typ ${p.printer_type}
+
IP-Adresse ${p.ip_address}${p.port ? ':' + p.port : ''}
+
+
+
+ `).join('')}
`;
+}
+
+// === UTILITY ===
+function showNotification(message, type = 'info') {
+ const notification = document.createElement('div');
+ notification.className = `notification notification-${type}`;
+ notification.textContent = message;
+ notification.style.cssText = `
+ position: fixed;
+ top: 50%;
+ left: 50%;
+ transform: translate(-50%, -50%);
+ padding: 15px 25px;
+ background: var(--bg-card);
+ border: 1px solid var(--accent);
+ border-radius: 8px;
+ color: var(--text);
+ z-index: 10000;
+ box-shadow: 0 8px 32px rgba(0,0,0,0.4);
+ animation: slideIn 0.3s ease;
+ `;
+
+ document.body.appendChild(notification);
+
+ setTimeout(() => {
+ notification.style.animation = 'slideOut 0.3s ease';
+ setTimeout(() => notification.remove(), 300);
+ }, 3000);
+}
diff --git a/frontend/static/img/X1C.png b/frontend/static/img/X1C.png
new file mode 100644
index 0000000..e969da2
Binary files /dev/null and b/frontend/static/img/X1C.png differ
diff --git a/frontend/static/img/x1c.svg b/frontend/static/img/x1c.svg
new file mode 100644
index 0000000..7b9c840
--- /dev/null
+++ b/frontend/static/img/x1c.svg
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/frontend/static/jobs.css b/frontend/static/jobs.css
new file mode 100644
index 0000000..46d5800
--- /dev/null
+++ b/frontend/static/jobs.css
@@ -0,0 +1,484 @@
+/* Jobs Page Styles - Modern Design */
+
+/* Stats Grid */
+.stats-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
+ gap: 1.5rem;
+ margin-bottom: 2rem;
+}
+
+.stat-card {
+ background: linear-gradient(135deg, var(--panel) 0%, rgba(20, 26, 34, 0.8) 100%);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 1.5rem;
+ display: flex;
+ align-items: center;
+ gap: 1rem;
+ transition: all 0.3s ease;
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+
+.stat-card:hover {
+ transform: translateY(-2px);
+ box-shadow: 0 6px 12px rgba(0, 0, 0, 0.15),
+ 0 0 20px rgba(243, 156, 18, 0.1);
+ border-color: var(--accent);
+}
+
+.stat-icon {
+ font-size: 2.5rem;
+ opacity: 0.9;
+}
+
+.stat-content {
+ flex: 1;
+}
+
+.stat-label {
+ font-size: 0.875rem;
+ color: var(--text-dim);
+ margin-bottom: 0.25rem;
+ text-transform: uppercase;
+ letter-spacing: 0.5px;
+}
+
+.stat-value {
+ font-size: 2rem;
+ font-weight: 700;
+ color: var(--text);
+ background: linear-gradient(135deg, var(--accent) 0%, #e67e22 100%);
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ background-clip: text;
+}
+
+/* Filter Section */
+.filter-section {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 1.5rem;
+ margin-bottom: 2rem;
+}
+
+.filter-group {
+ display: flex;
+ gap: 1rem;
+ flex-wrap: wrap;
+ align-items: center;
+ margin-bottom: 1rem;
+}
+
+.search-input {
+ flex: 1;
+ min-width: 200px;
+ padding: 0.75rem 1rem;
+ background: var(--bg);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-size: 0.9375rem;
+ transition: all 0.2s ease;
+}
+
+.search-input:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(243, 156, 18, 0.1);
+}
+
+.filter-select {
+ padding: 0.75rem 2.5rem 0.75rem 1rem;
+ background: var(--bg);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-size: 0.9375rem;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ appearance: none;
+ background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23f39c12' d='M6 9L1 4h10z'/%3E%3C/svg%3E");
+ background-repeat: no-repeat;
+ background-position: right 1rem center;
+}
+
+.filter-select:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(243, 156, 18, 0.1);
+}
+
+.filter-select option {
+ background: var(--panel);
+ color: var(--text);
+}
+
+.result-count {
+ color: var(--text-dim);
+ font-size: 0.9375rem;
+}
+
+.result-count span {
+ color: var(--accent);
+ font-weight: 600;
+}
+
+/* Table Section */
+.table-section {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ overflow: hidden;
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+
+.table-container {
+ overflow-x: auto;
+}
+
+table {
+ width: 100%;
+ border-collapse: collapse;
+}
+
+thead {
+ background: linear-gradient(135deg, rgba(243, 156, 18, 0.15) 0%, rgba(230, 126, 34, 0.1) 100%);
+ border-bottom: 2px solid var(--accent);
+}
+
+thead th {
+ padding: 1rem;
+ text-align: left;
+ font-weight: 600;
+ color: var(--accent);
+ text-transform: uppercase;
+ font-size: 0.8125rem;
+ letter-spacing: 0.5px;
+}
+
+tbody tr {
+ border-bottom: 1px solid var(--border);
+ transition: all 0.2s ease;
+}
+
+tbody tr:hover {
+ background: rgba(243, 156, 18, 0.05);
+ box-shadow: inset 0 0 0 1px rgba(243, 156, 18, 0.1);
+}
+
+tbody td {
+ padding: 0.875rem 1rem;
+ color: var(--text);
+}
+
+tbody td strong {
+ color: var(--text);
+}
+
+tbody td small {
+ font-size: 0.8125rem;
+ color: var(--text-dim);
+}
+
+/* Color Preview */
+.color-preview {
+ display: inline-block;
+ width: 20px;
+ height: 20px;
+ border-radius: 6px;
+ border: 2px solid var(--border);
+ box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
+}
+
+/* Status Badges */
+.status-badge {
+ display: inline-block;
+ padding: 0.25rem 0.75rem;
+ border-radius: 12px;
+ font-size: 0.75rem;
+ font-weight: 600;
+ text-transform: uppercase;
+ letter-spacing: 0.3px;
+}
+
+.status-badge.status-online {
+ background: rgba(46, 213, 115, 0.2);
+ color: var(--success);
+}
+
+.status-badge.status-printing {
+ background: rgba(243, 156, 18, 0.2);
+ color: var(--accent);
+}
+
+.status-badge.status-offline {
+ background: rgba(231, 76, 60, 0.2);
+ color: var(--error);
+}
+
+/* Table Actions */
+.table-actions {
+ display: flex;
+ gap: 0.5rem;
+}
+
+.btn-icon {
+ padding: 0.5rem 0.75rem;
+ background: transparent;
+ border: 1px solid var(--border);
+ border-radius: 6px;
+ color: var(--text);
+ cursor: pointer;
+ transition: all 0.2s ease;
+ font-size: 1rem;
+}
+
+.btn-icon:hover {
+ background: rgba(243, 156, 18, 0.1);
+ border-color: var(--accent);
+ transform: translateY(-2px);
+}
+
+.btn-icon.btn-delete:hover {
+ background: rgba(231, 76, 60, 0.1);
+ border-color: var(--error);
+ color: var(--error);
+}
+
+/* Buttons */
+.btn {
+ padding: 0.75rem 1.5rem;
+ border-radius: 8px;
+ font-weight: 600;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ border: none;
+ font-size: 0.9375rem;
+}
+
+.btn-primary {
+ background: linear-gradient(135deg, var(--accent) 0%, #e67e22 100%);
+ color: #fff;
+}
+
+.btn-primary:hover {
+ transform: translateY(-2px);
+ box-shadow: 0 4px 12px rgba(243, 156, 18, 0.4);
+}
+
+.btn-secondary {
+ background: transparent;
+ border: 1px solid var(--border);
+ color: var(--text);
+}
+
+.btn-secondary:hover {
+ background: rgba(243, 156, 18, 0.1);
+ border-color: var(--accent);
+}
+
+/* Notification Toast */
+.notification {
+ position: fixed;
+ bottom: 2rem;
+ right: 2rem;
+ padding: 1rem 1.5rem;
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
+ opacity: 0;
+ transform: translateY(20px);
+ transition: all 0.3s ease;
+ z-index: 2000;
+}
+
+.notification.show {
+ opacity: 1;
+ transform: translateY(0);
+}
+
+.notification-success {
+ border-left: 4px solid var(--success);
+}
+
+.notification-error {
+ border-left: 4px solid var(--error);
+}
+
+.notification-warning {
+ border-left: 4px solid var(--warning);
+}
+
+.notification-info {
+ border-left: 4px solid var(--accent);
+}
+
+/* Modal Styles */
+.modal {
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background: rgba(0, 0, 0, 0.5);
+ display: none;
+ align-items: center;
+ justify-content: center;
+ z-index: 3000;
+ animation: fadeIn 0.2s ease;
+}
+
+.modal.show {
+ display: flex;
+}
+
+@keyframes fadeIn {
+ from {
+ opacity: 0;
+ }
+ to {
+ opacity: 1;
+ }
+}
+
+.modal-content {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ width: 90%;
+ max-width: 600px;
+ max-height: 90vh;
+ overflow-y: auto;
+ animation: slideUp 0.3s ease;
+ box-shadow: 0 10px 40px rgba(0, 0, 0, 0.3);
+}
+
+.modal-content.modal-small {
+ max-width: 400px;
+}
+
+@keyframes slideUp {
+ from {
+ transform: translateY(20px);
+ opacity: 0;
+ }
+ to {
+ transform: translateY(0);
+ opacity: 1;
+ }
+}
+
+.modal-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 1.5rem;
+ border-bottom: 1px solid var(--border);
+}
+
+.modal-header h2 {
+ margin: 0;
+ color: var(--text);
+ font-size: 1.5rem;
+}
+
+.modal-close {
+ background: transparent;
+ border: none;
+ color: var(--text-dim);
+ font-size: 1.5rem;
+ cursor: pointer;
+ transition: all 0.2s ease;
+}
+
+.modal-close:hover {
+ color: var(--accent);
+}
+
+/* Form Styles */
+.modal-content form {
+ padding: 1.5rem;
+}
+
+.form-group {
+ margin-bottom: 1.5rem;
+}
+
+.form-group label {
+ display: block;
+ margin-bottom: 0.5rem;
+ color: var(--text);
+ font-weight: 600;
+ font-size: 0.9375rem;
+}
+
+.form-group input,
+.form-group select {
+ width: 100%;
+ padding: 0.75rem 1rem;
+ background: var(--bg);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-size: 0.9375rem;
+ font-family: inherit;
+ transition: all 0.2s ease;
+}
+
+.form-group input:focus,
+.form-group select:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(243, 156, 18, 0.1);
+}
+
+.form-group input::placeholder {
+ color: var(--text-dim);
+}
+
+.form-group select {
+ appearance: none;
+ background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23f39c12' d='M6 9L1 4h10z'/%3E%3C/svg%3E");
+ background-repeat: no-repeat;
+ background-position: right 1rem center;
+ padding-right: 2.5rem;
+}
+
+.form-group select option {
+ background: var(--panel);
+ color: var(--text);
+}
+
+.form-actions {
+ display: flex;
+ gap: 1rem;
+ justify-content: flex-end;
+ padding-top: 1.5rem;
+ border-top: 1px solid var(--border);
+}
+
+.form-actions .btn {
+ flex: 0 0 auto;
+}
+
+.btn-danger {
+ background: linear-gradient(135deg, var(--error) 0%, #c0392b 100%) !important;
+ color: #fff !important;
+}
+
+.btn-danger:hover {
+ box-shadow: 0 4px 12px rgba(231, 76, 60, 0.4) !important;
+}
+
+/* Modal text */
+.modal-content p {
+ color: var(--text);
+ margin: 1rem 0;
+ line-height: 1.6;
+}
+
+
diff --git a/frontend/static/jobs.js b/frontend/static/jobs.js
new file mode 100644
index 0000000..8bbd5ba
--- /dev/null
+++ b/frontend/static/jobs.js
@@ -0,0 +1,614 @@
+// Global variables
+let jobs = [];
+let printers = [];
+let spools = [];
+let materials = [];
+let currentJobId = null;
+let deleteJobId = null;
+let overrideJobId = null;
+
+// Load data on page load
+document.addEventListener('DOMContentLoaded', async () => {
+ // Stammdaten ERST laden, dann Jobs rendern, damit IDs aufgelöst werden
+ try {
+ await loadMaterials();
+ await loadPrinters();
+ await loadSpools();
+ await loadJobs();
+ await loadStats();
+ } catch (e) {
+ console.error('Init error', e);
+ }
+
+ // Set default start time to now
+ const now = new Date();
+ now.setMinutes(now.getMinutes() - now.getTimezoneOffset());
+ document.getElementById('startedAt').value = now.toISOString().slice(0, 16);
+
+ // Search and filter
+ document.getElementById('searchInput').addEventListener('input', filterJobs);
+ document.getElementById('filterPrinter').addEventListener('change', filterJobs);
+ document.getElementById('filterStatus').addEventListener('change', filterJobs);
+
+ // Spulen-Suche (Job-Formular)
+ const spoolSearch = document.getElementById('spoolSearch');
+ if (spoolSearch) {
+ spoolSearch.addEventListener('input', filterSpoolList);
+ }
+
+ // Spulen-Suche (Manual Usage Modal)
+ const usageSpoolSearch = document.getElementById('usageSpoolSearch');
+ if (usageSpoolSearch) {
+ usageSpoolSearch.addEventListener('input', filterUsageSpoolList);
+ }
+
+ // Auto-refresh alle 15 Sekunden
+ setInterval(async () => {
+ await loadJobs();
+ await loadStats();
+ }, 15000);
+});
+
+async function loadJobs() {
+ try {
+ const response = await fetch('/api/jobs/with-usage');
+ jobs = await response.json();
+ renderJobs(jobs);
+ } catch (error) {
+ console.error('Fehler beim Laden der Jobs:', error);
+ }
+}
+
+async function loadPrinters() {
+ try {
+ const response = await fetch('/api/printers/');
+ printers = await response.json();
+
+ // Populate printer selects
+ const printerSelect = document.getElementById('jobPrinter');
+ const filterSelect = document.getElementById('filterPrinter');
+
+ printers.forEach(printer => {
+ const option = `${printer.name} (${printer.printer_type || printer.type || '-'}) `;
+ printerSelect.innerHTML += option;
+ filterSelect.innerHTML += option;
+ });
+ } catch (error) {
+ console.error('Fehler beim Laden der Drucker:', error);
+ }
+}
+
+async function loadMaterials() {
+ try {
+ const response = await fetch('/api/materials/');
+ materials = await response.json();
+ } catch (error) {
+ console.error('Fehler beim Laden der Materialien:', error);
+ }
+}
+
+async function loadSpools() {
+ try {
+ const response = await fetch('/api/spools/');
+ spools = await response.json();
+ renderSpoolList(spools);
+ } catch (error) {
+ console.error('Fehler beim Laden der Spulen:', error);
+ }
+}
+
+function renderSpoolList(spoolsToRender) {
+ const spoolSelect = document.getElementById('jobSpool');
+ const usageSpoolSelect = document.getElementById('usageSpool');
+
+ // Reset
+ spoolSelect.innerHTML = '-- Keine Spule -- ';
+ if (usageSpoolSelect) {
+ usageSpoolSelect.innerHTML = '-- Spule wählen -- ';
+ }
+
+ spoolsToRender.forEach(spool => {
+ // Material-Info holen
+ const material = materials.find(m => m.id === spool.material_id);
+ const materialName = material ? material.name : 'Unbekannt';
+ const materialColor = material ? material.color : null;
+
+ // Farbe extrahieren (Material-Farbe oder Tray-Farbe)
+ let colorName = '';
+ if (materialColor) {
+ // Verwende Material-Farbe
+ colorName = materialColor.replace('#', '').toUpperCase();
+ } else if (spool.tray_color) {
+ // Verwende Tray-Farbe (Bambu Lab)
+ colorName = spool.tray_color.substring(0, 6).toUpperCase();
+ }
+
+ // Spulen-Nummer oder "RFID"
+ const spoolNumber = spool.spool_number ? `#${spool.spool_number}` : (spool.tray_uuid ? '📡 RFID' : '');
+
+ // Restgewicht mit Format: "332.50g / 1000g"
+ // API liefert weight_current als "weight" (serialization_alias in spool.py:178)
+ const weightCurrent = spool.weight || 0;
+ const weightFull = spool.weight_full || 1000;
+ const weight = `${weightCurrent.toFixed(2)}g / ${weightFull}g`;
+
+ // Label erstellen: "PLA Basic BLAU | #5 | 332.50g / 1000g"
+ const parts = [materialName];
+ if (colorName) parts.push(colorName);
+ if (spoolNumber) parts.push(`| ${spoolNumber}`);
+ parts.push(`| ${weight}`);
+
+ const displayText = parts.join(' ');
+
+ const option = `${displayText} `;
+ spoolSelect.innerHTML += option;
+
+ if (usageSpoolSelect) {
+ usageSpoolSelect.innerHTML += option;
+ }
+ });
+}
+
+function filterSpoolList() {
+ const searchTerm = document.getElementById('spoolSearch').value.toLowerCase();
+
+ if (!searchTerm) {
+ renderSpoolList(spools);
+ return;
+ }
+
+ const filtered = spools.filter(spool => {
+ const material = materials.find(m => m.id === spool.material_id);
+ const materialName = material ? material.name.toLowerCase() : '';
+ const brand = material && material.brand ? material.brand.toLowerCase() : '';
+ const spoolNumber = spool.spool_number ? spool.spool_number.toString() : '';
+ const label = spool.label ? spool.label.toLowerCase() : '';
+
+ return materialName.includes(searchTerm) ||
+ brand.includes(searchTerm) ||
+ spoolNumber.includes(searchTerm) ||
+ label.includes(searchTerm);
+ });
+
+ renderSpoolList(filtered);
+}
+
+function filterUsageSpoolList() {
+ const searchTerm = document.getElementById('usageSpoolSearch').value.toLowerCase();
+
+ if (!searchTerm) {
+ renderSpoolList(spools);
+ return;
+ }
+
+ const filtered = spools.filter(spool => {
+ const material = materials.find(m => m.id === spool.material_id);
+ const materialName = material ? material.name.toLowerCase() : '';
+ const brand = material && material.brand ? material.brand.toLowerCase() : '';
+ const spoolNumber = spool.spool_number ? spool.spool_number.toString() : '';
+ const label = spool.label ? spool.label.toLowerCase() : '';
+
+ return materialName.includes(searchTerm) ||
+ brand.includes(searchTerm) ||
+ spoolNumber.includes(searchTerm) ||
+ label.includes(searchTerm);
+ });
+
+ renderSpoolList(filtered);
+}
+
+async function loadStats() {
+ try {
+ const response = await fetch('/api/jobs/stats/summary');
+ const stats = await response.json();
+
+ document.getElementById('totalJobs').textContent = stats.total_jobs;
+ document.getElementById('completedJobs').textContent = stats.completed_jobs;
+ document.getElementById('activeJobs').textContent = stats.active_jobs;
+ document.getElementById('totalFilament').textContent = stats.total_filament_g + 'g';
+ } catch (error) {
+ console.error('Fehler beim Laden der Statistiken:', error);
+ }
+}
+
+function renderJobs(jobsList) {
+ const tbody = document.getElementById('jobsTable');
+ tbody.innerHTML = '';
+
+ // Update count
+ document.getElementById('jobCount').textContent = jobsList.length;
+
+ if (jobsList.length === 0) {
+ tbody.innerHTML = 'Keine Druckaufträge vorhanden ';
+ return;
+ }
+
+ jobsList.forEach(job => {
+ const printer = printers.find(p => p.id === job.printer_id);
+ const primarySpool = spools.find(s => s.id === job.spool_id);
+
+ // Prüfe ob Job Tracking braucht (kein Verbrauch und keine Spule)
+ const needsTracking = (!job.spool_id || job.filament_used_g === 0 || job.filament_used_mm === 0) && job.finished_at;
+
+ // Status-Badge basierend auf job.status
+ let status;
+ const jobStatus = (job.status || 'running').toLowerCase();
+
+ if (jobStatus === 'completed') {
+ status = '✓ Abgeschlossen ';
+ } else if (jobStatus === 'running' || jobStatus === 'printing') {
+ status = '▶ Aktiv ';
+ } else if (jobStatus === 'failed' || jobStatus === 'error' || jobStatus === 'exception') {
+ status = '✗ Fehlgeschlagen ';
+ } else if (jobStatus === 'cancelled' || jobStatus === 'canceled') {
+ status = '⊗ Abgebrochen ';
+ } else if (jobStatus === 'aborted' || jobStatus === 'stopped') {
+ status = '⊘ Gestoppt ';
+ } else {
+ // Fallback für unbekannte Status
+ status = `${jobStatus} `;
+ }
+
+ const verbrauch = needsTracking ?
+ '⚠️ 0g ' :
+ `${job.filament_used_g.toFixed(1)}g ${(job.filament_used_mm / 1000).toFixed(2)}m `;
+
+ // Berechne Dauer
+ const start = new Date(job.started_at);
+ const end = job.finished_at ? new Date(job.finished_at) : new Date();
+ const durationMs = end - start;
+ const durationMin = Math.floor(durationMs / 60000);
+ const hours = Math.floor(durationMin / 60);
+ const minutes = durationMin % 60;
+ const durationText = hours > 0 ? `${hours}h ${minutes}m` : `${minutes}m`;
+
+ const spoolDisplay = primarySpool ?
+ `
+ ${primarySpool.tray_color ? ` ` : ''}
+ ${primarySpool.label || `Spule ${primarySpool.id.substring(0,6)}`}
+
` :
+ (needsTracking ? '⚠️ Keine ' : '- ');
+
+ // Aktionen: "Verbrauch nachtragen" Button wenn nötig
+ const actions = needsTracking ?
+ `
+
+ 📝 Nachtragen
+
+ 🗑️
+
` :
+ `
+ ✏️
+ 🗑️
+
`;
+
+ const rowClass = needsTracking ? ' style="background: var(--warning-bg, #fff3cd);"' : '';
+
+ const row = `
+
+ ${needsTracking ? '⚠️ ' : ''}${job.name}
+ ${printer ? printer.name : 'Unbekannt '}
+ ${spoolDisplay}
+ ${verbrauch}
+ ${status}
+ ${durationText}
+
+ ${actions}
+
+
+ `;
+ tbody.innerHTML += row;
+ });
+}
+
+function filterJobs() {
+ const search = document.getElementById('searchInput').value.toLowerCase();
+ const printerFilter = document.getElementById('filterPrinter').value;
+ const statusFilter = document.getElementById('filterStatus').value;
+
+ const filtered = jobs.filter(job => {
+ const matchSearch = job.name.toLowerCase().includes(search);
+ const matchPrinter = !printerFilter || job.printer_id === printerFilter;
+
+ // Erweiterter Status-Filter mit "no-tracking"
+ let matchStatus = !statusFilter;
+ if (statusFilter === 'active') {
+ matchStatus = !job.finished_at;
+ } else if (statusFilter === 'completed') {
+ matchStatus = job.finished_at;
+ } else if (statusFilter === 'no-tracking') {
+ // Jobs ohne Tracking: kein Verbrauch ODER keine Spule UND abgeschlossen
+ matchStatus = (!job.spool_id || job.filament_used_g === 0 || job.filament_used_mm === 0) && job.finished_at;
+ }
+
+ return matchSearch && matchPrinter && matchStatus;
+ });
+
+ renderJobs(filtered);
+}
+
+function clearFilters() {
+ document.getElementById('searchInput').value = '';
+ document.getElementById('filterPrinter').value = '';
+ document.getElementById('filterStatus').value = '';
+ filterJobs();
+}
+
+function openAddModal() {
+ currentJobId = null;
+ document.getElementById('modalTitle').textContent = 'Neuer Druckauftrag';
+ document.getElementById('jobName').value = '';
+ document.getElementById('jobPrinter').value = '';
+ document.getElementById('jobSpool').value = '';
+ document.getElementById('filamentUsedMm').value = '0';
+ document.getElementById('filamentUsedG').value = '0';
+ document.getElementById('finishedAt').value = '';
+
+ // Bei neuen Jobs sind Verbrauchsfelder immer editierbar
+ const mmField = document.getElementById('filamentUsedMm');
+ const gField = document.getElementById('filamentUsedG');
+ mmField.readOnly = false;
+ gField.readOnly = false;
+ mmField.style.backgroundColor = '';
+ gField.style.backgroundColor = '';
+ mmField.title = '';
+ gField.title = '';
+
+ const now = new Date();
+ now.setMinutes(now.getMinutes() - now.getTimezoneOffset());
+ document.getElementById('startedAt').value = now.toISOString().slice(0, 16);
+
+ document.getElementById('jobModal').style.display = 'flex';
+}
+
+function editJob(id) {
+ const job = jobs.find(j => j.id === id);
+ if (!job) return;
+
+ currentJobId = id;
+ document.getElementById('modalTitle').textContent = 'Job bearbeiten';
+ document.getElementById('jobName').value = job.name;
+ document.getElementById('jobPrinter').value = job.printer_id;
+ document.getElementById('jobSpool').value = job.spool_id || '';
+ document.getElementById('filamentUsedMm').value = job.filament_used_mm;
+ document.getElementById('filamentUsedG').value = job.filament_used_g;
+
+ // Verbrauchsfelder: Nur bei manuellen Jobs editierbar
+ // MQTT-getrackte Jobs haben automatischen Verbrauch -> readonly
+ const hasTracking = job.filament_used_g > 0 || job.filament_used_mm > 0;
+ const mmField = document.getElementById('filamentUsedMm');
+ const gField = document.getElementById('filamentUsedG');
+
+ if (hasTracking) {
+ mmField.readOnly = true;
+ gField.readOnly = true;
+ mmField.style.backgroundColor = 'var(--bg-secondary, #f5f5f5)';
+ gField.style.backgroundColor = 'var(--bg-secondary, #f5f5f5)';
+ mmField.title = 'Automatisch getrackt (nicht editierbar)';
+ gField.title = 'Automatisch getrackt (nicht editierbar)';
+ } else {
+ mmField.readOnly = false;
+ gField.readOnly = false;
+ mmField.style.backgroundColor = '';
+ gField.style.backgroundColor = '';
+ mmField.title = '';
+ gField.title = '';
+ }
+
+ const startDate = new Date(job.started_at);
+ startDate.setMinutes(startDate.getMinutes() - startDate.getTimezoneOffset());
+ document.getElementById('startedAt').value = startDate.toISOString().slice(0, 16);
+
+ if (job.finished_at) {
+ const endDate = new Date(job.finished_at);
+ endDate.setMinutes(endDate.getMinutes() - endDate.getTimezoneOffset());
+ document.getElementById('finishedAt').value = endDate.toISOString().slice(0, 16);
+ } else {
+ document.getElementById('finishedAt').value = '';
+ }
+
+ document.getElementById('jobModal').style.display = 'flex';
+}
+
+function closeAddModal() {
+ document.getElementById('jobModal').style.display = 'none';
+ currentJobId = null;
+}
+
+function closeDeleteModal() {
+ document.getElementById('deleteModal').style.display = 'none';
+ deleteJobId = null;
+}
+
+async function saveJob(event) {
+ event.preventDefault();
+
+ const name = document.getElementById('jobName').value.trim();
+ const printer_id = document.getElementById('jobPrinter').value;
+ const spool_id = document.getElementById('jobSpool').value || null;
+ const filament_used_mm = parseFloat(document.getElementById('filamentUsedMm').value) || 0;
+ const filament_used_g = parseFloat(document.getElementById('filamentUsedG').value) || 0;
+ const started_at = document.getElementById('startedAt').value;
+ const finished_at = document.getElementById('finishedAt').value || null;
+
+ if (!name || !printer_id) {
+ alert('Bitte füllen Sie alle Pflichtfelder aus!');
+ return;
+ }
+
+ const jobData = {
+ name,
+ printer_id,
+ spool_id,
+ filament_used_mm,
+ filament_used_g,
+ started_at: started_at ? new Date(started_at).toISOString() : new Date().toISOString(),
+ finished_at: finished_at ? new Date(finished_at).toISOString() : null
+ };
+
+ try {
+ const url = currentJobId ? `/api/jobs/${currentJobId}` : '/api/jobs/';
+ const method = currentJobId ? 'PUT' : 'POST';
+
+ const response = await fetch(url, {
+ method,
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(jobData)
+ });
+
+ if (response.ok) {
+ closeAddModal();
+ clearFilters();
+ await loadJobs();
+ await loadStats();
+ showNotification(currentJobId ? 'Job aktualisiert!' : 'Job erstellt!', 'success');
+ } else {
+ alert('Fehler beim Speichern des Jobs');
+ }
+ } catch (error) {
+ console.error('Fehler:', error);
+ alert('Fehler beim Speichern');
+ }
+}
+
+function deleteJob(id) {
+ deleteJobId = id;
+ document.getElementById('deleteModal').style.display = 'flex';
+}
+
+async function confirmDelete() {
+ if (!deleteJobId) return;
+
+ try {
+ const response = await fetch(`/api/jobs/${deleteJobId}`, {
+ method: 'DELETE'
+ });
+
+ if (response.ok) {
+ closeDeleteModal();
+ clearFilters();
+ await loadJobs();
+ await loadStats();
+ showNotification('Job gelöscht!', 'success');
+ } else {
+ alert('Fehler beim Löschen des Jobs');
+ }
+ } catch (error) {
+ console.error('Fehler:', error);
+ alert('Fehler beim Löschen');
+ }
+}
+
+// ===== Manual Usage Modal =====
+let manualUsageJobId = null;
+
+function openManualUsageModal(jobId) {
+ manualUsageJobId = jobId;
+ const job = jobs.find(j => j.id === jobId);
+
+ if (!job) {
+ alert('Job nicht gefunden');
+ return;
+ }
+
+ // Job-Name anzeigen
+ document.getElementById('usageJobName').textContent = job.name;
+
+ // Spulen-Dropdown befüllen (nur verfügbare Spulen)
+ const spoolSelect = document.getElementById('usageSpool');
+ spoolSelect.innerHTML = '-- Spule wählen -- ';
+
+ spools.forEach(spool => {
+ // Filtere nur Spulen die verfügbar sind (nicht leer, nicht im AMS eines anderen Druckers)
+ const isAvailable = !spool.is_empty;
+ if (isAvailable) {
+ const name = spool.label || `#${spool.spool_number || spool.id.substring(0, 6)}`;
+ const vendor = spool.vendor || '';
+ const color = spool.tray_color ? ` (${spool.tray_color.substring(0, 6)})` : '';
+ const displayName = vendor ? `${name} - ${vendor}${color}` : `${name}${color}`;
+ spoolSelect.innerHTML += `${displayName} `;
+ }
+ });
+
+ // Felder zurücksetzen
+ document.getElementById('usageGrams').value = '';
+ document.getElementById('usageMm').value = '';
+
+ // Modal öffnen
+ document.getElementById('manualUsageModal').style.display = 'flex';
+}
+
+function closeManualUsageModal() {
+ document.getElementById('manualUsageModal').style.display = 'none';
+ manualUsageJobId = null;
+}
+
+async function saveManualUsage(event) {
+ event.preventDefault();
+
+ const spool_id = document.getElementById('usageSpool').value;
+ const used_g = parseFloat(document.getElementById('usageGrams').value);
+ const usageMmMeters = parseFloat(document.getElementById('usageMm').value) || 0;
+ const used_mm = usageMmMeters > 0 ? usageMmMeters * 1000 : null; // Meter → mm
+
+ if (!spool_id) {
+ alert('Bitte wähle eine Spule aus!');
+ return;
+ }
+
+ if (!used_g && !used_mm) {
+ alert('Bitte gib den Verbrauch in Gramm oder Meter an!');
+ return;
+ }
+
+ const payload = {
+ spool_id,
+ used_g: used_g || null,
+ used_mm: used_mm || null
+ };
+
+ try {
+ const response = await fetch(`/api/jobs/${manualUsageJobId}/manual-usage`, {
+ method: 'PATCH',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload)
+ });
+
+ if (response.ok) {
+ closeManualUsageModal();
+ clearFilters();
+ await loadJobs();
+ await loadStats();
+ await loadSpools(); // Spulen neu laden (Gewicht hat sich geändert)
+ showNotification('Verbrauch erfolgreich nachgetragen!', 'success');
+ } else {
+ const error = await response.json();
+ alert(`Fehler: ${error.detail || 'Verbrauch konnte nicht gespeichert werden'}`);
+ }
+ } catch (error) {
+ console.error('Fehler:', error);
+ alert('Fehler beim Speichern des Verbrauchs');
+ }
+}
+
+
+// Close modal on ESC or background click
+document.addEventListener('keydown', (e) => {
+ if (e.key === 'Escape') {
+ closeAddModal();
+ closeDeleteModal();
+ closeManualUsageModal();
+ }
+});
+
+document.getElementById('jobModal')?.addEventListener('click', (e) => {
+ if (e.target.id === 'jobModal') closeAddModal();
+});
+
+document.getElementById('deleteModal')?.addEventListener('click', (e) => {
+ if (e.target.id === 'deleteModal') closeDeleteModal();
+});
+
+document.getElementById('manualUsageModal')?.addEventListener('click', (e) => {
+ if (e.target.id === 'manualUsageModal') closeManualUsageModal();
+});
diff --git a/frontend/static/js/activePrintCard.js b/frontend/static/js/activePrintCard.js
new file mode 100644
index 0000000..29f45ed
--- /dev/null
+++ b/frontend/static/js/activePrintCard.js
@@ -0,0 +1,128 @@
+// Vanilla renderer for Active Print Card
+// Exposes global function: renderActiveJobs(container, activePrinters)
+(function () {
+ function pad(n) { return n < 10 ? '0' + n : '' + n; }
+
+ function formatTime(minutes) {
+ if (minutes == null) return '-';
+ minutes = Math.max(0, Math.round(minutes));
+ const h = Math.floor(minutes / 60);
+ const m = minutes % 60;
+ return h > 0 ? `${h}h ${m}m` : `${m}m`;
+ }
+
+ function getStartTime(progressPercent, timeRemainingMin) {
+ try {
+ const now = new Date();
+ const elapsed = (progressPercent / 100) * ((timeRemainingMin || 0) + (progressPercent?0:0));
+ // If no reliable total, fallback to now
+ const start = new Date(now.getTime() - (elapsed * 60000));
+ return start.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit' });
+ } catch (e) { return '-'; }
+ }
+
+ function getEndTime(timeRemainingMin) {
+ try {
+ const now = new Date();
+ const end = new Date(now.getTime() + (Math.max(0, Math.round(timeRemainingMin || 0)) * 60000));
+ return end.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit' });
+ } catch (e) { return '-'; }
+ }
+
+ function toPercent(v) {
+ if (v == null) return 0;
+ const n = Number(v);
+ if (isNaN(n)) return 0;
+ return Math.max(0, Math.min(100, Math.round(n)));
+ }
+
+ function renderCardForPrinter(printer) {
+ const rawGcode = printer.live?.gcode_file || printer.live?.file || printer.live?.job_name || '';
+ const gcodeBase = rawGcode && rawGcode.includes('/') ? rawGcode.split('/').pop() : rawGcode;
+ const jobName = printer.live?.subtask_name || printer.live?.job_name || gcodeBase || 'Unbekannter Job';
+ const printerName = printer.name || printer.cloud_serial || 'Unbekannter Drucker';
+ const progress = toPercent(printer.live?.percent ?? printer.live?.progress ?? printer.live?.progress_percent);
+
+ // time remaining: try common fields (seconds/minutes)
+ let timeRemainingMin = null;
+ const remSec = printer.live?.mc_remaining_time || printer.live?.remaining_time || printer.live?.remain_time || printer.live?.mc_remaining_seconds;
+ if (remSec != null && Number(remSec) !== 0) {
+ // assume seconds
+ timeRemainingMin = Math.round(Number(remSec) / 60);
+ } else if (printer.live?.time_remaining_min != null) {
+ timeRemainingMin = Number(printer.live.time_remaining_min);
+ }
+
+ // Fallback: if not present, try to estimate via percent (unknown total -> leave '-')
+ const timeText = formatTime(timeRemainingMin);
+ const startText = getStartTime(progress, timeRemainingMin);
+ const endText = getEndTime(timeRemainingMin);
+
+ return `
+
+
+
+
+
+
${100 - progress}%
+
left
+
+
+
${timeText}
+
+
+
+
+ `;
+ }
+
+ // basic HTML escaper
+ function escapeHtml(str) {
+ if (str == null) return '';
+ return String(str).replace(/[&<>\"]/g, function (c) { return {'&':'&','<':'<','>':'>','"':'"'}[c]; });
+ }
+
+ window.renderActiveJobs = function (container, activePrinters) {
+ if (!container) return;
+ if (!Array.isArray(activePrinters) || activePrinters.length === 0) {
+ container.innerHTML = 'Keine aktiven Drucke
';
+ return;
+ }
+ // Render first active print as large card, others as smaller job-cards
+ const html = activePrinters.map((p, idx) => {
+ if (idx === 0) return renderCardForPrinter(p);
+ // fallback smaller card for additional printers
+ const name = escapeHtml(p.live?.job_name || p.current_job_name || 'Unbekannter Job');
+ const pname = escapeHtml(p.name || p.cloud_serial || 'Unbekannter Drucker');
+ const prog = toPercent(p.live?.percent ?? p.progress ?? 0);
+ return `
+
+ `;
+ }).join('');
+ container.innerHTML = html;
+ };
+
+})();
diff --git a/frontend/static/js/admin_notifications.js b/frontend/static/js/admin_notifications.js
new file mode 100644
index 0000000..46430f9
--- /dev/null
+++ b/frontend/static/js/admin_notifications.js
@@ -0,0 +1,397 @@
+let notificationsConfig = [];
+let selectedNotificationId = null;
+
+async function loadNotifications() {
+ try {
+ const res = await fetch("/api/notifications-config");
+ const data = await res.json();
+ notificationsConfig = data.notifications || [];
+ if (!selectedNotificationId && notificationsConfig.length) {
+ selectedNotificationId = notificationsConfig[0].id;
+ }
+ renderList();
+ const current = notificationsConfig.find((n) => n.id === selectedNotificationId) || notificationsConfig[0];
+ if (current) fillForm(current);
+ } catch (err) {
+ console.error("Konnte Notifications nicht laden", err);
+ }
+}
+
+function fillForm(notification) {
+ document.getElementById("notif-id").value = notification.id || "";
+ document.getElementById("notif-label").value = notification.label || "";
+ document.getElementById("notif-message").value = notification.message || "";
+ document.getElementById("notif-type").value = notification.type || "info";
+ document.getElementById("notif-trigger").value = notification.trigger?.type || "manual";
+ document.getElementById("notif-enabled").checked = notification.enabled !== false;
+ document.getElementById("notif-persistent").checked = Boolean(notification.persistent);
+
+ // Webhook fields
+ document.getElementById("notif-webhook-url").value = notification.webhook?.url || "";
+ document.getElementById("notif-webhook-type").value = notification.webhook?.type || "";
+ document.getElementById("notif-webhook-username").value = notification.webhook?.username || "";
+
+ // Update trigger section visibility
+ updateTriggerSection();
+
+ // Trigger conditions based on trigger type
+ const triggerType = notification.trigger?.type;
+ if (triggerType && notification.trigger?.condition) {
+ const condition = notification.trigger.condition;
+
+ if (triggerType === 'temperature') {
+ document.getElementById("temp-operator").value = condition.operator || ">";
+ document.getElementById("temp-value").value = condition.value || "";
+ } else if (triggerType === 'humidity') {
+ document.getElementById("humidity-operator").value = condition.operator || ">";
+ document.getElementById("humidity-value").value = condition.value || "";
+ } else if (triggerType === 'print_time') {
+ document.getElementById("printtime-operator").value = condition.operator || ">";
+ document.getElementById("printtime-value").value = condition.value || "";
+ } else if (triggerType === 'filament_weight') {
+ document.getElementById("filament-operator").value = condition.operator || "<";
+ document.getElementById("filament-value").value = condition.value || "";
+ } else if (triggerType === 'custom') {
+ document.getElementById("custom-condition-code").value = condition.code || "";
+ }
+ }
+
+ selectedNotificationId = notification.id;
+}
+
+function readForm() {
+ const data = {
+ id: document.getElementById("notif-id").value.trim(),
+ label: document.getElementById("notif-label").value.trim(),
+ message: document.getElementById("notif-message").value.trim(),
+ type: document.getElementById("notif-type").value,
+ persistent: document.getElementById("notif-persistent").checked,
+ enabled: document.getElementById("notif-enabled").checked,
+ };
+
+ // Webhook configuration
+ const webhookUrl = document.getElementById("notif-webhook-url").value.trim();
+ const webhookType = document.getElementById("notif-webhook-type").value;
+ if (webhookUrl && webhookType) {
+ data.webhook = {
+ url: webhookUrl,
+ type: webhookType,
+ username: document.getElementById("notif-webhook-username").value.trim() || "FilamentHub Bot"
+ };
+ }
+
+ // Trigger configuration
+ const triggerType = document.getElementById("notif-trigger").value;
+ data.trigger = { type: triggerType };
+
+ // Add condition based on trigger type
+ if (triggerType === 'temperature') {
+ data.trigger.condition = {
+ type: 'temperature',
+ operator: document.getElementById("temp-operator").value,
+ value: parseFloat(document.getElementById("temp-value").value) || 0
+ };
+ } else if (triggerType === 'humidity') {
+ data.trigger.condition = {
+ type: 'humidity',
+ operator: document.getElementById("humidity-operator").value,
+ value: parseFloat(document.getElementById("humidity-value").value) || 0
+ };
+ } else if (triggerType === 'print_time') {
+ data.trigger.condition = {
+ type: 'print_time',
+ operator: document.getElementById("printtime-operator").value,
+ value: parseFloat(document.getElementById("printtime-value").value) || 0
+ };
+ } else if (triggerType === 'filament_weight') {
+ data.trigger.condition = {
+ type: 'filament_weight',
+ operator: document.getElementById("filament-operator").value,
+ value: parseFloat(document.getElementById("filament-value").value) || 0
+ };
+ } else if (triggerType === 'custom') {
+ data.trigger.condition = {
+ type: 'custom',
+ code: document.getElementById("custom-condition-code").value.trim()
+ };
+ }
+
+ return data;
+}
+
+async function saveNotification() {
+ const data = readForm();
+ if (!data.id || !data.message) {
+ alert("ID und Nachricht sind Pflichtfelder.");
+ return;
+ }
+ const idx = notificationsConfig.findIndex((n) => n.id === data.id);
+ if (idx >= 0) {
+ notificationsConfig[idx] = data;
+ } else {
+ notificationsConfig.push(data);
+ }
+ selectedNotificationId = data.id;
+ await persistConfig();
+ renderList();
+ if (window.renderPersistentAlerts) {
+ renderPersistentAlerts();
+ }
+}
+
+async function deleteNotification(id) {
+ notificationsConfig = notificationsConfig.filter((n) => n.id !== id);
+ if (selectedNotificationId === id) {
+ selectedNotificationId = notificationsConfig[0]?.id || null;
+ if (notificationsConfig[0]) fillForm(notificationsConfig[0]);
+ }
+ await persistConfig();
+ renderList();
+}
+
+async function persistConfig() {
+ try {
+ await fetch("/api/notifications-config", {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ notifications: notificationsConfig }),
+ });
+ } catch (err) {
+ console.error("Konnte Notifications nicht speichern", err);
+ }
+}
+
+async function triggerNotification(id) {
+ const targetId = id || readForm().id;
+ if (!targetId) return;
+ try {
+ await fetch("/api/notifications-trigger", {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ id: targetId }),
+ });
+ if (typeof window.triggerAlert === "function") {
+ const notification = notificationsConfig.find((n) => n.id === targetId);
+ if (notification && notification.enabled !== false) {
+ window.triggerAlert(notification);
+ }
+ }
+ } catch (err) {
+ console.error("Trigger fehlgeschlagen", err);
+ }
+}
+
+function testNotification() {
+ const data = readForm();
+ if (!data.id || !data.message) {
+ alert("ID und Nachricht sind Pflichtfelder.");
+ return;
+ }
+ if (typeof window.triggerAlert === "function") {
+ window.triggerAlert({ ...data, enabled: true });
+ }
+}
+
+function renderList() {
+ const container = document.getElementById("notification-cards-container");
+ container.innerHTML = "";
+
+ if (!notificationsConfig.length) {
+ container.innerHTML = `
+
+
+
+
+
+
Keine Benachrichtigungen
+
Erstelle deine erste Benachrichtigung mit dem Button oben
+
+ `;
+ return;
+ }
+
+ notificationsConfig.forEach((n) => {
+ const card = document.createElement("div");
+ card.className = `notif-card ${n.enabled === false ? 'inactive' : ''}`;
+
+ const typeEmoji = {
+ success: '✓',
+ warn: '⚠️',
+ error: '✕',
+ info: 'ℹ️'
+ }[n.type] || 'ℹ️';
+
+ card.innerHTML = `
+
+ ${n.label || n.id}
+ ${n.message}
+
+
+ ✏️ Bearbeiten
+ ⚡ Triggern
+ 🗑️ Löschen
+
+ `;
+
+ container.appendChild(card);
+ });
+}
+
+function openNotifModal(title = 'Neue Benachrichtigung') {
+ document.getElementById('notif-modal-title').textContent = title;
+ document.getElementById('notif-edit-modal').classList.add('active');
+ document.body.style.overflow = 'hidden';
+}
+
+function closeNotifModal() {
+ document.getElementById('notif-edit-modal').classList.remove('active');
+ document.body.style.overflow = 'auto';
+}
+
+function editNotification(id) {
+ const notification = notificationsConfig.find(n => n.id === id);
+ if (notification) {
+ fillForm(notification);
+ openNotifModal('Benachrichtigung bearbeiten');
+ }
+}
+
+function confirmDeleteNotification(id) {
+ if (confirm(`Benachrichtigung "${id}" wirklich löschen?`)) {
+ deleteNotification(id);
+ }
+}
+
+function createNewNotification() {
+ // Reset form
+ document.getElementById('notif-id').value = '';
+ document.getElementById('notif-label').value = '';
+ document.getElementById('notif-message').value = '';
+ document.getElementById('notif-type').value = 'info';
+ document.getElementById('notif-trigger').value = 'manual';
+ document.getElementById('notif-enabled').checked = true;
+ document.getElementById('notif-persistent').checked = false;
+
+ // Reset webhook fields
+ document.getElementById('notif-webhook-url').value = '';
+ document.getElementById('notif-webhook-type').value = '';
+ document.getElementById('notif-webhook-username').value = '';
+
+ // Reset all condition fields
+ document.getElementById('temp-operator').value = '>';
+ document.getElementById('temp-value').value = '';
+ document.getElementById('humidity-operator').value = '>';
+ document.getElementById('humidity-value').value = '';
+ document.getElementById('printtime-operator').value = '>';
+ document.getElementById('printtime-value').value = '';
+ document.getElementById('filament-operator').value = '<';
+ document.getElementById('filament-value').value = '';
+ document.getElementById('custom-condition-code').value = '';
+
+ // Hide trigger section
+ updateTriggerSection();
+
+ selectedNotificationId = null;
+ openNotifModal('Neue Benachrichtigung');
+}
+
+// Update visibility of trigger conditions section based on trigger type
+function updateTriggerSection() {
+ const triggerType = document.getElementById('notif-trigger').value;
+ const triggerSection = document.getElementById('trigger-conditions-section');
+
+ // Hide all condition configs first
+ document.querySelectorAll('.condition-config').forEach(el => {
+ el.style.display = 'none';
+ });
+
+ // Show trigger section and relevant condition based on trigger type
+ if (triggerType === 'temperature') {
+ triggerSection.style.display = 'block';
+ document.getElementById('condition-temperature').style.display = 'block';
+ } else if (triggerType === 'humidity') {
+ triggerSection.style.display = 'block';
+ document.getElementById('condition-humidity').style.display = 'block';
+ } else if (triggerType === 'print_time') {
+ triggerSection.style.display = 'block';
+ document.getElementById('condition-print_time').style.display = 'block';
+ } else if (triggerType === 'filament_weight') {
+ triggerSection.style.display = 'block';
+ document.getElementById('condition-filament_weight').style.display = 'block';
+ } else if (triggerType === 'custom') {
+ triggerSection.style.display = 'block';
+ document.getElementById('condition-custom').style.display = 'block';
+ } else {
+ // Hide trigger section for manual, print_done, error, material_low
+ triggerSection.style.display = 'none';
+ }
+}
+
+// Neue Funktion zum Triggern mit Auswahl
+async function triggerSelectedNotification() {
+ const notifId = document.getElementById("notif-id").value.trim();
+ const triggerType = document.getElementById("notif-trigger").value;
+ if (!notifId) {
+ alert("Bitte zuerst eine Notification auswählen oder anlegen.");
+ return;
+ }
+ try {
+ await fetch("/api/notifications-trigger", {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ id: notifId, trigger: triggerType })
+ });
+ if (typeof window.triggerAlert === "function") {
+ const notification = notificationsConfig.find((n) => n.id === notifId);
+ if (notification && notification.enabled !== false) {
+ window.triggerAlert(notification);
+ }
+ }
+ } catch (err) {
+ console.error("Trigger fehlgeschlagen", err);
+ }
+}
+
+document.addEventListener("DOMContentLoaded", () => {
+ loadNotifications();
+
+ // Modal buttons
+ document.getElementById("btn-save").addEventListener("click", async () => {
+ await saveNotification();
+ closeNotifModal();
+ });
+ document.getElementById("btn-test").addEventListener("click", testNotification);
+
+ // New notification button
+ const btnNew = document.getElementById("btn-new-notification");
+ if (btnNew) {
+ btnNew.addEventListener("click", createNewNotification);
+ }
+
+ // Trigger type change handler
+ const triggerSelect = document.getElementById("notif-trigger");
+ if (triggerSelect) {
+ triggerSelect.addEventListener("change", updateTriggerSection);
+ }
+
+ // ESC key closes modal
+ document.addEventListener('keydown', (e) => {
+ if (e.key === 'Escape') {
+ closeNotifModal();
+ }
+ });
+});
diff --git a/frontend/static/js/dashboard.js b/frontend/static/js/dashboard.js
new file mode 100644
index 0000000..1e687a9
--- /dev/null
+++ b/frontend/static/js/dashboard.js
@@ -0,0 +1,146 @@
+// === INIT ===
+document.addEventListener('DOMContentLoaded', () => {
+ console.log('[Dashboard] Initializing Live Operations Dashboard (FULL MATCH)');
+ loadDashboardDataFull();
+ setInterval(loadDashboardDataFull, 5000); // Refresh every 5 seconds for live feel
+});
+
+const ACTIVE_JOB_STATES = new Set(['RUNNING', 'PAUSE', 'ERROR']);
+
+// === LOAD DATA (LIVE-STATE) ===
+// === LOAD DATA (ALLE DRUCKER + LIVE-STATE) ===
+async function loadDashboardDataFull() {
+ try {
+ // Hole alle Drucker aus der Datenbank
+ const printersRes = await fetch('/api/printers/');
+ const printers = await printersRes.json();
+ // Hole Live-State
+ const liveRes = await fetch('/api/live-state/');
+ const liveData = await liveRes.json();
+ // Mappe Live-State nach cloud_serial, nutze payload.print falls vorhanden, sonst payload
+ const liveMap = Object.fromEntries(
+ Object.entries(liveData).map(([k, v]) => [k, v.payload && v.payload.print ? v.payload.print : v.payload])
+ );
+ // Kombiniere Druckerliste mit Live-State anhand cloud_serial
+ const printerList = printers.map(printer => {
+ const live = liveMap[printer.cloud_serial] || {};
+ return { ...printer, live };
+ });
+ console.log('[DEBUG] printerList:', printerList);
+ updateLiveStatusFull(printerList);
+ updateActiveJobsFull(printerList);
+ updateRefreshTime();
+
+ // === NEU: Statistik-API für Tageswerte ===
+ try {
+ const statsRes = await fetch('/api/statistics/heatmap?days=1');
+ const statsData = await statsRes.json();
+ // statsData.data ist ein Array mit einem Eintrag für heute
+ const today = (statsData.data && statsData.data.length > 0) ? statsData.data[0] : null;
+ if (today) {
+ // Druckzeit (duration_h) in Minuten
+ const durationMin = Math.round((today.duration_h || 0) * 60);
+ document.getElementById('avgTimeTodayValue').textContent = durationMin > 0 ? formatDuration(durationMin) : '0 Min';
+ document.getElementById('avgTimeTodayLabel').textContent = 'Heute';
+ // Filamentverbrauch (filament_g)
+ document.getElementById('filamentTodayValue').textContent = (today.filament_g || 0).toFixed(1) + ' g';
+ } else {
+ document.getElementById('avgTimeTodayValue').textContent = '0 Min';
+ document.getElementById('avgTimeTodayLabel').textContent = 'Heute';
+ document.getElementById('filamentTodayValue').textContent = '0.0 g';
+ }
+ } catch (err) {
+ console.warn('[Dashboard] Statistik-API nicht erreichbar:', err);
+ }
+ } catch (error) {
+ console.error('[Dashboard] Error loading dashboard data:', error);
+ }
+}
+
+// === UPDATE LIVE STATUS (ALLE DRUCKER + LIVE-STATE) ===
+function updateLiveStatusFull(printerList) {
+ const totalCount = printerList.length;
+ // Online: Wenn ein Live-State-Objekt für den Drucker existiert (unabhängig von Feldern)
+ const onlineCount = printerList.filter(p => p.live && Object.keys(p.live).length > 0).length;
+ document.getElementById('onlinePrinters').textContent = onlineCount;
+ document.getElementById('printerStats').textContent = `${onlineCount}/${totalCount} Online`;
+
+ // Aktive Drucke (robuste Logik wie in updateActiveJobsFull)
+ const activePrinters = printerList.filter(p => {
+ const state = typeof p.live?.gcode_state === 'string' ? p.live.gcode_state.toUpperCase() : '';
+ return ACTIVE_JOB_STATES.has(state);
+ });
+ document.getElementById('activeJobCount').textContent = activePrinters.length;
+
+ // Alerts: Zeige Warnung, wenn nicht alle online sind
+ updateAlertsLive(totalCount, onlineCount);
+}
+
+// === UPDATE ACTIVE JOBS (ALLE DRUCKER + LIVE-STATE) ===
+function updateActiveJobsFull(printerList) {
+ const activePrinters = printerList.filter(p => {
+ const state = typeof p.live?.gcode_state === 'string' ? p.live.gcode_state.toUpperCase() : '';
+ return ACTIVE_JOB_STATES.has(state);
+ });
+ const container = document.getElementById('activeJobsList');
+ if (activePrinters.length === 0) {
+ container.innerHTML = 'Keine aktiven Drucke
';
+ return;
+ }
+ // Use centralized renderer from activePrintCard.js
+ if (typeof renderActiveJobs === 'function') {
+ renderActiveJobs(container, activePrinters);
+ } else {
+ // Fallback to simple list if renderer not available
+ container.innerHTML = activePrinters.map(printer => {
+ const rawGcode = printer.live.gcode_file || printer.live.file || '';
+ const gcodeBase = rawGcode && rawGcode.includes('/') ? rawGcode.split('/').pop() : rawGcode;
+ const jobName = printer.live.subtask_name || printer.live.job_name || gcodeBase || 'Unbekannter Job';
+ const printerName = printer.name || printer.cloud_serial || 'Unbekannter Drucker';
+ const progress = printer.live.percent || 0;
+ return `
+
+
+
${jobName}
+
${printerName}
+
+
+
${progress.toFixed(0)}%
+
fertig
+
+
+ `;
+ }).join('');
+ }
+}
+
+// === UPDATE ALERTS (LIVE-STATE) ===
+function updateAlertsLive(totalCount, onlineCount) {
+ const alerts = [];
+ const offlineCount = totalCount - onlineCount;
+ if (offlineCount > 0) {
+ alerts.push(`⚠️ ${offlineCount} Drucker offline`);
+ }
+ const container = document.getElementById('alertsList');
+ if (alerts.length === 0) {
+ container.innerHTML = 'Keine Alerts
';
+ } else {
+ container.innerHTML = alerts.map(a => `${a}
`).join('');
+ }
+}
+
+// === UPDATE REFRESH TIME ===
+function updateRefreshTime() {
+ const now = new Date();
+ const time = now.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit', second: '2-digit' });
+ document.getElementById('refreshTime').textContent = `Aktualisiert ${time}`;
+}
+
+// === HELPERS ===
+function formatDuration(minutes) {
+ if (minutes < 1) return '< 1 Min';
+ if (minutes < 60) return `${Math.round(minutes)} Min`;
+ const hours = Math.floor(minutes / 60);
+ const mins = Math.round(minutes % 60);
+ return `${hours}h ${mins}m`;
+}
diff --git a/frontend/static/js/dashboard_live_patch.js b/frontend/static/js/dashboard_live_patch.js
new file mode 100644
index 0000000..e006214
--- /dev/null
+++ b/frontend/static/js/dashboard_live_patch.js
@@ -0,0 +1,57 @@
+// Patch: Aktive Drucke aus /api/live-state/
+// Diese Datei ersetzt die Anzeige der aktiven Drucke im Dashboard durch die Daten aus /api/live-state/
+
+document.addEventListener('DOMContentLoaded', () => {
+ console.log('[Dashboard] Initializing Live Operations Dashboard (LIVE PATCH)');
+ loadDashboardDataLive();
+ setInterval(loadDashboardDataLive, 5000);
+});
+
+async function loadDashboardDataLive() {
+ try {
+ // Hole Live-State
+ const res = await fetch('/api/live-state/');
+ const liveData = await res.json();
+ // liveData sollte ein Array von Druckern enthalten
+ updateLiveActivePrints(liveData);
+ updateRefreshTime();
+ } catch (error) {
+ console.error('[Dashboard] Error loading live-state:', error);
+ }
+}
+
+function updateLiveActivePrints(liveData) {
+ // Filtere alle Drucker, die aktuell drucken
+ const activePrinters = liveData.filter(p => p.state === 'printing' || p.state === 'busy');
+ // Zähle aktive Drucke
+ document.getElementById('activeJobCount').textContent = activePrinters.length;
+ // Zeige Details im Bereich "Aktive Drucke"
+ const container = document.getElementById('activeJobsList');
+ if (activePrinters.length === 0) {
+ container.innerHTML = 'Keine aktiven Drucke
';
+ return;
+ }
+ // Delegate rendering to activePrintCard if available
+ if (typeof renderActiveJobs === 'function') {
+ renderActiveJobs(container, activePrinters);
+ } else {
+ container.innerHTML = activePrinters.map(printer => `
+
+
+
${printer.current_job_name || 'Unbekannter Job'}
+
${printer.name || 'Unbekannter Drucker'}
+
+
+
${printer.progress ? printer.progress.toFixed(0) : '?'}%
+
fertig
+
+
+ `).join('');
+ }
+}
+
+function updateRefreshTime() {
+ const now = new Date();
+ const time = now.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit', second: '2-digit' });
+ document.getElementById('refreshTime').textContent = `Aktualisiert ${time}`;
+}
diff --git a/frontend/static/js/debug_ams.js b/frontend/static/js/debug_ams.js
new file mode 100644
index 0000000..23a9bba
--- /dev/null
+++ b/frontend/static/js/debug_ams.js
@@ -0,0 +1,84 @@
+async function loadAmsDebug() {
+ const root = document.getElementById("amsRoot");
+ const rawPre = document.getElementById("rawJson");
+ const parsedPre = document.getElementById("parsedJson");
+ const mappedPre = document.getElementById("mappedJson");
+ const toggles = document.querySelectorAll("[data-toggle]");
+
+ try {
+ const res = await fetch("/api/debug/ams");
+ if (!res.ok) throw new Error("Request failed");
+ const data = await res.json();
+ if (rawPre) rawPre.textContent = JSON.stringify(data.raw, null, 2);
+ if (parsedPre) parsedPre.textContent = JSON.stringify(data.parsed, null, 2);
+ if (mappedPre) mappedPre.textContent = JSON.stringify(data.mapped, null, 2);
+
+ renderAmsUnits(root, data.parsed || []);
+
+ toggles.forEach(btn => {
+ btn.addEventListener("click", () => toggleBlock(btn.dataset.toggle));
+ });
+ } catch (e) {
+ console.error(e);
+ if (root) root.innerHTML = 'Fehler beim Laden der AMS-Daten.
';
+ }
+}
+
+function toggleBlock(target) {
+ const blocks = {
+ raw: document.getElementById("rawBlock"),
+ parsed: document.getElementById("parsedBlock"),
+ mapped: document.getElementById("mappedBlock"),
+ };
+ Object.entries(blocks).forEach(([key, el]) => {
+ if (!el) return;
+ el.classList.toggle("show", key === target && !el.classList.contains("show"));
+ if (key !== target) el.classList.remove("show");
+ });
+}
+
+function renderAmsUnits(root, units) {
+ if (!root) return;
+ if (!Array.isArray(units) || units.length === 0) {
+ root.innerHTML = 'Keine AMS-Daten gefunden.
';
+ return;
+ }
+
+ root.innerHTML = units
+ .map((u) => renderAmsCard(u))
+ .join("");
+}
+
+function renderAmsCard(u) {
+ const trays = Array.isArray(u.trays) ? u.trays : [];
+ const trayGrid = trays
+ .map((t) => renderTray(t, u.active_tray))
+ .join("");
+ const badge = u.active_tray !== undefined && u.active_tray !== null
+ ? `active_tray: ${u.active_tray} `
+ : `no active tray `;
+ return `
+
+
+
+ ${trayGrid}
+
+ `;
+}
+
+function renderTray(t, active) {
+ const isActive = active === t.tray_id;
+ const isEmpty = !t.material && !t.tray_uuid;
+ return `
+
+
Slot ${t.tray_id ?? "-"}
+
Material: ${t.material ?? "-"}
+
UUID: ${t.tray_uuid ?? "-"}
+
+ `;
+}
+
+document.addEventListener("DOMContentLoaded", loadAmsDebug);
diff --git a/frontend/static/js/global_notifications.js b/frontend/static/js/global_notifications.js
new file mode 100644
index 0000000..0d36093
--- /dev/null
+++ b/frontend/static/js/global_notifications.js
@@ -0,0 +1,182 @@
+(function () {
+ console.log("[GlobalNotifications] Script wird geladen...");
+
+ const state = {
+ config: [],
+ socket: null,
+ reconnectTimer: null,
+ alertRoot: null,
+ };
+
+ function getAlertRoot() {
+ if (!state.alertRoot) {
+ state.alertRoot = document.getElementById("alert-root");
+ }
+ return state.alertRoot;
+ }
+
+ async function loadNotifications() {
+ try {
+ const res = await fetch("/api/notifications-config");
+ if (!res.ok) throw new Error("Config Load failed");
+ const data = await res.json();
+ state.config = data.notifications || [];
+ // KEIN automatisches Anzeigen persistenter Alerts mehr!
+ } catch (err) {
+ console.error("Notifications konnten nicht geladen werden:", err);
+ }
+ }
+
+ function findNotificationById(id) {
+ return state.config.find((n) => n.id === id);
+ }
+
+ function createAlertElement(notification) {
+ const root = getAlertRoot();
+ if (!root) return null;
+
+ const wrapper = document.createElement("div");
+ wrapper.className = `alert alert--${notification.type || "info"}`;
+ wrapper.dataset.id = notification.id || "";
+ if (notification.persistent) wrapper.dataset.persistent = "true";
+
+ const content = document.createElement("div");
+ content.className = "alert__content";
+
+ const title = document.createElement("div");
+ title.className = "alert__title";
+ title.textContent = notification.label || notification.id || "Notification";
+
+ const message = document.createElement("div");
+ message.className = "alert__message";
+ message.textContent = notification.message || "";
+
+ const closeBtn = document.createElement("button");
+ closeBtn.className = "alert__close";
+ closeBtn.type = "button";
+ closeBtn.innerText = "×";
+ closeBtn.addEventListener("click", () => closeAlert(wrapper));
+
+ content.appendChild(title);
+ content.appendChild(message);
+ wrapper.appendChild(content);
+ wrapper.appendChild(closeBtn);
+ return wrapper;
+ }
+
+ function triggerAlert(notification) {
+ console.log("[GlobalNotifications] triggerAlert aufgerufen:", notification);
+ const root = getAlertRoot();
+ if (!root) {
+ console.error("[GlobalNotifications] alert-root nicht gefunden!");
+ return;
+ }
+
+ let resolved = notification;
+ if (typeof notification === "string") {
+ resolved = findNotificationById(notification);
+ }
+ if (!resolved) {
+ console.warn("[GlobalNotifications] Notification nicht gefunden:", notification);
+ return;
+ }
+ if (resolved.enabled === false) {
+ console.log("[GlobalNotifications] Notification ist deaktiviert:", resolved.id);
+ return;
+ }
+
+ const existing = root.querySelector(`.alert[data-id="${resolved.id}"]`);
+ if (existing && resolved.persistent) {
+ console.log("[GlobalNotifications] Persistente Notification existiert bereits:", resolved.id);
+ return;
+ }
+
+ const alertEl = createAlertElement(resolved);
+ if (!alertEl) {
+ console.error("[GlobalNotifications] createAlertElement fehlgeschlagen!");
+ return;
+ }
+ console.log("[GlobalNotifications] Alert anzeigen:", resolved.id);
+ root.appendChild(alertEl);
+
+ if (!resolved.persistent) {
+ setTimeout(() => closeAlert(alertEl), 6500);
+ }
+ }
+
+ function renderPersistentAlerts() {
+ const root = getAlertRoot();
+ if (!root) return;
+ root.querySelectorAll('.alert[data-persistent="true"]').forEach((el) => el.remove());
+ state.config
+ .filter((n) => n.enabled !== false && n.persistent)
+ .forEach((n) => triggerAlert(n));
+ }
+
+ function closeAlert(element) {
+ if (!element) return;
+ element.classList.add("alert--closing");
+ setTimeout(() => element.remove(), 180);
+ }
+
+ function handleSocketMessage(event) {
+ console.log("[GlobalNotifications] WebSocket Nachricht empfangen:", event.data);
+ try {
+ const data = JSON.parse(event.data);
+ console.log("[GlobalNotifications] Geparste Daten:", data);
+ if (data && data.event === "notification_trigger" && data.payload) {
+ console.log("[GlobalNotifications] Notification-Trigger erkannt, zeige Alert...");
+ triggerAlert(data.payload);
+ } else {
+ console.warn("[GlobalNotifications] Unbekanntes Event oder fehlendes Payload:", data);
+ }
+ } catch (err) {
+ console.error("[GlobalNotifications] WebSocket payload ungültig:", err);
+ }
+ }
+
+ function connectSocket() {
+ const protocol = window.location.protocol === "https:" ? "wss" : "ws";
+ const wsUrl = `${protocol}://${window.location.host}/api/notifications/ws`;
+ console.log("[GlobalNotifications] Verbinde WebSocket:", wsUrl);
+ try {
+ state.socket = new WebSocket(wsUrl);
+ state.socket.onopen = () => {
+ console.log("[GlobalNotifications] WebSocket verbunden!");
+ };
+ state.socket.onmessage = handleSocketMessage;
+ state.socket.onclose = () => {
+ console.log("[GlobalNotifications] WebSocket geschlossen, reconnect in 2s...");
+ if (state.reconnectTimer) clearTimeout(state.reconnectTimer);
+ state.reconnectTimer = setTimeout(connectSocket, 2000);
+ };
+ state.socket.onerror = (err) => {
+ console.error("[GlobalNotifications] WebSocket Fehler:", err);
+ try {
+ state.socket.close();
+ } catch (e) {
+ console.error("[GlobalNotifications] WebSocket close error", e);
+ }
+ };
+ } catch (err) {
+ console.error("[GlobalNotifications] WebSocket konnte nicht geöffnet werden:", err);
+ if (state.reconnectTimer) clearTimeout(state.reconnectTimer);
+ state.reconnectTimer = setTimeout(connectSocket, 3000);
+ }
+ }
+
+
+ document.addEventListener("DOMContentLoaded", () => {
+ console.log("[GlobalNotifications] DOMContentLoaded - Initialisiere...");
+ loadNotifications();
+ // WebSocket auf allen Seiten verbinden für globale Benachrichtigungen
+ connectSocket();
+ });
+
+ console.log("[GlobalNotifications] Exportiere Funktionen zu window...");
+ window.loadNotifications = loadNotifications;
+ window.triggerAlert = triggerAlert;
+ window.renderPersistentAlerts = renderPersistentAlerts;
+ window.closeAlert = closeAlert;
+ console.log("[GlobalNotifications] Script vollständig geladen!");
+})();
diff --git a/frontend/static/js/jobs.js b/frontend/static/js/jobs.js
new file mode 100644
index 0000000..ff4f27c
--- /dev/null
+++ b/frontend/static/js/jobs.js
@@ -0,0 +1,3 @@
+document.addEventListener("DOMContentLoaded", () => {
+ console.info("Jobs JS geladen - Platzhalter für Druckjobs-Logik.");
+});
diff --git a/frontend/static/js/live_utils.js b/frontend/static/js/live_utils.js
new file mode 100644
index 0000000..21a68d5
--- /dev/null
+++ b/frontend/static/js/live_utils.js
@@ -0,0 +1,33 @@
+// FilamentHub Utility-Funktionen für Live-Daten und Fallback
+// Diese Seite zeigt, wie du Live-Daten und Fallbacks einfach und robust nutzen kannst.
+
+/**
+ * Gibt den Wert aus den Live-Daten zurück, oder (bei Fehlen) aus der Datenbank, oder einen Fallback.
+ * @param {object} printer - Das Druckerobjekt (enthält .live und DB-Felder)
+ * @param {string[]} keys - Array von Schlüsseln, die nacheinander geprüft werden (zuerst live, dann DB)
+ * @param {any} fallback - Wert, falls nichts gefunden wird (Standard: "-")
+ * @returns {any}
+ */
+function getLiveOrDb(printer, keys, fallback = "-") {
+ for (const key of keys) {
+ if (printer.live && printer.live[key] !== undefined && printer.live[key] !== null) return printer.live[key];
+ if (printer[key] !== undefined && printer[key] !== null) return printer[key];
+ }
+ return fallback;
+}
+
+// Beispiel-Nutzung:
+// const nozzle = getLiveOrDb(printer, ["nozzle_temper", "nozzle_temp"]);
+// const bed = getLiveOrDb(printer, ["bed_temper", "bed_temp"]);
+// const filament = getLiveOrDb(printer, ["tray_type", "filament_material", "printer_type"]);
+
+// Du kannst beliebig viele Fallbacks angeben:
+// const foo = getLiveOrDb(printer, ["live_key1", "db_key1", "db_key2"], "(unbekannt)");
+
+// Diese Funktion kannst du in allen deinen JS-Dateien importieren oder direkt einfügen.
+
+// Für komplexere Fälle (z.B. AMS/Tray) kannst du eigene Hilfsfunktionen nach diesem Muster bauen.
+
+// ---
+// Tipp: Schreibe dir eigene kleine Hilfsfunktionen für wiederkehrende Spezialfälle (z.B. Filamentanzeige, Statusanzeige, ...)
+// und halte deinen Code so übersichtlich und wartbar!
diff --git a/frontend/static/js/materials.js b/frontend/static/js/materials.js
new file mode 100644
index 0000000..2cacc7a
--- /dev/null
+++ b/frontend/static/js/materials.js
@@ -0,0 +1,298 @@
+// === INIT ===
+document.addEventListener('DOMContentLoaded', () => {
+ console.log('[Materials] Page loaded');
+ loadMaterials();
+ setInterval(loadMaterials, 30000);
+});
+
+// === LOAD MATERIALS ===
+async function loadMaterials() {
+ console.log('[Materials] Loading materials...');
+ try {
+ const response = await fetch('/api/materials/');
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}`);
+ }
+
+ const materials = await response.json();
+ console.log('[Materials] Loaded:', materials.length);
+ renderMaterialsTable(materials);
+ } catch (error) {
+ console.error('[Materials] Error:', error);
+ renderError('Fehler beim Laden der Materialien');
+ }
+}
+
+// === RENDER TABLE ===
+function renderMaterialsTable(materials) {
+ const tbody = document.getElementById('materialsTableBody');
+ if (!materials || materials.length === 0) {
+ tbody.innerHTML = 'Keine Materialien vorhanden ';
+ return;
+ }
+
+ tbody.innerHTML = materials.map(material => `
+
+ ${escapeHtml(material.name || '-')}
+ ${escapeHtml(material.brand || '-')}
+ ${material.density ? material.density + ' g/cm³' : '-'}
+ ${material.diameter ? material.diameter + ' mm' : '-'}
+
+
+ ✏️
+ 🗑️
+
+
+
+ `).join('');
+
+ console.log('[Materials] Table rendered');
+}
+
+// === ERROR DISPLAY ===
+function renderError(message) {
+ const tbody = document.getElementById('materialsTableBody');
+ tbody.innerHTML = `${escapeHtml(message)} `;
+}
+
+// === EDIT ===
+async function editMaterial(materialId) {
+ console.log('[Materials] Edit material:', materialId);
+ try {
+ await loadBrands();
+ const response = await fetch(`/api/materials/${materialId}`);
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}`);
+ }
+ const material = await response.json();
+ document.getElementById('editForm').dataset.materialId = material.id;
+ document.getElementById('editName').value = material.name || '';
+ document.getElementById('editBrand').value = material.brand || '';
+ document.getElementById('editBrandInput').value = '';
+ document.getElementById('editBrandInput').style.display = 'none';
+ document.getElementById('editDensity').value = material.density || '';
+ document.getElementById('editDiameter').value = material.diameter || '';
+ document.getElementById('editNotes').value = material.notes || '';
+ document.getElementById('editModal').classList.add('active');
+ } catch (error) {
+ console.error('[Materials] Error loading material:', error);
+ alert('Fehler beim Laden des Materials: ' + error.message);
+ }
+}
+
+// === LOAD BRANDS ===
+async function loadBrands() {
+ console.log('[Materials] Loading brands...');
+ try {
+ const response = await fetch('/api/materials/brands/list');
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}`);
+ }
+ const brands = await response.json();
+ const select = document.getElementById('editBrand');
+ const currentValue = select.value;
+ select.innerHTML = '';
+ const noneOption = document.createElement('option');
+ noneOption.value = '';
+ noneOption.textContent = '-- Keine --';
+ select.appendChild(noneOption);
+ const newOption = document.createElement('option');
+ newOption.value = '__new__';
+ newOption.textContent = '+ Neue Marke...';
+ select.appendChild(newOption);
+ brands.forEach(brand => {
+ const option = document.createElement('option');
+ option.value = brand;
+ option.textContent = brand;
+ select.appendChild(option);
+ });
+ select.value = currentValue;
+ } catch (error) {
+ console.error('[Materials] Error loading brands:', error);
+ }
+}
+
+// === CLOSE MODAL ===
+function closeEditModal() {
+ document.getElementById('editModal').classList.remove('active');
+ document.getElementById('editForm').reset();
+}
+
+// === SAVE MATERIAL ===
+async function saveMaterial(event) {
+ event.preventDefault();
+ const materialId = document.getElementById('editForm').dataset.materialId;
+ console.log('[Materials] Saving material:', materialId);
+ let brand = document.getElementById('editBrand').value;
+ if (brand === '__new__') {
+ brand = document.getElementById('editBrandInput').value || null;
+ }
+ const formData = {
+ name: document.getElementById('editName').value,
+ brand: brand,
+ density: parseFloat(document.getElementById('editDensity').value) || 1.24,
+ diameter: parseFloat(document.getElementById('editDiameter').value) || 1.75,
+ notes: document.getElementById('editNotes').value || null
+ };
+ try {
+ const response = await fetch(`/api/materials/${materialId}`, {
+ method: 'PUT',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify(formData)
+ });
+ if (!response.ok) {
+ const error = await response.json();
+ throw new Error(error.detail || `HTTP ${response.status}`);
+ }
+ closeEditModal();
+ loadMaterials();
+ } catch (error) {
+ console.error('[Materials] Save error:', error);
+ alert('Fehler beim Speichern: ' + error.message);
+ }
+}
+
+// === TOGGLE EDIT BRAND INPUT ===
+function toggleEditBrandInput() {
+ const select = document.getElementById('editBrand');
+ const input = document.getElementById('editBrandInput');
+ if (select.value === '__new__') {
+ input.style.display = 'block';
+ input.focus();
+ } else {
+ input.style.display = 'none';
+ input.value = '';
+ }
+}
+
+// === OPEN CREATE MODAL ===
+async function openCreateModal() {
+ console.log('[Materials] Open create modal');
+ try {
+ await loadBrandsForCreate();
+ document.getElementById('createForm').reset();
+ document.getElementById('createDensity').value = '1.24';
+ document.getElementById('createDiameter').value = '1.75';
+ document.getElementById('createModal').classList.add('active');
+ } catch (error) {
+ console.error('[Materials] Error opening create modal:', error);
+ }
+}
+
+// === CLOSE CREATE MODAL ===
+function closeCreateModal() {
+ document.getElementById('createModal').classList.remove('active');
+ document.getElementById('createForm').reset();
+}
+
+// === LOAD BRANDS FOR CREATE ===
+async function loadBrandsForCreate() {
+ console.log('[Materials] Loading brands for create...');
+ try {
+ const response = await fetch('/api/materials/brands/list');
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}`);
+ }
+ const brands = await response.json();
+ const select = document.getElementById('createBrand');
+ select.innerHTML = '';
+ const noneOption = document.createElement('option');
+ noneOption.value = '';
+ noneOption.textContent = '-- Keine --';
+ select.appendChild(noneOption);
+ const newOption = document.createElement('option');
+ newOption.value = '__new__';
+ newOption.textContent = '+ Neue Marke...';
+ select.appendChild(newOption);
+ brands.forEach(brand => {
+ const option = document.createElement('option');
+ option.value = brand;
+ option.textContent = brand;
+ select.appendChild(option);
+ });
+ } catch (error) {
+ console.error('[Materials] Error loading brands for create:', error);
+ }
+}
+
+// === CREATE MATERIAL ===
+async function createMaterial(event) {
+ event.preventDefault();
+ console.log('[Materials] Creating material');
+ let brand = document.getElementById('createBrand').value;
+ if (brand === '__new__') {
+ brand = document.getElementById('createBrandInput').value || null;
+ }
+ const formData = {
+ name: document.getElementById('createName').value,
+ brand: brand,
+ density: parseFloat(document.getElementById('createDensity').value) || 1.24,
+ diameter: parseFloat(document.getElementById('createDiameter').value) || 1.75,
+ notes: document.getElementById('createNotes').value || null
+ };
+ try {
+ const response = await fetch('/api/materials/', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify(formData)
+ });
+ if (!response.ok) {
+ const error = await response.json();
+ throw new Error(error.detail || `HTTP ${response.status}`);
+ }
+ closeCreateModal();
+ loadMaterials();
+ } catch (error) {
+ console.error('[Materials] Create error:', error);
+ alert('Fehler beim Erstellen: ' + error.message);
+ }
+}
+
+// === TOGGLE CREATE BRAND INPUT ===
+function toggleCreateBrandInput() {
+ const select = document.getElementById('createBrand');
+ const input = document.getElementById('createBrandInput');
+ if (select.value === '__new__') {
+ input.style.display = 'block';
+ input.focus();
+ } else {
+ input.style.display = 'none';
+ input.value = '';
+ }
+}
+
+// === DELETE ===
+async function deleteMaterial(materialId) {
+ console.log('[Materials] Delete material:', materialId);
+ if (!confirm('Material wirklich löschen?')) {
+ return;
+ }
+ try {
+ const response = await fetch(`/api/materials/${materialId}`, {
+ method: 'DELETE'
+ });
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}`);
+ }
+ loadMaterials();
+ } catch (error) {
+ console.error('[Materials] Delete error:', error);
+ alert('Fehler beim Löschen: ' + error.message);
+ }
+}
+
+// === UTILS ===
+function escapeHtml(text) {
+ const map = {
+ '&': '&',
+ '<': '<',
+ '>': '>',
+ '"': '"',
+ "'": '''
+ };
+ return String(text).replace(/[&<>'"]/g, m => map[m]);
+}
diff --git a/frontend/static/js/navbar.js b/frontend/static/js/navbar.js
new file mode 100644
index 0000000..604043c
--- /dev/null
+++ b/frontend/static/js/navbar.js
@@ -0,0 +1,163 @@
+document.addEventListener("DOMContentLoaded", () => {
+ highlightActiveNav();
+ initUserMenu();
+});
+
+function highlightActiveNav() {
+ const active = document.body.dataset.activePage;
+ document.querySelectorAll(".sidebar__nav .nav__item").forEach(link => {
+ if (!active) return;
+ if (active === "dashboard" && link.getAttribute("href") === "/") {
+ link.classList.add("nav__item--active");
+ } else if (link.getAttribute("href")?.includes(`/${active}`)) {
+ link.classList.add("nav__item--active");
+ }
+ });
+}
+
+async function fetchSettings() {
+ try {
+ const res = await fetch("/api/settings");
+ if (!res.ok) throw new Error("fetch settings failed");
+ return await res.json();
+ } catch (e) {
+ console.warn("Settings fetch failed", e);
+ return { ams_mode: "single", debug_ws_logging: false };
+ }
+}
+
+async function updateSetting(partial) {
+ try {
+ const res = await fetch("/api/settings", {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(partial),
+ });
+ if (!res.ok) throw new Error("update failed");
+ return await res.json();
+ } catch (e) {
+ console.warn("Settings update failed", e);
+ return null;
+ }
+}
+
+function initUserMenu() {
+ const menu = document.querySelector(".user-menu");
+ if (!menu) return;
+ const trigger = menu.querySelector(".user-menu__trigger");
+ const dropdown = menu.querySelector(".user-menu__dropdown");
+ const themeToggle = menu.querySelector('[data-action="theme-toggle"]');
+ const about = menu.querySelector('[data-action="about"]');
+ const settingsBtn = menu.querySelector('[data-action="settings"]');
+ const amsRadios = menu.querySelectorAll('input[name="ams_mode"][data-setting="ams_mode"]');
+ const debugCheckbox = menu.querySelector('input[type="checkbox"][data-setting="debug_ws_logging"]');
+
+ const settingsModal = document.getElementById("settingsModal");
+ const settingsSaveBtn = document.getElementById("settingsSaveBtn");
+ const settingsElectricityPrice = document.getElementById("settingsElectricityPrice");
+ const settingsClosers = document.querySelectorAll('[data-close-settings]');
+
+ // Restore theme on load
+ const stored = localStorage.getItem("fh_theme");
+ if (stored === "light") {
+ document.body.classList.add("theme-light");
+ }
+
+ const closeAll = () => dropdown.classList.remove("open");
+
+ const toggleMenu = (evt) => {
+ evt.stopPropagation();
+ dropdown.classList.toggle("open");
+ trigger.setAttribute("aria-expanded", dropdown.classList.contains("open"));
+ };
+
+ trigger?.addEventListener("click", toggleMenu);
+ trigger?.addEventListener("keydown", (e) => {
+ if (e.key === "Enter" || e.key === " ") {
+ e.preventDefault();
+ toggleMenu(e);
+ } else if (e.key === "Escape") {
+ closeAll();
+ trigger.focus();
+ }
+ });
+
+ document.addEventListener("click", (e) => {
+ if (!menu.contains(e.target)) closeAll();
+ });
+
+ about?.addEventListener("click", () => {
+ alert("FilamentHub – lokale Instance. Weitere Infos folgen.");
+ closeAll();
+ });
+
+ const closeSettings = () => {
+ if (!settingsModal) return;
+ settingsModal.classList.remove("show");
+ settingsModal.setAttribute("aria-hidden", "true");
+ };
+
+ const openSettings = async () => {
+ dropdown.classList.remove("open");
+ if (!settingsModal) return;
+ try {
+ const settings = await fetchSettings();
+ if (settingsElectricityPrice) {
+ const price = parseFloat(settings?.["cost.electricity_price_kwh"] ?? settings?.electricity_price_kwh);
+ settingsElectricityPrice.value = Number.isFinite(price) ? price : "";
+ }
+ } catch (e) {
+ console.warn("Settings modal load failed", e);
+ }
+ settingsModal.classList.add("show");
+ settingsModal.setAttribute("aria-hidden", "false");
+ };
+
+ settingsBtn?.addEventListener("click", (e) => {
+ e.preventDefault();
+ openSettings();
+ });
+ settingsClosers.forEach(btn => btn.addEventListener("click", closeSettings));
+
+ // close when clicking backdrop
+ settingsModal?.addEventListener("click", (e) => {
+ if (e.target === settingsModal) closeSettings();
+ });
+
+ settingsSaveBtn?.addEventListener("click", async () => {
+ if (!settingsElectricityPrice) return;
+ const val = settingsElectricityPrice.value;
+ const price = val === "" ? null : Number(val);
+ await updateSetting({ "cost.electricity_price_kwh": price });
+ closeSettings();
+ });
+
+ themeToggle?.addEventListener("click", () => {
+ const isLight = document.body.classList.toggle("theme-light");
+ localStorage.setItem("fh_theme", isLight ? "light" : "dark");
+ closeAll();
+ });
+
+ // Bind settings controls
+ fetchSettings().then(settings => {
+ if (settings?.ams_mode) {
+ amsRadios.forEach(r => {
+ r.checked = r.value === settings.ams_mode;
+ });
+ }
+ if (debugCheckbox) {
+ debugCheckbox.checked = !!settings?.debug_ws_logging;
+ }
+ });
+
+ amsRadios.forEach(radio => {
+ radio.addEventListener("change", async () => {
+ if (!radio.checked) return;
+ await updateSetting({ ams_mode: radio.value });
+ });
+ });
+
+ debugCheckbox?.addEventListener("change", async () => {
+ await updateSetting({ debug_ws_logging: debugCheckbox.checked });
+ });
+}
diff --git a/frontend/static/js/printers.js b/frontend/static/js/printers.js
new file mode 100644
index 0000000..b6522f4
--- /dev/null
+++ b/frontend/static/js/printers.js
@@ -0,0 +1,275 @@
+const cardsContainer = document.getElementById("cardsContainer");
+const refreshBtn = document.getElementById("refreshBtn");
+
+async function loadPrinters() {
+ if (!cardsContainer) return;
+ cardsContainer.innerHTML = 'Lade Drucker ...
';
+ try {
+ const res = await fetch("/api/printers/");
+ if (!res.ok) throw new Error("Request failed");
+ const printers = await res.json();
+ const liveRes = await fetch("/api/live-state/");
+ const liveData = await liveRes.ok ? await liveRes.json() : {};
+ // Mappe Live-State nach cloud_serial, nutze payload.print falls vorhanden, sonst payload
+ const liveMap = Object.fromEntries(
+ Object.entries(liveData).map(([k, v]) => {
+ let live = v.payload && v.payload.print ? { ...v.payload.print } : { ...v.payload };
+ // AMS Tray-Daten explizit kopieren
+ if (v.payload?.ams?.ams && Array.isArray(v.payload.ams.ams) && v.payload.ams.ams[0]) {
+ live.tray = v.payload.ams.ams[0].tray;
+ live.tray_now = v.payload.ams.ams[0].tray_now;
+ }
+ return [k, live];
+ })
+ );
+ if (!Array.isArray(printers) || printers.length === 0) {
+ cardsContainer.innerHTML = 'Keine Drucker konfiguriert.
';
+ return;
+ }
+ cardsContainer.innerHTML = printers.map(printer => renderCard({
+ ...printer,
+ live: liveMap[printer.cloud_serial] || null
+ })).join("");
+ } catch (err) {
+ console.error(err);
+ cardsContainer.innerHTML = 'Fehler beim Laden der Drucker.
';
+ }
+}
+
+function renderCard(printer) {
+ const online = printer.online === true;
+ const onlineLabel = online ? "Online" : printer.online === null ? "Manuell" : "Offline";
+ const icon = printer.image_url
+ ? ` `
+ : renderPrinterIcon(printer.printer_type);
+ // Live-Daten bevorzugen, Fallback auf statische Daten
+ const nozzle = printer.live?.nozzle_temper ?? printer.live?.nozzle_temp ?? printer.nozzle_temp ?? printer.nozzle_temper ?? printer.live?.nozzle ?? printer.live?.extruder_temp ?? printer.temperature?.nozzle ?? "-";
+ const bed = printer.live?.bed_temper ?? printer.live?.bed_temp ?? printer.bed_temp ?? printer.bed_temper ?? printer.live?.bed ?? printer.temperature?.bed ?? "-";
+
+ // AMS-Daten extrahieren (Temperatur & Luftfeuchtigkeit)
+ let amsTemp = null;
+ let amsHumidity = null;
+ if (printer.live?.ams?.ams && Array.isArray(printer.live.ams.ams) && printer.live.ams.ams[0]) {
+ amsTemp = printer.live.ams.ams[0].temp;
+ amsHumidity = printer.live.ams.ams[0].humidity;
+ }
+
+ let filament = "-";
+ // Robust: tray/tray_now ggf. aus AMS-Daten extrahieren
+ let tray = printer.live?.tray;
+ let tray_now = printer.live?.tray_now;
+ if ((!tray || tray.length === 0) && printer.live?.ams?.ams && Array.isArray(printer.live.ams.ams) && printer.live.ams.ams[0]) {
+ tray = printer.live.ams.ams[0].tray;
+ tray_now = printer.live.ams.ams[0].tray_now;
+ }
+ console.log("DEBUG AMS tray_now:", tray_now, "tray:", tray, printer);
+ if (tray && Array.isArray(tray) && tray.length > 0) {
+ let tray_now_num = Number(tray_now);
+ if (!isNaN(tray_now_num) && tray[tray_now_num]) {
+ const t = tray[tray_now_num];
+ filament = t.tray_sub_brands || t.tray_type || "-";
+ } else {
+ const t = tray[0];
+ filament = t.tray_sub_brands || t.tray_type || "-";
+ }
+ } else {
+ console.log("DEBUG Filament Fallback:", printer.live);
+ filament = printer.live?.tray_type || printer.live?.filament_material || printer.filament_material || printer.printer_type?.toUpperCase() || "-";
+ if (filament && filament !== "-") filament += " | Datenbank";
+ }
+ const progress = printer.live?.percent ?? printer.progress_percent ?? 0;
+ const progressColor = pickProgressColor(progress);
+
+ // WiFi Signal: try several common locations in live payloads
+ const wifiRaw = printer.live?.wifi_signal || printer.wifi_signal || printer.live?.device?.wifi_signal || printer.live?.net?.wifi_signal || null;
+
+ function rssiToPercent(raw) {
+ if (raw == null) return null;
+ try {
+ let s = String(raw).trim();
+ if (s.toLowerCase().endsWith('dbm')) s = s.slice(0, -3);
+ const m = s.match(/-?\d+/);
+ if (!m) return null;
+ let rssi = parseInt(m[0], 10);
+ if (isNaN(rssi)) return null;
+ // Map -100..-30 dBm -> 0..100%
+ rssi = Math.max(-100, Math.min(-30, rssi));
+ const pct = Math.round((rssi + 100) / 70 * 100);
+ return `${pct}%`;
+ } catch (e) {
+ return null;
+ }
+ }
+
+ const wifiPct = rssiToPercent(wifiRaw);
+ const wifiPctNum = wifiPct ? parseInt(wifiPct, 10) : null;
+ let wifiColor = '#999';
+ if (wifiPctNum !== null && !isNaN(wifiPctNum)) {
+ if (wifiPctNum >= 80) wifiColor = '#2ecc71';
+ else if (wifiPctNum >= 50) wifiColor = '#f39c12';
+ else wifiColor = '#e74c3c';
+ }
+
+ return `
+
+
+
${printer.name || "Unbenannt"}
+
+
+ ${onlineLabel}
+
+
+ Auto Connect
+
+
+
+ ⋮
+
+
+
+
+
+
+
+
+
${Math.round(progress)}%
+
+
+ `;
+}
+
+function pickProgressColor(val) {
+ const v = Math.min(Math.max(val ?? 0, 0), 100);
+ if (v <= 20) return "linear-gradient(90deg, #e74c3c, #c0392b)";
+ if (v <= 80) return "linear-gradient(90deg, #f39c12, #e67e22)";
+ return "linear-gradient(90deg, #2ecc71, #27ae60)";
+}
+
+function renderPrinterIcon(type) {
+ if (type === "bambu" || type === "bambu_lab" || !type) {
+ return ` `;
+ }
+ const accent = type === "klipper" ? "#3498db" : type === "manual" ? "#95a5a6" : "#f39c12";
+ return `
+
+
+
+
+
+
+
+ `;
+}
+
+document.addEventListener("DOMContentLoaded", () => {
+ loadPrinters();
+ if (refreshBtn) {
+ refreshBtn.addEventListener("click", loadPrinters);
+ }
+});
+
+function toggleMenu(evt, id) {
+ evt.stopPropagation();
+ document.querySelectorAll(".kebab-menu").forEach(m => m.classList.remove("open"));
+ const menu = document.getElementById(`menu-${id}`);
+ if (menu) menu.classList.toggle("open");
+ document.addEventListener("click", () => {
+ document.querySelectorAll(".kebab-menu").forEach(m => m.classList.remove("open"));
+ }, { once: true });
+}
+
+function closeEditModal() {
+ const modal = document.getElementById("printerEditModal");
+ if (modal) modal.classList.remove("show");
+}
+
+async function openEditModal(id) {
+ try {
+ const res = await fetch(`/api/printers/${id}`);
+ if (!res.ok) throw new Error("Laden fehlgeschlagen");
+ const p = await res.json();
+ document.getElementById("editId").value = p.id;
+ document.getElementById("editName").value = p.name || "";
+ document.getElementById("editType").value = p.printer_type || "";
+ document.getElementById("editIp").value = p.ip_address || "";
+ document.getElementById("editPort").value = p.port || "";
+ document.getElementById("editMqttVersion").value = p.mqtt_version || "311";
+ document.getElementById("editPower").value = p.power_consumption_kw ?? "";
+ document.getElementById("editMaintenance").value = p.maintenance_cost_yearly ?? "";
+ document.getElementById("editSerial").value = p.cloud_serial || "";
+ document.getElementById("editApiKey").value = p.api_key || "";
+ document.getElementById("editAutoConnect").checked = !!p.auto_connect;
+ document.getElementById("printerEditModal").classList.add("show");
+ } catch (e) {
+ alert("Fehler beim Laden des Druckers");
+ }
+}
+
+async function savePrinterEdit(ev) {
+ ev.preventDefault();
+ const id = document.getElementById("editId").value;
+ const mqttVersion = document.getElementById("editMqttVersion").value;
+ const payload = {
+ name: document.getElementById("editName").value,
+ printer_type: document.getElementById("editType").value,
+ ip_address: document.getElementById("editIp").value,
+ port: document.getElementById("editPort").value ? Number(document.getElementById("editPort").value) : null,
+ mqtt_version: mqttVersion || null, // Leerer String → null für Auto-Erkennung
+ power_consumption_kw: document.getElementById("editPower").value ? Number(document.getElementById("editPower").value) : null,
+ maintenance_cost_yearly: document.getElementById("editMaintenance").value ? Number(document.getElementById("editMaintenance").value) : null,
+ cloud_serial: document.getElementById("editSerial").value,
+ api_key: document.getElementById("editApiKey").value,
+ auto_connect: document.getElementById("editAutoConnect").checked
+ };
+ try {
+ const res = await fetch(`/api/printers/${id}`, {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(payload)
+ });
+ if (!res.ok) throw new Error("Speichern fehlgeschlagen");
+ closeEditModal();
+ loadPrinters();
+ } catch (e) {
+ alert("Fehler beim Speichern");
+ }
+}
+
+async function testConnection(id) {
+ try {
+ const res = await fetch(`/api/printers/${id}/test`, { method: "POST" });
+ const data = await res.json();
+ alert(data.message || "Test abgeschlossen");
+ } catch (e) {
+ alert("Fehler beim Verbindungstest");
+ }
+}
+
+async function deletePrinter(id) {
+ if (!confirm("Drucker wirklich löschen?")) return;
+ try {
+ const res = await fetch(`/api/printers/${id}`, { method: "DELETE" });
+ if (!res.ok) throw new Error();
+ loadPrinters();
+ } catch (e) {
+ alert("Fehler beim Löschen");
+ }
+}
diff --git a/frontend/static/js/settings.js b/frontend/static/js/settings.js
new file mode 100644
index 0000000..53dad7b
--- /dev/null
+++ b/frontend/static/js/settings.js
@@ -0,0 +1,64 @@
+document.addEventListener("DOMContentLoaded", () => {
+ bindSettingsControls();
+});
+
+function resolveFetchSettings() {
+ if (typeof fetchSettings === "function") return fetchSettings;
+ // Fallback, falls navbar.js nicht geladen wäre (sollte nicht passieren)
+ return async function () {
+ const res = await fetch("/api/settings");
+ return res.json();
+ };
+}
+
+function resolveUpdateSetting() {
+ if (typeof updateSetting === "function") return updateSetting;
+ return async function (partial) {
+ const res = await fetch("/api/settings", {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(partial),
+ });
+ return res.json();
+ };
+}
+
+async function bindSettingsControls() {
+ const amsRadios = document.querySelectorAll('input[name="settings_ams_mode"][data-setting="ams_mode"]');
+ const debugCheckbox = document.querySelector('input[type="checkbox"][data-setting="debug_ws_logging"]');
+ const debugStatus = document.getElementById("debugStatus");
+ if (!amsRadios.length && !debugCheckbox) return;
+
+ const fetchFn = resolveFetchSettings();
+ const updateFn = resolveUpdateSetting();
+
+ try {
+ const settings = await fetchFn();
+ if (settings?.ams_mode) {
+ amsRadios.forEach(r => r.checked = r.value === settings.ams_mode);
+ }
+ if (debugCheckbox) {
+ debugCheckbox.checked = !!settings?.debug_ws_logging;
+ if (debugStatus) {
+ debugStatus.textContent = `Status: ${debugCheckbox.checked ? "aktiv" : "inaktiv"}`;
+ }
+ }
+ } catch (e) {
+ console.warn("Settings laden fehlgeschlagen", e);
+ if (debugStatus) debugStatus.textContent = "Status: Fehler beim Laden";
+ }
+
+ amsRadios.forEach(radio => {
+ radio.addEventListener("change", async () => {
+ if (!radio.checked) return;
+ await updateFn({ ams_mode: radio.value });
+ });
+ });
+
+ debugCheckbox?.addEventListener("change", async () => {
+ await updateFn({ debug_ws_logging: debugCheckbox.checked });
+ if (debugStatus) {
+ debugStatus.textContent = `Status: ${debugCheckbox.checked ? "aktiv" : "inaktiv"}`;
+ }
+ });
+}
diff --git a/frontend/static/js/spools.js b/frontend/static/js/spools.js
new file mode 100644
index 0000000..c585100
--- /dev/null
+++ b/frontend/static/js/spools.js
@@ -0,0 +1,3 @@
+document.addEventListener("DOMContentLoaded", () => {
+ console.info("Spools JS geladen - Platzhalter für Spulen-spezifische Logik.");
+});
diff --git a/frontend/static/js/statistics.js b/frontend/static/js/statistics.js
new file mode 100644
index 0000000..0ab6520
--- /dev/null
+++ b/frontend/static/js/statistics.js
@@ -0,0 +1,490 @@
+let chartData = { byMaterial: [], byPrinter: [] };
+let currentDays = 7;
+let chartInstances = {};
+
+document.addEventListener("DOMContentLoaded", () => {
+ loadStatistics();
+ loadCharts(currentDays);
+ setupTimeFilter();
+});
+
+function setupTimeFilter() {
+ document.querySelectorAll(".filter-btn").forEach(btn => {
+ btn.addEventListener("click", () => {
+ document.querySelectorAll(".filter-btn").forEach(b => b.classList.remove("active"));
+ btn.classList.add("active");
+ currentDays = parseInt(btn.dataset.days);
+ loadCharts(currentDays);
+ });
+ });
+}
+
+async function loadStatistics() {
+ setStatus("Lade Daten ...");
+ try {
+ const [jobs, settings, byMaterial, byPrinter, heatmap] = await Promise.all([
+ fetchJson("/api/jobs/stats/summary"),
+ fetchJson("/api/settings"),
+ fetchJson("/api/statistics/by-material"),
+ fetchJson("/api/statistics/by-printer"),
+ fetchJson("/api/statistics/heatmap?days=90")
+ ]);
+ chartData = { byMaterial, byPrinter };
+ updateKpis(jobs, settings, byMaterial, byPrinter);
+ updateCostBreakdown(jobs, settings);
+ updatePerformance(jobs, byPrinter);
+ renderHeatmap(heatmap?.data ?? []);
+ renderTopMaterials(byMaterial);
+ renderTopPrinters(byPrinter);
+ setStatus("Aktualisiert");
+ await loadCharts(currentDays);
+ } catch (e) {
+ console.error("Statistiken laden fehlgeschlagen", e);
+ setStatus("Fehler beim Laden der Statistiken");
+ }
+}
+
+async function loadCharts(days = 7) {
+ try {
+ const [timelineMaterial, timelineCosts] = await Promise.all([
+ fetchJson(`/api/statistics/timeline-by-material?days=${days}`),
+ fetchJson(`/api/statistics/timeline-costs?days=${days}`)
+ ]);
+ renderTimelineMaterial(timelineMaterial);
+ renderCostsTimeline(timelineCosts);
+ renderMaterial(chartData.byMaterial ?? []);
+ renderPrinter(chartData.byPrinter ?? []);
+ } catch (e) {
+ console.warn("Charts laden fehlgeschlagen", e);
+ }
+}
+
+async function fetchJson(url) {
+ const res = await fetch(url);
+ if (!res.ok) throw new Error("Request failed: " + url);
+ return res.json();
+}
+
+function updateKpis(jobs, settings, byMaterial, byPrinter) {
+ const totalJobs = jobs?.total_jobs ?? 0;
+ const completed = jobs?.completed_jobs ?? 0;
+ const filamentG = jobs?.total_filament_g ?? 0;
+ const filamentKg = filamentG / 1000;
+ const price = jobs?.energy_price_kwh ?? settings?.["cost.electricity_price_kwh"] ?? settings?.electricity_price_kwh ?? 0.30;
+ const energyKwh = jobs?.energy_kwh_total ?? 0;
+ const energyCost = jobs?.energy_cost_total;
+ const energyExact = jobs?.energy_kwh_exact ?? 0;
+ const energyEst = jobs?.energy_kwh_estimated ?? 0;
+ const energyCostExact = energyExact * price;
+ const energyCostEst = energyEst * price;
+ const durationH = jobs?.total_duration_h ?? 0;
+ const successRate = totalJobs > 0 ? Math.round((completed / totalJobs) * 100) : 0;
+
+ // Längster Job aus byPrinter Daten
+ let longestJobH = 0;
+ if (byPrinter && Array.isArray(byPrinter)) {
+ byPrinter.forEach(p => {
+ if (p.duration_h > longestJobH) longestJobH = p.duration_h;
+ });
+ }
+
+ // Häufigstes Material aus byMaterial Daten
+ let topMaterial = "–";
+ let topMaterialWeight = 0;
+ if (byMaterial && Array.isArray(byMaterial)) {
+ byMaterial.forEach(m => {
+ const weight = m.total_weight_g ?? 0;
+ if (weight > topMaterialWeight) {
+ topMaterialWeight = weight;
+ topMaterial = m.material_name ?? "Unbekannt";
+ }
+ });
+ }
+
+ // Card 1: Druckzeit
+ setText("kpiDurationH", `${number(durationH, 0)}h`);
+ setText("kpiLongestJob", longestJobH > 0 ? `Längster Job: ${number(longestJobH, 1)}h` : "Längster Job: –");
+
+ // Card 2: Verbrauch
+ setText("kpiFilamentKg", `${number(filamentKg, 2)}kg`);
+ setText("kpiTopMaterial", topMaterialWeight > 0 ? `Häufigstes: ${topMaterial} (${number(topMaterialWeight/1000, 2)}kg)` : "Häufigstes Material: –");
+
+ // Card 3: Kosten
+ setText("kpiEnergyCost", energyCost != null ? `${number(energyCost, 2)} €` : "–");
+ setText("kpiEnergyBreakdown", `Exakt: ${number(energyCostExact, 2)} € · Geschätzt: ~${number(energyCostEst, 2)} €`);
+
+ // Card 4: Jobs
+ setText("kpiTotalJobs", number(totalJobs));
+ setText("kpiSuccessRate", `Erfolgsquote: ${successRate}%`);
+
+ setStatus(`${number(durationH,0)}h Laufzeit · ${number(filamentKg,2)}kg · ${energyCost != null ? number(energyCost,0)+" €" : "–"}`);
+}
+
+function updateInventory(db, jobs) {
+ // Removed - replaced by updateCostBreakdown and updatePerformance
+}
+
+function updateCostBreakdown(jobs, settings) {
+ const energyCost = jobs?.energy_cost_total;
+ const energyExact = jobs?.energy_kwh_exact ?? 0;
+ const energyEst = jobs?.energy_kwh_estimated ?? 0;
+ const price = jobs?.energy_price_kwh ?? settings?.["cost.electricity_price_kwh"] ?? 0.30;
+ const costExact = energyExact * price;
+ const costEst = energyEst * price;
+ const totalJobs = jobs?.total_jobs ?? 0;
+ const costPerJob = totalJobs > 0 && energyCost ? energyCost / totalJobs : 0;
+
+ setText("costEnergy", energyCost != null ? `${number(energyCost, 2)} €` : "–");
+ setText("costExact", `${number(costExact, 2)} €`);
+ setText("costEstimated", `${number(costEst, 2)} €`);
+ setText("costPerJob", costPerJob > 0 ? `${number(costPerJob, 3)} €` : "–");
+ setText("electricityPrice", number(price, 2));
+}
+
+function updatePerformance(jobs, byPrinter) {
+ const totalJobs = jobs?.total_jobs ?? 0;
+ const completed = jobs?.completed_jobs ?? 0;
+ const durationH = jobs?.total_duration_h ?? 0;
+ const filamentG = jobs?.total_filament_g ?? 0;
+ const avgDuration = totalJobs > 0 ? durationH / totalJobs : 0;
+ const avgFilament = totalJobs > 0 ? filamentG / totalJobs : 0;
+ const successRate = totalJobs > 0 ? Math.round((completed / totalJobs) * 100) : 0;
+
+ let longestJobH = 0;
+ if (byPrinter && Array.isArray(byPrinter)) {
+ byPrinter.forEach(p => {
+ if (p.duration_h > longestJobH) longestJobH = p.duration_h;
+ });
+ }
+
+ setText("perfAvgDuration", `${number(avgDuration, 1)}h`);
+ setText("perfAvgFilament", `${number(avgFilament, 1)}g`);
+ setText("perfLongestJob", longestJobH > 0 ? `${number(longestJobH, 1)}h` : "–");
+ setBar("barSuccess", successRate);
+}
+
+function renderHeatmap(data) {
+ const container = document.getElementById("heatmapContainer");
+ if (!container) return;
+
+ container.innerHTML = "";
+
+ // Find max jobs for scaling
+ const maxJobs = Math.max(...data.map(d => d.jobs || 0), 1);
+
+ data.forEach(day => {
+ const cell = document.createElement("div");
+ cell.className = "heatmap-cell";
+
+ const jobs = day.jobs || 0;
+ const level = jobs === 0 ? 0 : Math.min(4, Math.ceil((jobs / maxJobs) * 4));
+ cell.classList.add(`level-${level}`);
+
+ cell.dataset.date = day.date;
+ cell.dataset.jobs = jobs;
+ cell.dataset.filament = day.filament_g || 0;
+ cell.dataset.duration = day.duration_h || 0;
+
+ cell.addEventListener("mouseenter", showHeatmapTooltip);
+ cell.addEventListener("mouseleave", hideHeatmapTooltip);
+
+ container.appendChild(cell);
+ });
+}
+
+function showHeatmapTooltip(e) {
+ const tooltip = document.getElementById("heatmapTooltip");
+ if (!tooltip) return;
+
+ const date = e.target.dataset.date;
+ const jobs = e.target.dataset.jobs;
+ const filament = e.target.dataset.filament;
+ const duration = e.target.dataset.duration;
+
+ tooltip.innerHTML = `
+ ${date}
+ ${jobs} Jobs · ${number(parseFloat(filament), 1)}g · ${number(parseFloat(duration), 1)}h
+ `;
+
+ const rect = e.target.getBoundingClientRect();
+ tooltip.style.display = "block";
+ tooltip.style.left = `${rect.left + rect.width / 2}px`;
+ tooltip.style.top = `${rect.top - 60}px`;
+}
+
+function hideHeatmapTooltip() {
+ const tooltip = document.getElementById("heatmapTooltip");
+ if (tooltip) tooltip.style.display = "none";
+}
+
+function renderTopMaterials(data) {
+ const container = document.getElementById("topMaterialsList");
+ if (!container) return;
+
+ // Sort by total_weight_g descending and take top 5
+ const top5 = [...data]
+ .sort((a, b) => (b.total_weight_g || 0) - (a.total_weight_g || 0))
+ .slice(0, 5);
+
+ const maxWeight = Math.max(...top5.map(m => m.total_weight_g || 0), 1);
+
+ container.innerHTML = top5.map(material => {
+ const weight = material.total_weight_g || 0;
+ const weightKg = weight / 1000;
+ const percent = Math.round((weight / maxWeight) * 100);
+ const color = palette(top5.indexOf(material));
+
+ return `
+
+
+ ${material.material_name || "Unbekannt"}
+ ${number(weightKg, 2)}kg
+
+
+
+ `;
+ }).join("");
+}
+
+function renderTopPrinters(data) {
+ const container = document.getElementById("topPrintersList");
+ if (!container) return;
+
+ // Sort by duration_h descending and take top 5
+ const top5 = [...data]
+ .sort((a, b) => (b.duration_h || 0) - (a.duration_h || 0))
+ .slice(0, 5);
+
+ const maxDuration = Math.max(...top5.map(p => p.duration_h || 0), 1);
+
+ // Moderne vertikale Card-Layout (Grid)
+ container.innerHTML = `
+
+ ${top5.map((printer, idx) => {
+ const duration = printer.duration_h || 0;
+ const percent = Math.round((duration / maxDuration) * 100);
+ const color = palette(idx + 3);
+ const printerName = printer.printer_name || "Unbekannt";
+
+ return `
+
+
🖨️
+
${printerName}
+
+
+ ${number(duration, 1)}h
+ ${printer.jobs} Job${printer.jobs !== 1 ? 's' : ''}
+
+
+ `;
+ }).join("")}
+
+ `;
+}
+
+function renderTimelineMaterial(data) {
+ if (!window.Chart) return;
+ const ctx = document.getElementById("chartTimelineMaterial");
+ if (!ctx) return;
+
+ // Destroy old chart if exists
+ if (chartInstances.timelineMaterial) {
+ chartInstances.timelineMaterial.destroy();
+ }
+
+ const labels = data?.dates ?? [];
+ const datasets = (data?.datasets ?? []).map((ds, i) => ({
+ label: ds.material,
+ data: ds.data,
+ borderColor: palette(i),
+ backgroundColor: `${palette(i)}33`,
+ tension: 0.35,
+ fill: true,
+ pointRadius: 2,
+ }));
+
+ chartInstances.timelineMaterial = new Chart(ctx, {
+ type: "line",
+ data: { labels, datasets },
+ options: {
+ responsive: true,
+ maintainAspectRatio: false,
+ scales: {
+ y: { beginAtZero: true, stacked: false, ticks: { color: "#cfd8e3" } },
+ x: { ticks: { color: "#cfd8e3", maxRotation: 45 } }
+ },
+ plugins: {
+ legend: { labels: { color: "#cfd8e3", boxWidth: 12 } },
+ tooltip: { mode: "index", intersect: false },
+ },
+ },
+ });
+}
+
+function renderCostsTimeline(data) {
+ if (!window.Chart) return;
+ const ctx = document.getElementById("chartCosts");
+ if (!ctx) return;
+
+ // Destroy old chart if exists
+ if (chartInstances.costs) {
+ chartInstances.costs.destroy();
+ }
+
+ const labels = data?.dates ?? [];
+ const dailyCost = data?.daily_cost ?? [];
+ const cumulativeCost = data?.cumulative_cost ?? [];
+
+ chartInstances.costs = new Chart(ctx, {
+ type: "bar",
+ data: {
+ labels,
+ datasets: [
+ {
+ label: "Tägliche Kosten",
+ data: dailyCost,
+ backgroundColor: "#00c6ff",
+ borderColor: "#00c6ff",
+ borderWidth: 0,
+ borderRadius: 6,
+ barPercentage: 0.75,
+ categoryPercentage: 0.6,
+ maxBarThickness: 44,
+ yAxisID: "y",
+ },
+ {
+ label: "Kumuliert",
+ data: cumulativeCost,
+ type: "line",
+ borderColor: "#f39c12",
+ backgroundColor: "rgba(243,156,18,0.25)",
+ tension: 0.3,
+ fill: false,
+ yAxisID: "y1",
+ pointRadius: 4,
+ pointBackgroundColor: "#f39c12",
+ spanGaps: true,
+ },
+ ],
+ },
+ options: {
+ responsive: true,
+ maintainAspectRatio: false,
+ scales: {
+ y: { beginAtZero: true, position: "left", ticks: { color: "#cfd8e3" } },
+ y1: { beginAtZero: true, position: "right", grid: { drawOnChartArea: false }, ticks: { color: "#cfd8e3" } },
+ x: { ticks: { color: "#cfd8e3", maxRotation: 45 } }
+ },
+ plugins: {
+ legend: {
+ display: true,
+ position: "bottom",
+ align: "center",
+ labels: { color: "#cfd8e3", boxWidth: 12, padding: 8 }
+ },
+ tooltip: { mode: "index", intersect: false },
+ },
+ },
+ });
+}
+
+function renderMaterial(data) {
+ if (!window.Chart) return;
+ const ctx = document.getElementById("chartMaterial");
+ if (!ctx) return;
+
+ // Destroy old chart if exists
+ if (chartInstances.material) {
+ chartInstances.material.destroy();
+ }
+
+ const labels = data.map(d => d.material_name || d.name || "Unbekannt");
+ const values = data.map(d => (d.total_weight_g || 0) / 1000); // Convert to kg
+ const colors = data.map((d, i) => d.color || palette(i));
+
+ chartInstances.material = new Chart(ctx, {
+ type: "doughnut",
+ data: {
+ labels,
+ datasets: [{ data: values, backgroundColor: colors, borderWidth: 0 }]
+ },
+ options: {
+ cutout: "60%",
+ plugins: {
+ legend: { labels: { color: "#cfd8e3" } },
+ tooltip: {
+ callbacks: {
+ label: (ctx) => `${ctx.label}: ${ctx.parsed.toFixed(2)} kg`
+ }
+ }
+ }
+ }
+ });
+}
+
+function renderPrinter(data) {
+ if (!window.Chart) return;
+ const ctx = document.getElementById("chartPrinter");
+ if (!ctx) return;
+
+ // Destroy old chart if exists
+ if (chartInstances.printer) {
+ chartInstances.printer.destroy();
+ }
+
+ const sorted = [...data].sort((a,b) => (b.duration_h ?? 0) - (a.duration_h ?? 0)).slice(0, 6);
+ const labels = sorted.map(d => d.printer_name || "Unbekannt");
+ const values = sorted.map(d => d.duration_h ?? 0);
+ const colors = labels.map((_, i) => palette(i + 3));
+
+ chartInstances.printer = new Chart(ctx, {
+ type: "bar",
+ data: {
+ labels,
+ datasets: [{
+ label: "Dauer (h)",
+ data: values,
+ backgroundColor: colors,
+ borderRadius: 8,
+ }]
+ },
+ options: {
+ indexAxis: "y",
+ scales: {
+ x: { beginAtZero: true, ticks: { color: "#cfd8e3" } },
+ y: { ticks: { color: "#cfd8e3" } }
+ },
+ plugins: { legend: { display: false } }
+ }
+ });
+}
+
+function setText(id, value) {
+ const el = document.getElementById(id);
+ if (el) el.textContent = value;
+}
+
+function setBar(id, pct) {
+ const el = document.getElementById(id);
+ if (el) el.style.width = `${pct}%`;
+}
+
+function setStatus(msg) {
+ setText("statsInfo", msg);
+}
+
+function number(val, digits = 0) {
+ const n = Number(val ?? 0);
+ return n.toLocaleString("de-DE", { minimumFractionDigits: digits, maximumFractionDigits: digits });
+}
+
+function palette(i) {
+ const colors = ["#00c6ff", "#f39c12", "#7d5fff", "#2ecc71", "#ff6b6b", "#1abc9c", "#e84393", "#fdcb6e"];
+ return colors[i % colors.length];
+}
diff --git a/frontend/static/materials.css b/frontend/static/materials.css
new file mode 100644
index 0000000..21b85b5
--- /dev/null
+++ b/frontend/static/materials.css
@@ -0,0 +1,421 @@
+/* Materials Page Additional Styles */
+
+/* MODAL */
+.modal {
+ display: none;
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background: rgba(0, 0, 0, 0.85);
+ z-index: 1000;
+ align-items: center;
+ justify-content: center;
+ backdrop-filter: blur(4px);
+}
+
+.modal.active {
+ display: flex;
+}
+
+.modal-content {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 16px;
+ max-width: 600px;
+ width: 90%;
+ max-height: 90vh;
+ overflow-y: auto;
+ animation: modalSlideIn 0.3s ease;
+ box-shadow: 0 20px 60px rgba(0, 0, 0, 0.5);
+}
+
+@keyframes modalSlideIn {
+ from {
+ opacity: 0;
+ transform: translateY(-50px);
+ }
+ to {
+ opacity: 1;
+ transform: translateY(0);
+ }
+}
+
+.modal-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 24px;
+ border-bottom: 1px solid var(--border);
+}
+
+.modal-header h2 {
+ margin: 0;
+ color: var(--accent);
+ font-size: 1.4rem;
+}
+
+.modal-close {
+ background: none;
+ border: none;
+ color: var(--text-dim);
+ font-size: 2rem;
+ cursor: pointer;
+ padding: 0;
+ width: 40px;
+ height: 40px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: 8px;
+ transition: all 0.3s ease;
+}
+
+.modal-close:hover {
+ background: var(--panel-2);
+ color: var(--error);
+}
+
+/* FORM */
+form {
+ padding: 24px;
+}
+
+.form-group {
+ margin-bottom: 20px;
+}
+
+.form-group label {
+ display: block;
+ margin-bottom: 8px;
+ font-weight: 600;
+ color: var(--text);
+ font-size: 0.95rem;
+}
+
+.form-group input,
+.form-group textarea,
+.form-group select {
+ width: 100%;
+ padding: 12px 14px;
+ background: var(--panel-2);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-family: inherit;
+ font-size: 0.95rem;
+ transition: all 0.3s ease;
+}
+
+.form-group select {
+ appearance: none;
+ background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23f39c12' d='M6 9L1 4h10z'/%3E%3C/svg%3E");
+ background-repeat: no-repeat;
+ background-position: right 14px center;
+ padding-right: 40px;
+}
+
+.form-group select option {
+ background: var(--panel);
+ color: var(--text);
+ padding: 8px;
+ padding-top: 10px;
+ padding-bottom: 10px;
+}
+
+.form-group select option:hover {
+ background: var(--accent);
+ color: #0c0f14;
+}
+
+.form-group select option:checked {
+ background: linear-gradient(var(--accent), var(--accent));
+ color: #0c0f14;
+}
+
+.form-group input:focus,
+.form-group textarea:focus,
+.form-group select:focus {
+ outline: none;
+ border-color: var(--accent);
+ background-color: rgba(243, 156, 18, 0.05);
+ box-shadow: 0 0 0 3px rgba(243, 156, 18, 0.1);
+}
+
+.form-group textarea {
+ resize: vertical;
+ min-height: 100px;
+}
+
+.form-row {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 16px;
+}
+
+.modal-actions {
+ display: flex;
+ gap: 12px;
+ justify-content: flex-end;
+ padding: 24px;
+ border-top: 1px solid var(--border);
+ margin-top: 0;
+ padding-top: 16px;
+}
+
+.modal-header h2 {
+ margin: 0;
+ color: var(--accent);
+}
+
+.modal-close {
+ background: none;
+ border: none;
+ color: var(--text-dim);
+ font-size: 2rem;
+ cursor: pointer;
+ padding: 0;
+ width: 40px;
+ height: 40px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: 6px;
+ transition: all 0.3s ease;
+}
+
+.modal-close:hover {
+ background: var(--bg-dark);
+ color: var(--error);
+}
+
+.modal-actions {
+ display: flex;
+ gap: 10px;
+ justify-content: flex-end;
+ padding: 20px;
+ border-top: 1px solid var(--border);
+}
+
+/* FORM */
+form {
+ padding: 20px;
+}
+
+.form-group {
+ margin-bottom: 20px;
+}
+
+.form-row {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 15px;
+}
+
+label {
+ display: block;
+ margin-bottom: 8px;
+ color: var(--text);
+ font-weight: 600;
+ font-size: 0.9rem;
+}
+
+input[type="text"],
+input[type="number"],
+input[type="email"],
+select,
+textarea {
+ width: 100%;
+ padding: 10px 12px;
+ background: var(--bg-dark);
+ border: 1px solid var(--border);
+ border-radius: 6px;
+ color: var(--text);
+ font-size: 1rem;
+ transition: all 0.3s ease;
+}
+
+input:focus,
+select:focus,
+textarea:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(79, 195, 247, 0.1);
+}
+
+textarea {
+ resize: vertical;
+ font-family: inherit;
+}
+
+small {
+ display: block;
+ margin-top: 5px;
+ font-size: 0.85rem;
+}
+
+/* TABLE ACTIONS */
+.table-actions {
+ display: flex;
+ gap: 8px;
+}
+
+.btn-icon {
+ padding: 8px 12px;
+ background: var(--bg-dark);
+ border: 1px solid var(--border);
+ color: var(--text);
+ border-radius: 6px;
+ cursor: pointer;
+ transition: all 0.3s ease;
+ font-size: 1rem;
+}
+
+.btn-icon:hover {
+ border-color: var(--accent);
+ transform: translateY(-2px);
+}
+
+.btn-delete {
+ color: var(--error);
+}
+
+.btn-delete:hover {
+ border-color: var(--error);
+ background: rgba(239, 83, 80, 0.1);
+}
+
+/* COLOR PREVIEW */
+.color-preview {
+ display: inline-block;
+ width: 30px;
+ height: 30px;
+ border-radius: 6px;
+ border: 2px solid var(--border);
+ vertical-align: middle;
+}
+
+.material-color-cell {
+ display: flex;
+ align-items: center;
+ gap: 0.5rem;
+ font-size: 0.9rem;
+}
+
+.color-code {
+ font-family: "Fira Code", "JetBrains Mono", monospace;
+ color: var(--text-dim);
+}
+
+.color-empty {
+ color: var(--text-dim);
+}
+
+/* TABLE */
+.materials-table {
+ width: 100%;
+ border-collapse: collapse;
+ background: var(--panel);
+ border-radius: 12px;
+ overflow: hidden;
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
+}
+
+.materials-table thead {
+ background: linear-gradient(135deg, var(--panel-2) 0%, rgba(243, 156, 18, 0.05) 100%);
+ border-bottom: 2px solid var(--border);
+}
+
+.materials-table th {
+ padding: 18px 16px;
+ text-align: left;
+ font-weight: 700;
+ color: var(--accent);
+ font-size: 0.85rem;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+}
+
+.materials-table td {
+ padding: 16px;
+ border-bottom: 1px solid var(--border);
+ color: var(--text);
+ font-size: 0.95rem;
+}
+
+.materials-table tbody tr {
+ transition: all 0.3s ease;
+}
+
+.materials-table tbody tr:hover {
+ background: var(--panel-2);
+ box-shadow: inset 0 0 12px rgba(243, 156, 18, 0.08);
+}
+
+.materials-table tbody tr:last-child td {
+ border-bottom: none;
+}
+
+/* ACTION BUTTONS */
+.actions-inline {
+ display: flex;
+ gap: 8px;
+ justify-content: center;
+}
+
+.btn-icon {
+ background: var(--panel-2);
+ border: 1px solid var(--border);
+ color: var(--text);
+ width: 36px;
+ height: 36px;
+ padding: 0;
+ border-radius: 8px;
+ cursor: pointer;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ font-size: 1rem;
+ transition: all 0.3s ease;
+ font-weight: 600;
+}
+
+.btn-icon:hover {
+ transform: translateY(-2px);
+ background: var(--accent);
+ border-color: var(--accent);
+ color: #0c0f14;
+ box-shadow: 0 8px 16px rgba(243, 156, 18, 0.3);
+}
+
+.btn-icon.delete:hover {
+ background: var(--error);
+ border-color: var(--error);
+ box-shadow: 0 8px 16px rgba(231, 76, 60, 0.3);
+}
+
+.btn-icon.edit:hover {
+ background: var(--accent);
+ border-color: var(--accent);
+}
+
+/* RESPONSIVE */
+@media (max-width: 768px) {
+ .form-row {
+ grid-template-columns: 1fr;
+ }
+
+ .modal-content {
+ width: 95%;
+ max-height: 95vh;
+ }
+
+ .table-container {
+ overflow-x: auto;
+ }
+
+ .materials-table th,
+ .materials-table td {
+ padding: 10px 12px;
+ font-size: 0.85rem;
diff --git a/frontend/static/materials.js b/frontend/static/materials.js
new file mode 100644
index 0000000..f99f159
--- /dev/null
+++ b/frontend/static/materials.js
@@ -0,0 +1,267 @@
+// Materials Management JavaScript
+
+let materials = [];
+let currentMaterialId = null;
+let deleteTargetId = null;
+
+// === INIT ===
+document.addEventListener('DOMContentLoaded', () => {
+ loadMaterials();
+ setupEventListeners();
+});
+
+function setupEventListeners() {
+ // Search
+ document.getElementById('searchInput').addEventListener('input', filterMaterials);
+
+ // Brand filter
+ document.getElementById('filterBrand').addEventListener('change', filterMaterials);
+}
+
+// === LOAD MATERIALS ===
+async function loadMaterials() {
+ try {
+ const response = await fetch('/api/materials/');
+ materials = await response.json();
+
+ updateBrandFilter();
+ renderMaterials(materials);
+
+ } catch (error) {
+ console.error('Fehler beim Laden der Materialien:', error);
+ showNotification('Fehler beim Laden der Materialien', 'error');
+ }
+}
+
+function updateBrandFilter() {
+ const brands = [...new Set(materials.map(m => m.brand).filter(b => b))];
+ const select = document.getElementById('filterBrand');
+
+ // Keep "Alle Marken" option
+ select.innerHTML = 'Alle Marken ';
+
+ brands.sort().forEach(brand => {
+ const option = document.createElement('option');
+ option.value = brand;
+ option.textContent = brand;
+ select.appendChild(option);
+ });
+}
+
+function renderMaterials(materialsToRender) {
+ const container = document.getElementById('materialsTable');
+ document.getElementById('materialCount').textContent = materialsToRender.length;
+
+ if (materialsToRender.length === 0) {
+ container.innerHTML = `
+
+
📦
+
Keine Materialien gefunden
+
Fügen Sie Ihr erstes Material hinzu!
+
+ ➕ Material hinzufügen
+
+
+ `;
+ return;
+ }
+
+ container.innerHTML = `
+
+
+
+
+ Name
+ Marke
+ Dichte
+ Durchmesser
+ Aktionen
+
+
+
+ ${materialsToRender.map(m => `
+
+ ${m.name}
+ ${m.brand || '-'}
+ ${m.density} g/cm³
+ ${m.diameter} mm
+
+
+
+ ✏️
+
+
+ 🗑️
+
+
+
+
+ `).join('')}
+
+
+
+ `;
+}
+
+// === FILTER ===
+function filterMaterials() {
+ const searchTerm = document.getElementById('searchInput').value.toLowerCase();
+ const brandFilter = document.getElementById('filterBrand').value;
+
+ let filtered = materials;
+
+ // Search filter
+ if (searchTerm) {
+ filtered = filtered.filter(m =>
+ m.name.toLowerCase().includes(searchTerm) ||
+ (m.brand && m.brand.toLowerCase().includes(searchTerm)) ||
+ (m.notes && m.notes.toLowerCase().includes(searchTerm))
+ );
+ }
+
+ // Brand filter
+ if (brandFilter) {
+ filtered = filtered.filter(m => m.brand === brandFilter);
+ }
+
+ renderMaterials(filtered);
+}
+
+function clearFilters() {
+ document.getElementById('searchInput').value = '';
+ document.getElementById('filterBrand').value = '';
+ renderMaterials(materials);
+}
+
+// === MODAL MANAGEMENT ===
+function openAddModal() {
+ currentMaterialId = null;
+ document.getElementById('modalTitle').textContent = '➕ Material hinzufügen';
+ document.getElementById('materialForm').reset();
+ document.getElementById('materialId').value = '';
+ document.getElementById('materialDensity').value = '1.24';
+ document.getElementById('materialDiameter').value = '1.75';
+ document.getElementById('materialModal').classList.add('active');
+}
+
+function openEditModal(id) {
+ const material = materials.find(m => m.id === id);
+ if (!material) return;
+
+ currentMaterialId = id;
+ document.getElementById('modalTitle').textContent = '✏️ Material bearbeiten';
+
+ document.getElementById('materialId').value = material.id;
+ document.getElementById('materialName').value = material.name;
+ document.getElementById('materialBrand').value = material.brand || '';
+ document.getElementById('materialDensity').value = material.density;
+ document.getElementById('materialDiameter').value = material.diameter;
+ document.getElementById('materialNotes').value = material.notes || '';
+
+ document.getElementById('materialModal').classList.add('active');
+}
+
+function closeModal() {
+ document.getElementById('materialModal').classList.remove('active');
+ currentMaterialId = null;
+}
+
+function openDeleteModal(id) {
+ deleteTargetId = id;
+ document.getElementById('deleteModal').classList.add('active');
+}
+
+function closeDeleteModal() {
+ document.getElementById('deleteModal').classList.remove('active');
+ deleteTargetId = null;
+}
+
+// === SAVE MATERIAL ===
+async function saveMaterial(event) {
+ event.preventDefault();
+
+ const data = {
+ name: document.getElementById('materialName').value,
+ brand: document.getElementById('materialBrand').value || null,
+ density: parseFloat(document.getElementById('materialDensity').value),
+ diameter: parseFloat(document.getElementById('materialDiameter').value),
+ notes: document.getElementById('materialNotes').value || null
+ };
+
+ try {
+ let response;
+
+ if (currentMaterialId) {
+ // Update existing
+ response = await fetch(`/api/materials/${currentMaterialId}`, {
+ method: 'PUT',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(data)
+ });
+ } else {
+ // Create new
+ response = await fetch('/api/materials/', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(data)
+ });
+ }
+
+ if (response.ok) {
+ showNotification(
+ currentMaterialId ? 'Material aktualisiert!' : 'Material erstellt!',
+ 'success'
+ );
+ closeModal();
+ clearFilters(); // Reset filters
+ await loadMaterials();
+ } else {
+ throw new Error('Speichern fehlgeschlagen');
+ }
+
+ } catch (error) {
+ console.error('Fehler beim Speichern:', error);
+ showNotification('Fehler beim Speichern', 'error');
+ }
+}
+
+// === DELETE MATERIAL ===
+async function confirmDelete() {
+ if (!deleteTargetId) return;
+
+ try {
+ const response = await fetch(`/api/materials/${deleteTargetId}`, {
+ method: 'DELETE'
+ });
+
+ if (response.ok) {
+ showNotification('Material gelöscht', 'success');
+ closeDeleteModal();
+ clearFilters(); // Reset filters
+ await loadMaterials();
+ } else {
+ throw new Error('Löschen fehlgeschlagen');
+ }
+
+ } catch (error) {
+ console.error('Fehler beim Löschen:', error);
+ showNotification('Fehler beim Löschen', 'error');
+ }
+}
+
+// Close modals on ESC key
+document.addEventListener('keydown', (e) => {
+ if (e.key === 'Escape') {
+ closeModal();
+ closeDeleteModal();
+ }
+});
+
+// Close modals on background click
+document.getElementById('materialModal').addEventListener('click', (e) => {
+ if (e.target.id === 'materialModal') closeModal();
+});
+
+document.getElementById('deleteModal').addEventListener('click', (e) => {
+ if (e.target.id === 'deleteModal') closeDeleteModal();
+});
diff --git a/frontend/static/printers.css b/frontend/static/printers.css
new file mode 100644
index 0000000..115fd91
--- /dev/null
+++ b/frontend/static/printers.css
@@ -0,0 +1,134 @@
+/* Printer grid styling */
+:root {
+ --printer-card-bg: #141414;
+ --printer-border: #23272f;
+ --printer-shadow: 0 10px 30px rgba(0,0,0,0.35);
+}
+
+.printer-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
+ gap: 16px;
+ align-items: stretch;
+ justify-items: start;
+}
+
+.printer-card {
+ background: var(--printer-card-bg);
+ border: 1px solid var(--printer-border);
+ border-radius: 12px;
+ padding: 18px;
+ box-shadow: var(--printer-shadow);
+ position: relative;
+ display: flex;
+ flex-direction: column;
+ gap: 14px;
+ max-width: 540px;
+ width: 100%;
+ margin: 0;
+}
+
+.printer-card .status-badge {
+ position: absolute;
+ top: 12px;
+ right: 12px;
+}
+
+.printer-head {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ gap: 12px;
+ margin-top: 8px;
+}
+
+.printer-icon {
+ font-size: 2.4rem;
+}
+
+.printer-title {
+ text-align: center;
+ font-weight: 700;
+ color: var(--accent);
+}
+
+.info-group.compact {
+ display: grid;
+ grid-template-columns: 1fr;
+ gap: 8px;
+}
+
+.info-group.compact .info-item {
+ display: flex;
+ justify-content: space-between;
+ padding: 8px 10px;
+ background: #0e0e0e;
+ border: 1px solid var(--border);
+ border-radius: 8px;
+}
+
+.info-group.compact .info-label {
+ color: var(--text-dim);
+ font-size: 0.9rem;
+}
+
+.info-group.compact .info-value {
+ color: var(--text);
+ font-weight: 600;
+}
+
+.card-actions {
+ display: flex;
+ gap: 10px;
+ justify-content: center;
+}
+
+.card-actions .btn {
+ flex: 1;
+ display: inline-flex;
+ align-items: center;
+ justify-content: center;
+}
+
+.card-actions .btn-icon {
+ width: auto;
+ padding: 10px 14px;
+ background: var(--bg-card);
+ border-color: var(--border);
+ color: var(--error);
+ white-space: nowrap;
+}
+
+.cta-row {
+ display: flex;
+ gap: 10px;
+ flex-wrap: wrap;
+}
+
+.cta-row .btn {
+ flex: 1;
+ display: inline-flex;
+ align-items: center;
+ justify-content: center;
+ background: var(--bg-card);
+ border: 1px solid var(--border);
+ color: var(--text);
+}
+
+/* Delete modal centering */
+.modal-content.confirm-delete {
+ text-align: center;
+}
+.modal-content.confirm-delete .modal-header,
+.modal-content.confirm-delete .modal-actions {
+ justify-content: center;
+}
+.modal-content.confirm-delete .modal-body p {
+ margin: 8px 0;
+}
+
+@media (max-width: 600px) {
+ .printer-card {
+ padding: 14px;
+ }
+}
diff --git a/frontend/static/printers.js b/frontend/static/printers.js
new file mode 100644
index 0000000..fc5fac2
--- /dev/null
+++ b/frontend/static/printers.js
@@ -0,0 +1,998 @@
+// Printers Management JavaScript
+
+// Notification-Box anzeigen
+
+function showNotification(message, type = 'info') {
+
+ const box = document.getElementById('notificationBox');
+
+ if (!box) return;
+
+ box.innerHTML = `${message}
`;
+
+ box.style.display = 'block';
+
+ setTimeout(() => { box.style.display = 'none'; }, 3000);
+
+}
+
+
+
+let printers = [];
+
+let foundPrinters = [];
+
+let currentPrinterId = null;
+
+let deleteTargetId = null;
+
+
+
+// === INIT ===
+
+document.addEventListener('DOMContentLoaded', () => {
+
+ loadPrinters();
+
+ setupFormListeners();
+
+ // Auto-refresh alle 10 Sekunden
+ setInterval(loadPrinters, 10000);
+
+ // Optional: Automatisch beim Laden scannen
+
+ // scanForPrinters();
+
+});
+
+
+
+function setupFormListeners() {
+
+ // Type change handler already set in HTML with onchange
+
+}
+
+
+
+// === LOAD PRINTERS ===
+
+async function loadPrinters() {
+
+ try {
+
+ const response = await fetch(`/api/printers/?live=true&_=${Date.now()}`, {
+ cache: 'no-store'
+ });
+
+
+
+ if (!response.ok) {
+
+ // If endpoint doesn't exist yet, show empty state
+
+ if (response.status === 404) {
+
+ printers = [];
+
+ renderPrinters();
+
+ return;
+
+ }
+
+ throw new Error('Laden fehlgeschlagen');
+
+ }
+
+
+
+ printers = await response.json();
+ // Spiegel online-Flag in _online, damit UI sofort den Status anzeigt
+ printers = printers.map(p => ({ ...p, _online: p.online === true || p.online === 'true' }));
+
+ renderPrinters();
+
+
+
+ } catch (error) {
+
+ console.error('Fehler beim Laden der Drucker:', error);
+
+ printers = [];
+
+ renderPrinters();
+
+ }
+
+}
+
+
+
+function renderPrinters() {
+
+ const container = document.getElementById('printersGrid');
+
+ if (printers.length === 0) {
+
+ container.innerHTML = `
+
+
+
+
🖨️
+
+
Keine Drucker konfiguriert
+
+
Fügen Sie Ihren ersten Drucker hinzu!
+
+
+
+ ➕ Drucker hinzufügen
+
+
+
+
+
+ `;
+
+ return;
+
+ }
+
+ container.innerHTML = `
+
+
+
+ ${printers.map(p => renderPrinterCard(p)).join('')}
+
+
+
+ `;
+
+ // Verbindungstest nur noch per Button, nicht automatisch
+
+}
+
+
+
+// === FOUND PRINTERS ===
+
+
+
+function renderFoundPrinters() {
+
+ const container = document.getElementById('foundPrintersGrid');
+
+ if (!foundPrinters || foundPrinters.length === 0) {
+
+ container.innerHTML = '';
+
+ return;
+
+ }
+
+ container.innerHTML = `
+
+ Gefundene Drucker im Netzwerk
+
+
+
+ ${foundPrinters.map(p => renderFoundPrinterCard(p)).join('')}
+
+
+
+ `;
+
+}
+
+
+
+function renderFoundPrinterCard(printer) {
+
+ const typeIcons = {
+
+ bambu: '🎯',
+
+ klipper: '🛠️',
+
+ unknown: '❓'
+
+ };
+
+ const typeNames = {
+
+ bambu: 'Bambu Lab',
+
+ klipper: 'Klipper',
+
+ unknown: 'Unbekannt'
+
+ };
+
+ const icon = typeIcons[printer.type] || '🖨️';
+
+ const typeName = typeNames[printer.type] || printer.type;
+
+ return `
+
+
+
+
+
+ Nicht hinzugefügt
+
+
+
+
${icon}
+
+
${printer.hostname || printer.ip}
+
+
+
+
+
+ Typ
+
+ ${typeName}
+
+
+
+
+
+ IP-Adresse
+
+ ${printer.ip}:${printer.port}
+
+
+
+
+
+
+
+ ➕ Hinzufügen
+
+
+
+
+
+ `;
+
+}
+
+function renderFoundPrinterCard(printer) {
+
+ const typeIcons = {
+
+ 'bambu': '🎯',
+
+ 'klipper': '⚙️',
+
+ 'unknown': '❓'
+
+ };
+
+ const typeNames = {
+
+ 'bambu': 'Bambu Lab',
+
+ 'klipper': 'Klipper',
+
+ 'unknown': 'Unbekannt'
+
+ };
+
+ const icon = typeIcons[printer.type] || '🖨️';
+
+ const typeName = typeNames[printer.type] || printer.type;
+
+ return `
+
+
+
+
+
+ Nicht hinzugefügt
+
+
+
+
${icon}
+
+
${printer.hostname || printer.ip}
+
+
+
+
+
+ Typ
+
+ ${typeName}
+
+
+
+
+
+ IP-Adresse
+
+ ${printer.ip}:${printer.port}
+
+
+
+
+
+
+
+ ➕ Hinzufügen
+
+
+
+
+
+ `;
+
+}
+
+
+
+async function scanForPrinters() {
+
+ const btn = document.getElementById('scanPrintersBtn');
+
+ if (btn) btn.disabled = true;
+
+ const container = document.getElementById('foundPrintersGrid');
+
+ container.innerHTML = 'Scan läuft...
';
+
+ try {
+
+ const response = await fetch('/api/scanner/scan/quick');
+
+ if (!response.ok) throw new Error('Scan fehlgeschlagen');
+
+ const result = await response.json();
+
+ foundPrinters = result.printers || [];
+
+ renderFoundPrinters();
+
+ } catch (error) {
+
+ container.innerHTML = 'Fehler beim Scan
';
+
+ }
+
+ if (btn) btn.disabled = false;
+
+}
+
+
+
+function addFoundPrinter(printer) {
+
+ // Modal öffnen und Felder vorausfüllen
+
+ currentPrinterId = null;
+
+ document.getElementById('modalTitle').textContent = '➕ Gefundenen Drucker hinzufügen';
+
+ document.getElementById('printerForm').reset();
+
+ document.getElementById('printerId').value = '';
+
+ document.getElementById('printerName').value = printer.hostname || printer.ip;
+
+ document.getElementById('printerType').value = printer.type !== 'unknown' ? printer.type : '';
+
+ updateFormFields();
+
+ document.getElementById('printerIp').value = printer.ip;
+
+ document.getElementById('printerPort').value = printer.port || '';
+
+ document.getElementById('printerModal').classList.add('active');
+
+}
+
+
+
+function renderPrinterCard(printer) {
+
+ const typeIcons = {
+
+ bambu: '🎯',
+
+ klipper: '🛠️',
+
+ manual: '📝'
+
+ };
+
+ const typeNames = {
+
+ bambu: 'Bambu Lab',
+
+ klipper: 'Klipper',
+
+ manual: 'Manuell'
+
+ };
+
+ const icon = typeIcons[printer.printer_type] || '🖨️';
+
+ const typeName = typeNames[printer.printer_type] || printer.printer_type;
+
+ const isOnline = (printer.online === true) || (printer.online === 'true') || printer._online === true;
+
+ const thumb = printer.image_url
+
+ ? ` `
+
+ : `${icon}
`;
+
+
+
+ return `
+
+
+
+
${isOnline ? 'Online' : 'Offline'}
+
+
+
+
${thumb}
+
+
${printer.name}
+
+
+
+
+
+
+
+ Typ
+
+ ${typeName}
+
+
+
+ ${printer.ip_address ? `
+
+
+
+ IP-Adresse
+
+ ${printer.ip_address}${printer.port ? ':' + printer.port : ''}
+
+
` : ''}
+
+ ${printer.cloud_serial ? `
+
+
+
+ Seriennummer
+
+ ${printer.cloud_serial}
+
+
` : ''}
+
+
+
+ API konfiguriert
+
+ ${printer.api_key ? 'Ja' : 'Nein'}
+
+
+
+
+
+
+
+ ✏️ Bearbeiten
+
+ 🗑️ Löschen
+
+
+
+ ${printer.printer_type !== 'manual' ? `
+
+
+
+ 🔌 Verbindung testen
+
+
` : ''}
+
+
+
+ `;
+
+}
+
+
+
+// === FORM MANAGEMENT ===
+
+
+
+
+
+function updateFormFields() {
+
+ const type = document.getElementById('printerType').value;
+
+
+
+ const networkFields = document.getElementById('networkFields');
+
+ const bambuFields = document.getElementById('bambuFields');
+
+ const klipperFields = document.getElementById('klipperFields');
+
+
+
+ // Hide all optional fields
+
+ networkFields.style.display = 'none';
+
+ bambuFields.style.display = 'none';
+
+ klipperFields.style.display = 'none';
+
+
+
+ // Reset required attribute
+
+ document.getElementById('printerIp').required = false;
+
+
+
+ if (type === 'bambu') {
+
+ networkFields.style.display = 'block';
+
+ bambuFields.style.display = 'block';
+
+ document.getElementById('printerIp').required = true;
+
+ document.getElementById('printerPort').placeholder = '6000';
+
+ } else if (type === 'klipper') {
+
+ networkFields.style.display = 'block';
+
+ klipperFields.style.display = 'block';
+
+ document.getElementById('printerIp').required = true;
+
+ document.getElementById('printerPort').placeholder = '7125';
+
+ }
+
+}
+
+
+
+// === MODAL MANAGEMENT ===
+
+function openAddModal() {
+
+ currentPrinterId = null;
+
+ document.getElementById('modalTitle').textContent = '➕ Drucker hinzufügen';
+
+ document.getElementById('printerForm').reset();
+
+ document.getElementById('printerId').value = '';
+
+ const autoConnectEl = document.getElementById('printerAutoConnect');
+
+ if (autoConnectEl) autoConnectEl.checked = false;
+
+ updateFormFields();
+
+ document.getElementById('printerModal').classList.add('active');
+
+ toggleImageSection(false);
+
+}
+
+
+
+function openEditModal(id) {
+
+ const printer = printers.find(p => p.id === id);
+
+ if (!printer) return;
+
+
+
+ currentPrinterId = id;
+
+ document.getElementById('modalTitle').textContent = '✏️ Drucker bearbeiten';
+
+
+
+ document.getElementById('printerId').value = printer.id;
+
+ document.getElementById('printerName').value = printer.name;
+
+ document.getElementById('printerType').value = printer.printer_type;
+
+
+
+ updateFormFields();
+
+ document.getElementById('printerIp').value = printer.ip_address || '';
+ document.getElementById('printerPort').value = printer.port || '';
+ document.getElementById('printerSerial').value = printer.cloud_serial || '';
+ const autoConnectEl = document.getElementById('printerAutoConnect');
+ if (autoConnectEl) autoConnectEl.checked = !!printer.auto_connect;
+ toggleImageSection(true);
+ setImagePreview(printer.image_url, printer.id);
+
+ // Set API key in correct field based on type
+ if (printer.printer_type === 'bambu') {
+
+ document.getElementById('printerApiKey').value = printer.api_key || '';
+
+ } else if (printer.printer_type === 'klipper') {
+
+ document.getElementById('printerApiKeyKlipper').value = printer.api_key || '';
+
+ }
+
+
+
+ document.getElementById('printerModal').classList.add('active');
+
+}
+
+
+
+function closeModal() {
+
+ document.getElementById('printerModal').classList.remove('active');
+
+ currentPrinterId = null;
+
+}
+
+
+
+function openDeleteModal(id) {
+
+ deleteTargetId = id;
+
+ const p = printers.find(pr => pr.id === id);
+
+ const nameEl = document.getElementById('deletePrinterName');
+
+ if (p && nameEl) {
+
+ nameEl.textContent = p.name || 'Name unbekannt';
+
+ }
+
+ document.getElementById('deleteModal').classList.add('active');
+
+}
+
+
+
+function closeDeleteModal() {
+
+ document.getElementById('deleteModal').classList.remove('active');
+
+ deleteTargetId = null;
+
+}
+
+
+
+// === SAVE PRINTER ===
+
+async function savePrinter(event) {
+ event.preventDefault();
+
+
+
+ const type = document.getElementById('printerType').value;
+
+ const port = document.getElementById('printerPort').value;
+
+
+
+ let apiKey = null;
+
+ if (type === 'bambu') {
+
+ apiKey = document.getElementById('printerApiKey').value || null;
+
+ } else if (type === 'klipper') {
+
+ apiKey = document.getElementById('printerApiKeyKlipper').value || null;
+
+ }
+
+
+
+ const data = {
+
+ name: document.getElementById('printerName').value,
+
+ printer_type: type,
+
+ ip_address: document.getElementById('printerIp').value || null,
+
+ port: port ? parseInt(port) : null,
+
+ cloud_serial: document.getElementById('printerSerial').value || null,
+
+ api_key: apiKey,
+
+ auto_connect: document.getElementById('printerAutoConnect')?.checked || false
+ };
+
+
+ try {
+
+ let response;
+
+
+
+ if (currentPrinterId) {
+
+ // Update existing
+
+ response = await fetch(`/api/printers/${currentPrinterId}`, {
+
+ method: 'PUT',
+
+ headers: { 'Content-Type': 'application/json' },
+
+ body: JSON.stringify(data)
+
+ });
+
+ } else {
+
+ // Create new
+
+ response = await fetch('/api/printers/', {
+
+ method: 'POST',
+
+ headers: { 'Content-Type': 'application/json' },
+
+ body: JSON.stringify(data)
+
+ });
+
+ }
+
+
+
+ if (response.ok) {
+
+ showNotification(
+
+ currentPrinterId ? 'Drucker aktualisiert!' : 'Drucker erstellt!',
+
+ 'success'
+
+ );
+
+ closeModal();
+
+ await loadPrinters();
+
+ } else {
+
+ throw new Error('Speichern fehlgeschlagen');
+
+ }
+
+
+
+ } catch (error) {
+
+ console.error('Fehler beim Speichern:', error);
+
+ showNotification('Fehler beim Speichern', 'error');
+
+ }
+
+}
+
+
+
+// === DELETE PRINTER ===
+
+async function confirmDelete() {
+
+ if (!deleteTargetId) return;
+
+
+
+ try {
+
+ const response = await fetch(`/api/printers/${deleteTargetId}`, {
+
+ method: 'DELETE'
+
+ });
+
+
+
+ if (response.ok) {
+
+ showNotification('Drucker gelöscht', 'success');
+
+ closeDeleteModal();
+
+ await loadPrinters();
+
+ } else {
+
+ throw new Error('Löschen fehlgeschlagen');
+
+ }
+
+
+
+ } catch (error) {
+
+ console.error('Fehler beim Löschen:', error);
+
+ showNotification('Fehler beim Löschen', 'error');
+
+ }
+
+}
+
+
+
+// === TEST CONNECTION ===
+
+async function testConnection(id) {
+ const printer = printers.find(p => p.id === id);
+ if (!printer) return;
+ showNotification('Teste Verbindung...', 'info');
+ try {
+ const response = await fetch(`/api/printers/${id}/test`, {
+
+ method: 'POST'
+
+ });
+
+ if (response.ok) {
+
+ const result = await response.json();
+
+ if (result.status === 'success') {
+
+ showNotification(result.message, 'success');
+
+ printer._online = true;
+
+ } else {
+
+ showNotification(result.message, result.status);
+
+ printer._online = false;
+
+ }
+
+ renderPrinters();
+
+ } else {
+
+ showNotification('Fehler beim Testen der Verbindung', 'error');
+
+ printer._online = false;
+
+ renderPrinters();
+
+ }
+
+ } catch (error) {
+
+ console.error('Fehler beim Connection-Test:', error);
+
+ showNotification('Verbindungsfehler', 'error');
+
+ printer._online = false;
+
+ renderPrinters();
+
+ }
+}
+
+// === IMAGE UPLOAD ===
+function toggleImageSection(show) {
+ const section = document.getElementById('printerImageSection');
+ if (!section) return;
+ section.style.display = show ? 'block' : 'none';
+ if (!show) {
+ setImagePreview(null, null);
+ }
+}
+
+function setImagePreview(url, id) {
+ const img = document.getElementById('printerImagePreview');
+ const placeholder = document.getElementById('printerImagePlaceholder');
+ if (!img || !placeholder) return;
+ if (url) {
+ img.src = url + (id ? `?cb=${Date.now()}` : '');
+ img.style.display = 'block';
+ placeholder.style.display = 'none';
+ } else {
+ img.src = '';
+ img.style.display = 'none';
+ placeholder.style.display = 'block';
+ }
+}
+
+async function uploadPrinterImage() {
+ if (!currentPrinterId) {
+ showNotification('Bitte zuerst speichern, dann Bild hochladen.', 'warning');
+ return;
+ }
+ const fileInput = document.getElementById('printerImageFile');
+ if (!fileInput || !fileInput.files.length) {
+ showNotification('Bitte Bild auswählen (PNG/JPG/WEBP, max 1 MB)', 'warning');
+ return;
+ }
+ const file = fileInput.files[0];
+ if (!['image/png', 'image/jpeg', 'image/webp'].includes(file.type)) {
+ showNotification('Nur PNG/JPG/WEBP erlaubt', 'error');
+ return;
+ }
+ if (file.size > 1_000_000) {
+ showNotification('Bild zu groß (max 1 MB)', 'error');
+ return;
+ }
+ const form = new FormData();
+ form.append('file', file);
+ try {
+ const resp = await fetch(`/api/printers/${currentPrinterId}/image`, {
+ method: 'POST',
+ body: form
+ });
+ const data = await resp.json();
+ if (!resp.ok || !data.success) {
+ throw new Error(data.detail || data.message || 'Upload fehlgeschlagen');
+ }
+ setImagePreview(data.image_url, currentPrinterId);
+ showNotification('Bild gespeichert', 'success');
+ } catch (err) {
+ showNotification(err.message || 'Upload fehlgeschlagen', 'error');
+ }
+}
+
+// Close modals on ESC key
+document.addEventListener('keydown', (e) => {
+ if (e.key === 'Escape') {
+ closeModal();
+ closeDeleteModal();
+ }
+
+});
+
+
+
+// Close modals on background click
+
+document.getElementById('printerModal').addEventListener('click', (e) => {
+
+ if (e.target.id === 'printerModal') closeModal();
+
+});
+
+
+
+document.getElementById('deleteModal').addEventListener('click', (e) => {
+
+ if (e.target.id === 'deleteModal') closeDeleteModal();
+
+});
+
diff --git a/frontend/static/printers_modern.css b/frontend/static/printers_modern.css
new file mode 100644
index 0000000..4bea377
--- /dev/null
+++ b/frontend/static/printers_modern.css
@@ -0,0 +1,410 @@
+@import url("https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&display=swap");
+
+:root {
+ --bg: #0e1116;
+ --panel: #141a22;
+ --panel-2: #1a212c;
+ --border: #222a35;
+ --text: #e8ecf2;
+ --text-dim: #a7b2c3;
+ --accent: #f39c12;
+ --accent-2: #2ecc71;
+ --accent-3: #3498db;
+ --error: #e74c3c;
+ --shadow: 0 20px 50px rgba(0, 0, 0, 0.45);
+}
+
+* { box-sizing: border-box; }
+
+body.page {
+ margin: 0;
+ display: grid;
+ grid-template-columns: 260px 1fr;
+ min-height: 100vh;
+ background: radial-gradient(circle at 20% 20%, rgba(52, 152, 219, 0.08), transparent 35%), radial-gradient(circle at 80% 0%, rgba(243, 156, 18, 0.08), transparent 30%), var(--bg);
+ color: var(--text);
+ font-family: "Inter", "Segoe UI", system-ui, -apple-system, sans-serif;
+}
+
+.sidebar {
+ background: linear-gradient(180deg, #0c0f14 0%, #0b0d12 100%);
+ border-right: 1px solid var(--border);
+ padding: 24px 20px;
+ display: flex;
+ flex-direction: column;
+ gap: 24px;
+ box-shadow: var(--shadow);
+}
+
+.sidebar__brand {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+ font-weight: 700;
+ letter-spacing: 0.4px;
+}
+.brand__icon {
+ width: 38px;
+ height: 38px;
+ border-radius: 10px;
+ background: linear-gradient(135deg, #f39c12, #ff6b35);
+ display: grid;
+ place-items: center;
+ font-size: 1.1rem;
+ box-shadow: 0 10px 25px rgba(243, 156, 18, 0.35);
+}
+
+.brand__name { color: var(--text); }
+
+.sidebar__nav {
+ display: flex;
+ flex-direction: column;
+ gap: 10px;
+}
+.nav__item {
+ color: var(--text-dim);
+ text-decoration: none;
+ padding: 12px 14px;
+ border-radius: 12px;
+ transition: all 0.15s ease;
+ border: 1px solid transparent;
+}
+.nav__item:hover {
+ color: var(--text);
+ background: #11161f;
+ border-color: var(--border);
+}
+.nav__item--active {
+ background: #1f2835;
+ color: var(--text);
+ border-color: #2f3a4d;
+}
+
+.sidebar__footer {
+ margin-top: auto;
+ font-size: 0.85rem;
+ color: var(--text-dim);
+}
+.foot-label { font-weight: 700; color: var(--accent-3); }
+
+.content {
+ padding: 28px 36px 48px;
+}
+
+.content__header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ gap: 24px;
+ margin-bottom: 24px;
+}
+.eyebrow {
+ text-transform: uppercase;
+ letter-spacing: 0.12em;
+ font-size: 0.75rem;
+ color: var(--text-dim);
+ margin: 0 0 4px 0;
+}
+.title {
+ margin: 0;
+ font-size: 1.8rem;
+}
+.subtitle {
+ margin: 6px 0 0 0;
+ color: var(--text-dim);
+}
+
+.actions {
+ display: flex;
+ gap: 10px;
+}
+.btn {
+ border: none;
+ border-radius: 12px;
+ padding: 12px 16px;
+ font-weight: 700;
+ cursor: pointer;
+ transition: transform 0.1s ease, box-shadow 0.1s ease;
+}
+.btn.success {
+ background: #2ecc71;
+ color: #0c0f14;
+ border: 1px solid #2ecc71;
+ box-shadow: 0 12px 30px rgba(46, 204, 113, 0.4);
+}
+.btn.ghost {
+ background: #1b2330;
+ color: #e8ecf2;
+ border: 1px solid #2f3a4d;
+ box-shadow: none;
+}
+.btn:hover { transform: translateY(-1px); }
+.btn.success:hover {
+ background: #27ae60;
+ color: #0c0f14;
+ border-color: #27ae60;
+ box-shadow: 0 12px 30px rgba(39, 174, 96, 0.5);
+}
+.btn.ghost:hover {
+ background: #e74c3c;
+ color: #0c0f14;
+ border-color: #e74c3c;
+ box-shadow: 0 10px 24px rgba(231, 76, 60, 0.45);
+}
+
+.card-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
+ gap: 16px;
+ width: 100%;
+}
+.empty {
+ padding: 18px;
+ background: var(--panel);
+ border: 1px dashed var(--border);
+ border-radius: 14px;
+ text-align: center;
+ color: var(--text-dim);
+}
+
+.card {
+ background: radial-gradient(circle at 20% 20%, rgba(255, 255, 255, 0.03), transparent 40%), var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 14px;
+ padding: 14px;
+ box-shadow: var(--shadow);
+ display: grid;
+ grid-template-areas:
+ "head head status"
+ "body body body"
+ "progress progress progress";
+ gap: 10px;
+ min-height: 170px;
+ max-width: 360px;
+}
+.card__head {
+ grid-area: head;
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ gap: 10px;
+ flex-wrap: wrap;
+ position: relative;
+ padding-right: 48px;
+}
+.kebab {
+ position: absolute;
+ right: 4px;
+ top: 4px;
+}
+.card__badges {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+}
+.kebab button {
+ background: none;
+ border: none;
+ color: var(--text);
+ font-size: 18px;
+ cursor: pointer;
+ padding: 4px 6px;
+ border-radius: 8px;
+ transition: background 0.15s ease;
+}
+.kebab button:hover { background: #1c2430; }
+.kebab-menu {
+ position: absolute;
+ top: 26px;
+ right: 0;
+ background: #121821;
+ border: 1px solid var(--border);
+ border-radius: 10px;
+ min-width: 170px;
+ box-shadow: var(--shadow);
+ display: none;
+ z-index: 10;
+}
+.kebab-menu.open { display: block; }
+.kebab-menu button {
+ width: 100%;
+ padding: 10px 12px;
+ background: transparent;
+ border: none;
+ color: var(--text);
+ text-align: left;
+ cursor: pointer;
+}
+.kebab-menu button:hover { background: #1c2430; }
+.card__image {
+ width: 90px;
+ height: 90px;
+ border-radius: 12px;
+ background: linear-gradient(145deg, #1c2532, #0f141c);
+ display: grid;
+ place-items: center;
+ border: 1px solid var(--border);
+}
+.card__image img {
+ width: 70px;
+ height: 70px;
+ object-fit: cover;
+ border-radius: 10px;
+ filter: drop-shadow(0 8px 18px rgba(0,0,0,0.35));
+}
+.card__title {
+ font-weight: 700;
+ font-size: 1.1rem;
+}
+.card__status {
+ grid-area: status;
+ justify-self: end;
+ align-self: start;
+ display: inline-flex;
+ align-items: center;
+ gap: 6px;
+ padding: 6px 9px;
+ border-radius: 999px;
+ font-weight: 700;
+ background: rgba(46, 204, 113, 0.12);
+ color: #2ecc71;
+ border: 1px solid rgba(46, 204, 113, 0.3);
+ font-size: 0.9rem;
+}
+.card__status.offline {
+ background: rgba(231, 76, 60, 0.12);
+ color: #e74c3c;
+ border-color: rgba(231, 76, 60, 0.35);
+}
+.badge-aux {
+ margin-left: 6px;
+}
+.dot {
+ width: 10px;
+ height: 10px;
+ border-radius: 50%;
+ background: currentColor;
+}
+
+.card__body {
+ grid-area: body;
+ display: grid;
+ grid-template-columns: auto 1fr;
+ gap: 12px;
+ align-items: center;
+}
+.card__meta {
+ display: grid;
+ grid-template-columns: 1fr;
+ gap: 4px;
+ color: var(--text);
+ font-size: 0.95rem;
+}
+.card__meta .label { color: var(--text-dim); display: inline-block; min-width: 72px; }
+.card__meta .value { color: var(--text); font-weight: 700; }
+
+.temp {
+ display: flex;
+ gap: 6px;
+}
+.temp .label { color: var(--text-dim); font-size: 0.9rem; }
+.temp .value { color: var(--text); font-weight: 600; }
+
+.card__progress {
+ grid-area: progress;
+ display: flex;
+ align-items: center;
+ gap: 10px;
+}
+.progress-bar {
+ flex: 1;
+ height: 6px;
+ background: #0d1117;
+ border-radius: 999px;
+ overflow: hidden;
+ border: 1px solid var(--border);
+}
+.progress-bar__fill {
+ height: 100%;
+ width: 0%;
+ border-radius: inherit;
+ background: linear-gradient(90deg, #f39c12, #e67e22);
+ transition: width 0.25s ease;
+}
+.progress-value { color: var(--text-dim); font-weight: 700; }
+
+.card__foot {
+ grid-area: foot;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ color: var(--text-dim);
+ font-size: 0.9rem;
+}
+.card__actions {
+ display: flex;
+ gap: 8px;
+}
+.pill {
+ border-radius: 999px;
+ padding: 6px 10px;
+ border: 1px solid var(--border);
+ color: var(--text-dim);
+ font-weight: 600;
+}
+
+/* Modal */
+.modal {
+ position: fixed;
+ inset: 0;
+ background: rgba(0, 0, 0, 0.55);
+ display: none;
+ align-items: center;
+ justify-content: center;
+ z-index: 100;
+}
+.modal.show { display: flex; }
+.modal__dialog {
+ background: #121821;
+ border: 1px solid var(--border);
+ border-radius: 14px;
+ min-width: 320px;
+ max-width: 480px;
+ width: 100%;
+ padding: 18px 20px;
+ box-shadow: var(--shadow);
+}
+.modal__header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: 12px;
+}
+.modal__close {
+ background: none;
+ border: none;
+ color: var(--text);
+ font-size: 22px;
+ cursor: pointer;
+}
+.modal__body { display: flex; flex-direction: column; gap: 10px; }
+.field { display: flex; flex-direction: column; gap: 4px; color: var(--text-dim); }
+.field input, .field select {
+ background: #0f141c;
+ border: 1px solid var(--border);
+ border-radius: 10px;
+ padding: 10px 12px;
+ color: var(--text);
+}
+.field-row { display: grid; grid-template-columns: 1fr 1fr; gap: 10px; }
+.toggle { display: flex; align-items: center; gap: 10px; color: var(--text); }
+.modal__actions { display: flex; justify-content: flex-end; gap: 10px; margin-top: 10px; }
+
+@media (max-width: 960px) {
+ body.page {
+ grid-template-columns: 1fr;
+ }
+ .sidebar { flex-direction: row; align-items: center; }
+ .sidebar__nav { flex-direction: row; flex-wrap: wrap; }
+ .content { padding: 18px; }
+}
diff --git a/frontend/static/printers_modern.js b/frontend/static/printers_modern.js
new file mode 100644
index 0000000..f03f6ed
--- /dev/null
+++ b/frontend/static/printers_modern.js
@@ -0,0 +1,200 @@
+const container = document.getElementById("cardsContainer");
+const refreshBtn = document.getElementById("refreshBtn");
+
+async function loadPrintersModern() {
+ if (!container) return;
+ container.innerHTML = 'Lade Drucker …
';
+ try {
+ const res = await fetch("/api/printers/");
+ if (!res.ok) throw new Error("Request failed");
+ const data = await res.json();
+ // versuche Live-State mitzuladen und nach cloud_serial zu mappen
+ const liveRes = await fetch("/api/live-state/");
+ const liveData = liveRes.ok ? await liveRes.json() : {};
+ const liveMap = Object.fromEntries(
+ Object.entries(liveData).map(([k, v]) => {
+ let live = v.payload && v.payload.print ? { ...v.payload.print } : { ...v.payload };
+ if (v.payload?.ams?.ams && Array.isArray(v.payload.ams.ams) && v.payload.ams.ams[0]) {
+ live.tray = v.payload.ams.ams[0].tray;
+ live.tray_now = v.payload.ams.ams[0].tray_now;
+ }
+ return [k, live];
+ })
+ );
+
+ if (!Array.isArray(data) || data.length === 0) {
+ container.innerHTML = 'Keine Drucker konfiguriert.
';
+ return;
+ }
+ container.innerHTML = data.map(p => renderCard({ ...p, live: liveMap[p.cloud_serial] || null })).join("");
+ } catch (err) {
+ console.error(err);
+ container.innerHTML = 'Fehler beim Laden der Drucker.
';
+ }
+}
+
+function renderCard(printer) {
+ const online = printer.online === true;
+ const onlineLabel = online ? "Online" : printer.online === null ? "Manuell" : "Offline";
+ const icon = printer.image_url
+ ? ` `
+ : renderPrinterIcon(printer.printer_type);
+ const nozzle = printer.live?.nozzle_temper ?? printer.live?.nozzle_temp ?? printer.nozzle_temp ?? printer.nozzle_temper ?? printer.live?.nozzle ?? printer.live?.extruder_temp ?? printer.temperature?.nozzle ?? "—";
+ const bed = printer.live?.bed_temper ?? printer.live?.bed_temp ?? printer.bed_temp ?? printer.bed_temper ?? printer.live?.bed ?? printer.temperature?.bed ?? "—";
+ const filament = printer.filament_material || printer.printer_type?.toUpperCase() || "—";
+ const progress = printer.progress_percent ?? 0;
+ const progressColor = pickProgressColor(progress);
+
+ return `
+
+
+
${printer.name || "Unbenannt"}
+
+
+ ${onlineLabel}
+
+
+ Auto Connect
+
+
+
+ ⋮
+
+
+
+
+
+
+
+
+
${Math.round(progress)}%
+
+
+ `;
+}
+
+function pickProgressColor(val) {
+ const v = Math.min(Math.max(val ?? 0, 0), 100);
+ if (v <= 20) return "linear-gradient(90deg, #e74c3c, #c0392b)";
+ if (v <= 80) return "linear-gradient(90deg, #f39c12, #e67e22)";
+ return "linear-gradient(90deg, #2ecc71, #27ae60)";
+}
+
+function renderPrinterIcon(type) {
+ if (type === "bambu" || type === "bambu_lab" || !type) {
+ return ` `;
+ }
+ const accent = type === "klipper" ? "#3498db" : type === "manual" ? "#95a5a6" : "#f39c12";
+ return `
+
+
+
+
+
+
+
+ `;
+}
+
+document.addEventListener("DOMContentLoaded", () => {
+ loadPrintersModern();
+ if (refreshBtn) refreshBtn.addEventListener("click", loadPrintersModern);
+});
+
+// Kebab-Menü
+function toggleMenu(evt, id) {
+ evt.stopPropagation();
+ document.querySelectorAll(".kebab-menu").forEach(m => m.classList.remove("open"));
+ const menu = document.getElementById(`menu-${id}`);
+ if (menu) menu.classList.toggle("open");
+ document.addEventListener("click", () => {
+ document.querySelectorAll(".kebab-menu").forEach(m => m.classList.remove("open"));
+ }, { once: true });
+}
+
+// Edit-Modal
+function closeEditModal() {
+ const modal = document.getElementById("printerEditModal");
+ if (modal) modal.classList.remove("show");
+}
+
+async function openEditModal(id) {
+ try {
+ const res = await fetch(`/api/printers/${id}`);
+ if (!res.ok) throw new Error("Laden fehlgeschlagen");
+ const p = await res.json();
+ document.getElementById("editId").value = p.id;
+ document.getElementById("editName").value = p.name || "";
+ document.getElementById("editType").value = p.printer_type || "";
+ document.getElementById("editIp").value = p.ip_address || "";
+ document.getElementById("editPort").value = p.port || "";
+ document.getElementById("editSerial").value = p.cloud_serial || "";
+ document.getElementById("editApiKey").value = p.api_key || "";
+ document.getElementById("editAutoConnect").checked = !!p.auto_connect;
+ document.getElementById("printerEditModal").classList.add("show");
+ } catch (e) {
+ alert("Fehler beim Laden des Druckers");
+ }
+}
+
+async function savePrinterEdit(ev) {
+ ev.preventDefault();
+ const id = document.getElementById("editId").value;
+ const payload = {
+ name: document.getElementById("editName").value,
+ printer_type: document.getElementById("editType").value,
+ ip_address: document.getElementById("editIp").value,
+ port: document.getElementById("editPort").value ? Number(document.getElementById("editPort").value) : null,
+ cloud_serial: document.getElementById("editSerial").value,
+ api_key: document.getElementById("editApiKey").value,
+ auto_connect: document.getElementById("editAutoConnect").checked
+ };
+ try {
+ const res = await fetch(`/api/printers/${id}`, {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(payload)
+ });
+ if (!res.ok) throw new Error("Speichern fehlgeschlagen");
+ closeEditModal();
+ loadPrintersModern();
+ } catch (e) {
+ alert("Fehler beim Speichern");
+ }
+}
+
+// Verbindung testen
+async function testConnection(id) {
+ try {
+ const res = await fetch(`/api/printers/${id}/test`, { method: "POST" });
+ const data = await res.json();
+ alert(data.message || "Test abgeschlossen");
+ } catch (e) {
+ alert("Fehler beim Verbindungstest");
+ }
+}
+
+// Löschen
+async function deletePrinter(id) {
+ if (!confirm("Drucker wirklich löschen?")) return;
+ try {
+ const res = await fetch(`/api/printers/${id}`, { method: "DELETE" });
+ if (!res.ok) throw new Error();
+ loadPrintersModern();
+ } catch (e) {
+ alert("Fehler beim Löschen");
+ }
+}
diff --git a/frontend/static/spools.css b/frontend/static/spools.css
new file mode 100644
index 0000000..7bdace0
--- /dev/null
+++ b/frontend/static/spools.css
@@ -0,0 +1,769 @@
+/* Spools Page Styles */
+
+/* Stats Grid */
+.stats-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
+ gap: 1.5rem;
+ margin-bottom: 2rem;
+}
+
+.stat-card {
+ background: linear-gradient(135deg, var(--panel) 0%, rgba(20, 26, 34, 0.8) 100%);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 1.5rem;
+ display: flex;
+ align-items: center;
+ gap: 1rem;
+ transition: all 0.3s ease;
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+
+.stat-card:hover {
+ transform: translateY(-2px);
+ box-shadow: 0 6px 12px rgba(0, 0, 0, 0.15),
+ 0 0 20px rgba(243, 156, 18, 0.1);
+ border-color: var(--accent);
+}
+
+.stat-icon {
+ font-size: 2.5rem;
+ opacity: 0.9;
+}
+
+.stat-content {
+ flex: 1;
+}
+
+.stat-label {
+ font-size: 0.875rem;
+ color: var(--text-dim);
+ margin-bottom: 0.25rem;
+ text-transform: uppercase;
+ letter-spacing: 0.5px;
+}
+
+.stat-value {
+ font-size: 2rem;
+ font-weight: 700;
+ color: var(--text);
+ background: linear-gradient(135deg, var(--accent) 0%, #e67e22 100%);
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ background-clip: text;
+}
+
+/* Filter Section */
+.filter-section {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ padding: 1.5rem;
+ margin-bottom: 2rem;
+}
+
+.filter-group {
+ display: flex;
+ gap: 1rem;
+ flex-wrap: wrap;
+ align-items: center;
+ margin-bottom: 1rem;
+}
+
+.search-input {
+ flex: 1;
+ min-width: 200px;
+ padding: 0.75rem 1rem;
+ background: var(--bg);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-size: 0.9375rem;
+ transition: all 0.2s ease;
+}
+
+.search-input:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(243, 156, 18, 0.1);
+}
+
+.filter-select {
+ padding: 0.75rem 2.5rem 0.75rem 1rem;
+ background: var(--bg);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-size: 0.9375rem;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ appearance: none;
+ background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23f39c12' d='M6 9L1 4h10z'/%3E%3C/svg%3E");
+ background-repeat: no-repeat;
+ background-position: right 1rem center;
+}
+
+.filter-select:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(243, 156, 18, 0.1);
+}
+
+.filter-select option {
+ background: var(--panel);
+ color: var(--text);
+}
+
+.result-count {
+ color: var(--text-dim);
+ font-size: 0.9375rem;
+}
+
+.result-count span {
+ color: var(--accent);
+ font-weight: 600;
+}
+
+/* Table Section */
+.table-section {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ overflow: hidden;
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+
+.table-container {
+ overflow-x: auto;
+}
+
+table {
+ width: 100%;
+ border-collapse: collapse;
+ table-layout: fixed;
+}
+
+thead {
+ background: linear-gradient(135deg, rgba(243, 156, 18, 0.15) 0%, rgba(230, 126, 34, 0.1) 100%);
+ border-bottom: 2px solid var(--accent);
+}
+
+thead th {
+ padding: 1rem;
+ text-align: left;
+ font-weight: 600;
+ color: var(--accent);
+ text-transform: uppercase;
+ font-size: 0.8125rem;
+ letter-spacing: 0.5px;
+}
+
+/* Spaltenbreiten optimieren */
+thead th:nth-child(1) { width: 10%; } /* # (Spulen-Nummer) */
+thead th:nth-child(2) { width: 30%; } /* Material */
+thead th:nth-child(3) { width: 20%; } /* Restgewicht */
+thead th:nth-child(4) { width: 15%; } /* Status */
+thead th:nth-child(5) { width: 25%; } /* Aktionen */
+
+tbody tr {
+ border-bottom: 1px solid var(--border);
+ transition: all 0.2s ease;
+}
+
+tbody tr:hover {
+ background: rgba(243, 156, 18, 0.05);
+ box-shadow: inset 0 0 0 1px rgba(243, 156, 18, 0.1);
+}
+
+tbody td {
+ padding: 0.875rem 1rem;
+ color: var(--text);
+ overflow: hidden;
+ text-overflow: ellipsis;
+}
+
+tbody td strong {
+ color: var(--text);
+}
+
+tbody td small {
+ font-size: 0.8125rem;
+}
+
+/* Kompakte Darstellung */
+tbody td:nth-child(2), /* Restgewicht */
+tbody td:nth-child(3) { /* Status */
+ white-space: nowrap;
+}
+
+/* Color Preview */
+.color-preview {
+ display: inline-block;
+ width: 24px;
+ height: 24px;
+ border-radius: 6px;
+ border: 2px solid var(--border);
+ box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
+}
+
+/* Progress Bar */
+.progress-bar {
+ width: 100%;
+ height: 8px;
+ background: var(--bg);
+ border-radius: 4px;
+ overflow: hidden;
+ margin-bottom: 4px;
+ border: 1px solid var(--border);
+}
+
+.progress-fill {
+ height: 100%;
+ transition: width 0.3s ease;
+ background: var(--accent);
+ border-radius: 4px;
+}
+
+
+/* Spulen-Nummern-Badges (NEU) */
+.spool-number-badge {
+ display: inline-flex;
+ align-items: center;
+ justify-content: center;
+ padding: 0.25rem 0.75rem;
+ border-radius: 12px;
+ font-size: 0.75rem;
+ font-weight: 700;
+ text-transform: uppercase;
+ letter-spacing: 0.3px;
+ white-space: nowrap;
+}
+
+.manual-badge {
+ background: linear-gradient(135deg, rgba(243, 156, 18, 0.2) 0%, rgba(230, 126, 34, 0.15) 100%);
+ color: var(--accent);
+ border: 1px solid rgba(243, 156, 18, 0.3);
+ box-shadow: 0 2px 4px rgba(243, 156, 18, 0.1);
+}
+
+.rfid-badge {
+ background: linear-gradient(135deg, rgba(52, 152, 219, 0.2) 0%, rgba(41, 128, 185, 0.15) 100%);
+ color: #3498db;
+ border: 1px solid rgba(52, 152, 219, 0.3);
+ box-shadow: 0 2px 4px rgba(52, 152, 219, 0.1);
+}
+
+/* Status Badges */
+.status-badge {
+ display: inline-block;
+ padding: 0.25rem 0.75rem;
+ border-radius: 12px;
+ font-size: 0.75rem;
+ font-weight: 600;
+ text-transform: uppercase;
+ letter-spacing: 0.3px;
+}
+
+.status-badge.status-online {
+ background: rgba(46, 213, 115, 0.2);
+ color: var(--success);
+}
+
+.status-badge.status-printing {
+ background: rgba(243, 156, 18, 0.2);
+ color: var(--accent);
+}
+
+.status-badge.status-offline {
+ background: rgba(231, 76, 60, 0.2);
+ color: var(--error);
+}
+
+.status-badge.status-secondary {
+ background: rgba(255, 167, 38, 0.2);
+ color: var(--warning);
+}
+
+/* Table Actions */
+.table-actions {
+ display: flex;
+ gap: 0.5rem;
+}
+
+.btn-icon {
+ padding: 0.5rem 0.75rem;
+ background: transparent;
+ border: 1px solid var(--border);
+ border-radius: 6px;
+ color: var(--text);
+ cursor: pointer;
+ transition: all 0.2s ease;
+ font-size: 1rem;
+}
+
+.btn-icon:hover {
+ background: rgba(243, 156, 18, 0.1);
+ border-color: var(--accent);
+ transform: translateY(-2px);
+}
+
+.btn-icon.btn-delete:hover {
+ background: rgba(231, 76, 60, 0.1);
+ border-color: var(--error);
+ color: var(--error);
+}
+
+/* Empty State */
+.empty-state {
+ text-align: center;
+ padding: 4rem 2rem;
+ color: var(--text-dim);
+}
+
+.empty-state-icon {
+ font-size: 4rem;
+ margin-bottom: 1rem;
+ opacity: 0.5;
+}
+
+.empty-state h3 {
+ color: var(--text);
+ margin-bottom: 0.5rem;
+}
+
+.empty-state p {
+ margin-bottom: 1.5rem;
+}
+
+/* Modal Styles */
+.modal {
+ display: none;
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ padding: 20px;
+ background: rgba(0, 0, 0, 0.7);
+ backdrop-filter: blur(4px);
+ z-index: 1000;
+ align-items: flex-start;
+ justify-content: center;
+ overflow-y: auto;
+}
+
+.modal.active {
+ display: flex;
+}
+
+.modal-content {
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 12px;
+ width: 90%;
+ max-width: 900px; /* Vergrößert für 2-Spalten */
+ box-shadow: 0 20px 60px rgba(0, 0, 0, 0.3);
+ animation: modalSlideIn 0.3s ease;
+}
+
+.modal-small {
+ max-width: 400px;
+}
+
+@keyframes modalSlideIn {
+ from {
+ opacity: 0;
+ transform: translateY(-20px);
+ }
+ to {
+ opacity: 1;
+ transform: translateY(0);
+ }
+}
+
+.modal-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 1.5rem;
+ border-bottom: 1px solid var(--border);
+}
+
+.modal-header h2 {
+ margin: 0;
+ color: var(--text);
+ font-size: 1.5rem;
+}
+
+.modal-close {
+ background: transparent;
+ border: none;
+ color: var(--text-dim);
+ font-size: 1.5rem;
+ cursor: pointer;
+ padding: 0.25rem 0.5rem;
+ transition: all 0.2s ease;
+}
+
+.modal-close:hover {
+ color: var(--error);
+ transform: rotate(90deg);
+}
+
+.modal-body {
+ padding: 1.5rem;
+}
+
+/* Form Styles */
+.form-columns {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 2rem;
+ padding: 1.5rem;
+}
+
+.form-column {
+ display: flex;
+ flex-direction: column;
+}
+
+.form-group {
+ margin-bottom: 1.25rem;
+}
+
+.form-group label {
+ display: block;
+ margin-bottom: 0.5rem;
+ color: var(--text);
+ font-weight: 500;
+ font-size: 0.9375rem;
+}
+
+.color-picker-row {
+ display: grid;
+ grid-template-columns: auto minmax(120px, 1fr);
+ gap: 0.75rem;
+ align-items: center;
+}
+
+.color-picker-row .color-input {
+ width: 80px;
+ height: 46px;
+ padding: 0;
+ border-radius: 10px;
+ border: 1px solid var(--border);
+}
+
+.color-picker-row .color-hex {
+ font-family: "Fira Code", "JetBrains Mono", monospace;
+ font-size: 0.9rem;
+ max-width: 150px;
+}
+
+.form-control {
+ width: 100%;
+ padding: 0.75rem;
+ background: var(--bg);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ font-size: 0.9375rem;
+ transition: all 0.2s ease;
+}
+
+.form-control:focus {
+ outline: none;
+ border-color: var(--accent);
+ box-shadow: 0 0 0 3px rgba(243, 156, 18, 0.1);
+}
+
+.form-row {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 1rem;
+}
+
+.checkbox-group {
+ display: flex;
+ flex-direction: column;
+ gap: 0.75rem;
+}
+
+.checkbox-group label {
+ display: flex;
+ align-items: center;
+ gap: 0.5rem;
+ cursor: pointer;
+ margin-bottom: 0;
+}
+
+.checkbox-group input[type="checkbox"] {
+ width: 18px;
+ height: 18px;
+ cursor: pointer;
+ accent-color: var(--accent);
+}
+
+.form-info {
+ background: rgba(243, 156, 18, 0.1);
+ border: 1px solid rgba(243, 156, 18, 0.3);
+ border-radius: 8px;
+ padding: 0.75rem 1rem;
+ color: var(--text-dim);
+ font-size: 0.875rem;
+ margin-top: 0.5rem;
+}
+
+.form-info strong {
+ color: var(--accent);
+}
+
+.modal-actions {
+ display: flex;
+ justify-content: flex-end;
+ gap: 1rem;
+ padding: 1.5rem;
+ border-top: 1px solid var(--border);
+}
+
+/* Buttons */
+.btn {
+ padding: 0.75rem 1.5rem;
+ border-radius: 8px;
+ font-weight: 600;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ border: none;
+ font-size: 0.9375rem;
+}
+
+.btn-primary {
+ background: linear-gradient(135deg, var(--accent) 0%, #e67e22 100%);
+ color: #fff;
+}
+
+.btn-primary:hover {
+ transform: translateY(-2px);
+ box-shadow: 0 4px 12px rgba(243, 156, 18, 0.4);
+}
+
+.btn-secondary {
+ background: transparent;
+ border: 1px solid var(--border);
+ color: var(--text);
+}
+
+.btn-secondary:hover {
+ background: rgba(243, 156, 18, 0.1);
+ border-color: var(--accent);
+}
+
+.btn-danger {
+ background: linear-gradient(135deg, var(--error) 0%, #c0392b 100%);
+ color: #fff;
+}
+
+.btn-danger:hover {
+ transform: translateY(-2px);
+ box-shadow: 0 4px 12px rgba(231, 76, 60, 0.4);
+}
+
+/* Notification Toast */
+.notification {
+ position: fixed;
+ bottom: 2rem;
+ right: 2rem;
+ padding: 1rem 1.5rem;
+ background: var(--panel);
+ border: 1px solid var(--border);
+ border-radius: 8px;
+ color: var(--text);
+ box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
+ opacity: 0;
+ transform: translateY(20px);
+ transition: all 0.3s ease;
+ z-index: 2000;
+}
+
+.notification.show {
+ opacity: 1;
+ transform: translateY(0);
+}
+
+.notification-success {
+ border-left: 4px solid var(--success);
+}
+
+.notification-error {
+ border-left: 4px solid var(--error);
+}
+
+.notification-warning {
+ border-left: 4px solid var(--warning);
+}
+
+.notification-info {
+ border-left: 4px solid var(--accent);
+}
+
+/* Warnungen Stat Card */
+.stat-card-warning {
+ cursor: pointer;
+ border-color: rgba(255, 167, 38, 0.3);
+ background: linear-gradient(135deg, rgba(255, 167, 38, 0.1) 0%, rgba(243, 156, 18, 0.05) 100%);
+}
+
+.stat-card-warning:hover {
+ border-color: var(--warning);
+}
+
+.stat-card-warning .stat-value {
+ color: var(--warning);
+ background: none;
+ -webkit-text-fill-color: var(--warning);
+}
+
+.btn-details {
+ margin-top: 0.5rem;
+ padding: 0.25rem 0.75rem;
+ background: rgba(255, 167, 38, 0.2);
+ border: 1px solid rgba(255, 167, 38, 0.3);
+ border-radius: 6px;
+ color: var(--warning);
+ font-size: 0.75rem;
+ font-weight: 600;
+ cursor: pointer;
+ transition: all 0.2s ease;
+ display: flex;
+ align-items: center;
+ gap: 0.5rem;
+}
+
+.btn-details:hover {
+ background: rgba(255, 167, 38, 0.3);
+ transform: translateY(-1px);
+}
+
+/* Warnungen Details Section */
+.warnings-details {
+ margin-bottom: 2rem;
+ animation: slideDown 0.3s ease;
+}
+
+@keyframes slideDown {
+ from {
+ opacity: 0;
+ transform: translateY(-10px);
+ }
+ to {
+ opacity: 1;
+ transform: translateY(0);
+ }
+}
+
+.warnings-card {
+ background: linear-gradient(135deg, rgba(255, 167, 38, 0.1) 0%, rgba(243, 156, 18, 0.05) 100%);
+ border: 1px solid rgba(255, 167, 38, 0.3);
+ border-radius: 12px;
+ overflow: hidden;
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+
+.warnings-list {
+ max-height: 300px;
+ overflow-y: auto;
+}
+
+.warning-item {
+ display: flex;
+ align-items: center;
+ gap: 1rem;
+ padding: 1rem 1.25rem;
+ border-bottom: 1px solid rgba(255, 167, 38, 0.1);
+ transition: background 0.2s ease;
+}
+
+.warning-item:last-child {
+ border-bottom: none;
+}
+
+.warning-item:hover {
+ background: rgba(255, 167, 38, 0.05);
+}
+
+.warning-item .warning-icon {
+ font-size: 1.5rem;
+}
+
+.warning-content {
+ flex: 1;
+}
+
+.warning-content strong {
+ display: block;
+ color: var(--text);
+ margin-bottom: 0.25rem;
+}
+
+.warning-content small {
+ color: var(--text-dim);
+ font-size: 0.8125rem;
+}
+
+.warning-value {
+ text-align: right;
+}
+
+.warning-value strong {
+ display: block;
+ color: var(--warning);
+ font-size: 1.125rem;
+}
+
+.warning-value small {
+ color: var(--text-dim);
+ font-size: 0.8125rem;
+}
+
+/* Responsive */
+@media (max-width: 768px) {
+ .stats-grid {
+ grid-template-columns: 1fr;
+ }
+
+ .filter-group {
+ flex-direction: column;
+ align-items: stretch;
+ }
+
+ .search-input,
+ .filter-select {
+ width: 100%;
+ }
+
+ .form-row {
+ grid-template-columns: 1fr;
+ }
+
+ /* 2-Spalten-Layout auf 1 Spalte bei kleinen Bildschirmen */
+ .form-columns {
+ grid-template-columns: 1fr;
+ gap: 0;
+ padding: 1rem;
+ }
+
+ .modal-content {
+ width: 95%;
+ max-height: 95vh;
+ }
+
+ table {
+ font-size: 0.875rem;
+ }
+
+ thead th,
+ tbody td {
+ padding: 0.75rem 0.5rem;
+ }
+}
diff --git a/frontend/static/spools.js b/frontend/static/spools.js
new file mode 100644
index 0000000..2bd7aa6
--- /dev/null
+++ b/frontend/static/spools.js
@@ -0,0 +1,661 @@
+// Spools Management JavaScript
+
+let spools = [];
+let materials = [];
+let currentSpoolId = null;
+let deleteTargetId = null;
+
+function toNumber(val) {
+ const n = parseFloat(val);
+ return isNaN(n) ? null : n;
+}
+
+// === INIT ===
+document.addEventListener('DOMContentLoaded', () => {
+ loadData();
+ setupEventListeners();
+});
+
+function setupEventListeners() {
+ // Search
+ document.getElementById('searchInput').addEventListener('input', filterSpools);
+
+ // Filters
+ document.getElementById('filterMaterial').addEventListener('change', filterSpools);
+ document.getElementById('filterStatus').addEventListener('change', filterSpools);
+
+ // Color picker sync
+ const colorPicker = document.getElementById('spoolColor');
+ const colorHex = document.getElementById('spoolColorHex');
+
+ if (colorPicker && colorHex) {
+ colorPicker.addEventListener('input', (e) => {
+ colorHex.value = e.target.value;
+ });
+ }
+}
+
+// === LOAD DATA ===
+async function loadData() {
+ try {
+ // Lade Materials ZUERST, dann Spools (wichtig für korrekte Anzeige)
+ await loadMaterials();
+ await loadSpools();
+ await checkUnnumberedSpools(); // Prüfe auf Spulen ohne Nummer
+ } catch (error) {
+ console.error('Fehler beim Laden:', error);
+ }
+}
+
+async function loadMaterials() {
+ try {
+ const response = await fetch('/api/materials/');
+ materials = await response.json();
+ updateMaterialSelects();
+ } catch (error) {
+ console.error('Fehler beim Laden der Materialien:', error);
+ }
+}
+
+async function loadSpools() {
+ try {
+ const response = await fetch('/api/spools/');
+ spools = await response.json();
+
+ updateStats();
+ updateWarnings();
+ renderSpools(spools);
+
+ } catch (error) {
+ console.error('Fehler beim Laden der Spulen:', error);
+ showNotification('Fehler beim Laden der Spulen', 'error');
+ }
+}
+
+function updateMaterialSelects() {
+ const select = document.getElementById('spoolMaterial');
+ const filter = document.getElementById('filterMaterial');
+
+ // Form select
+ select.innerHTML = '-- Material wählen -- ';
+ materials.forEach(m => {
+ const option = document.createElement('option');
+ option.value = m.id;
+ option.textContent = `${m.name}${m.brand ? ' (' + m.brand + ')' : ''}`;
+ select.appendChild(option);
+ });
+
+ // Filter select
+ filter.innerHTML = 'Alle Materialien ';
+ materials.forEach(m => {
+ const option = document.createElement('option');
+ option.value = m.id;
+ option.textContent = `${m.name}${m.brand ? ' (' + m.brand + ')' : ''}`;
+ filter.appendChild(option);
+ });
+}
+
+function updateStats() {
+ // Statistiken nur für Nicht-AMS-Spulen
+ const total = spools.length;
+ const active = spools.filter(s => !s.is_empty).length;
+ const empty = spools.filter(s => s.is_empty).length;
+ const totalWeight = spools.reduce((sum, s) => {
+ if (s.is_empty) return sum;
+ const wf = toNumber(s.weight_full) || 0;
+ const rp = toNumber(s.remain_percent);
+ const remaining = (toNumber(s.weight) ?? toNumber(s.weight_current) ?? toNumber(s.weight_remaining) ?? (rp != null && wf ? (rp / 100) * wf : wf)) ?? 0;
+ return sum + (remaining || 0);
+ }, 0);
+
+ document.getElementById('statTotal').textContent = total;
+ document.getElementById('statActive').textContent = active;
+ document.getElementById('statEmpty').textContent = empty;
+ document.getElementById('statWeight').textContent = Math.round(totalWeight);
+}
+
+function toggleWarnings() {
+ const details = document.getElementById('warningsDetails');
+ const icon = document.getElementById('warningToggleIcon');
+ const text = document.getElementById('warningToggleText');
+
+ if (details.style.display === 'none') {
+ details.style.display = 'block';
+ icon.textContent = '▲';
+ text.textContent = 'Verbergen';
+ } else {
+ details.style.display = 'none';
+ icon.textContent = '▼';
+ text.textContent = 'Details';
+ }
+}
+
+function updateWarnings() {
+ // Zähle Spulen mit niedrigem Bestand (auch im AMS)
+ const lowSpools = spools.filter(s => {
+ if (s.is_empty) return false;
+ const remaining = toNumber(s.weight) ?? toNumber(s.weight_current) ?? toNumber(s.weight_remaining) ?? 0;
+ const percentage = toNumber(s.remain_percent) ?? 0;
+ return percentage <= 20 || remaining < 200;
+ });
+
+ const warningCount = lowSpools.length;
+ document.getElementById('warningCount').textContent = warningCount;
+ // Zeige/Verstecke Warnungs-Card
+ const warningCard = document.getElementById('warningCard');
+ if (warningCount === 0) {
+ warningCard.style.display = 'none';
+ } else {
+ warningCard.style.display = 'flex';
+ }
+ const warningList = document.getElementById('warningList');
+
+ if (warningCount === 0) {
+ warningList.innerHTML = 'Keine Warnungen \u2713
';
+ } else {
+ warningList.innerHTML = lowSpools.map(s => {
+ const material = materials.find(m => m.id === s.material_id);
+ const remaining = Math.round(toNumber(s.weight) ?? toNumber(s.weight_current) ?? toNumber(s.weight_remaining) ?? 0);
+ const percentage = Math.round(toNumber(s.remain_percent) ?? 0);
+ const displayName = s.label || `Spule #${s.id.substring(0, 8)}`;
+ const location = s.ams_slot != null ? `AMS Slot ${s.ams_slot}` : 'Lager';
+
+ return `
+
+
⚠️
+
+ ${displayName}
+ ${material ? material.name : 'Unbekannt'} - ${location}
+
+
+ ${remaining}g
+ ${percentage}%
+
+
+ `;
+ }).join('');
+ }
+}
+
+function renderSpools(spoolsToRender) {
+ const container = document.getElementById('spoolsTable');
+ document.getElementById('spoolCount').textContent = spoolsToRender.length;
+
+ if (spoolsToRender.length === 0) {
+ container.innerHTML = `
+
+
🧵
+
Keine Spulen gefunden
+
Fügen Sie Ihre erste Spule hinzu!
+
+ ➕ Spule hinzufügen
+
+
+ `;
+ return;
+ }
+
+ container.innerHTML = `
+
+
+
+
+ #
+ Material
+ Restgewicht
+ Status
+ Aktionen
+
+
+
+ ${spoolsToRender.map(s => {
+ const material = materials.find(m => m.id === s.material_id);
+ const weightFull = toNumber(s.weight_full) ?? 0;
+ const remainPercent = toNumber(s.remain_percent);
+ const remaining = toNumber(s.weight) ?? toNumber(s.weight_current) ?? toNumber(s.weight_remaining) ?? ((remainPercent != null && weightFull) ? (remainPercent / 100) * weightFull : (weightFull || 0));
+ const trayColor = s.tray_color ? `#${s.tray_color.substring(0, 6)}` : null;
+
+ // NEU: Spulen-Nummern-System
+ const isRFID = s.tray_uuid != null;
+ const spoolNumber = s.spool_number;
+ let numberDisplay = '';
+
+ if (isRFID) {
+ numberDisplay = '📡 RFID ';
+ } else if (spoolNumber) {
+ numberDisplay = `#${spoolNumber} `;
+ } else {
+ numberDisplay = '- ';
+ }
+
+ // Status anzeigen - Priorisiert manuellen Status, dann Leer-Status
+ let statusBadge = '';
+ let locationBadge = '';
+
+ // 1. Haupt-Status (Leer oder nicht)
+ if (remainPercent === 0 || (remaining || 0) <= 0 || s.is_empty) {
+ statusBadge = 'Leer ';
+ } else if (remainPercent != null && remainPercent <= 20) {
+ statusBadge = 'Fast leer ';
+ } else if (remaining < 200) {
+ statusBadge = 'Wenig ';
+ }
+
+ // 2. Lager-Status (wo ist die Spule?)
+ if (s.status === 'Lager') {
+ locationBadge = '🏪 Lager ';
+ } else if (s.status === 'AMS') {
+ locationBadge = '📦 AMS ';
+ } else if (s.status === 'In Benutzung') {
+ locationBadge = '🖨️ In Benutzung ';
+ } else if (s.used_count && s.used_count > 0) {
+ locationBadge = 'Gebraucht ';
+ } else if (s.is_open) {
+ locationBadge = 'Offen ';
+ } else {
+ locationBadge = 'Neu ';
+ }
+
+ statusBadge = statusBadge || locationBadge;
+
+ return `
+
+ ${numberDisplay}
+
+ ${material ? `
+
+ ${trayColor ? `
` : ''}
+
+ ${material.name}
+ ${material.brand ? `${material.brand} ` : ''}
+
+
+ ` : 'Unbekannt '}
+
+
+ ${(remaining || 0).toFixed(2)}g
+ / ${weightFull}g
+
+ ${statusBadge}
+
+
+
+ ✏️
+
+
+ 🗑️
+
+
+
+
+ `;
+ }).join('')}
+
+
+
+ `;
+}
+
+// === FILTER ===
+function filterSpools() {
+ const searchTerm = document.getElementById('searchInput').value.toLowerCase();
+ const materialFilter = document.getElementById('filterMaterial').value;
+ const statusFilter = document.getElementById('filterStatus').value;
+
+ let filtered = spools;
+
+ // Search filter
+ if (searchTerm) {
+ filtered = filtered.filter(s => {
+ const material = materials.find(m => m.id === s.material_id);
+ return (s.label && s.label.toLowerCase().includes(searchTerm)) ||
+ (material && material.name.toLowerCase().includes(searchTerm)) ||
+ (s.manufacturer_spool_id && s.manufacturer_spool_id.toLowerCase().includes(searchTerm));
+ });
+ }
+
+ // Material filter
+ if (materialFilter) {
+ filtered = filtered.filter(s => s.material_id === materialFilter);
+ }
+
+ // Status filter
+ if (statusFilter === 'active') {
+ filtered = filtered.filter(s => !s.is_empty);
+ } else if (statusFilter === 'empty') {
+ filtered = filtered.filter(s => s.is_empty);
+ } else if (statusFilter === 'low') {
+ filtered = filtered.filter(s => {
+ const wf = toNumber(s.weight_full) || 0;
+ const rp = toNumber(s.remain_percent);
+ const remaining = toNumber(s.weight) ?? toNumber(s.weight_current) ?? toNumber(s.weight_remaining) ?? (rp != null && wf ? (rp / 100) * wf : wf);
+ return (rp === 0) || (!s.is_empty && (remaining || 0) < 200);
+ });
+ } else if (statusFilter === 'ams') {
+ // Filter: Nur Spulen im AMS
+ filtered = filtered.filter(s => s.status === 'AMS' || (s.ams_slot != null && s.printer_id));
+ } else if (statusFilter === 'in-use') {
+ // Filter: In Benutzung (ohne AMS) - manuelle Verwendung
+ filtered = filtered.filter(s => s.status === 'In Benutzung');
+ } else if (statusFilter === 'storage') {
+ // Filter: Im Lager
+ filtered = filtered.filter(s => s.status === 'Lager');
+ } else if (statusFilter === 'no-number') {
+ // NEUE FILTER-OPTION: Spulen ohne Nummer (für Benachrichtigung)
+ filtered = filtered.filter(s => s.spool_number == null);
+ }
+
+ renderSpools(filtered);
+}
+
+function clearFilters() {
+ document.getElementById('searchInput').value = '';
+ document.getElementById('filterMaterial').value = '';
+ document.getElementById('filterStatus').value = '';
+ renderSpools(spools);
+}
+
+// === MODAL MANAGEMENT ===
+function openAddModal() {
+ if (materials.length === 0) {
+ showNotification('Bitte erst ein Material anlegen!', 'warning');
+ setTimeout(() => window.location.href = '/materials', 2000);
+ return;
+ }
+
+ currentSpoolId = null;
+ document.getElementById('modalTitle').textContent = '➕ Spule hinzufügen';
+ document.getElementById('spoolForm').reset();
+ document.getElementById('spoolId').value = '';
+ document.getElementById('spoolWeightFull').value = '1000';
+ document.getElementById('spoolWeightEmpty').value = '250';
+ document.getElementById('spoolColor').value = '#ffffff';
+ document.getElementById('spoolColorHex').value = '#ffffff';
+ document.getElementById('spoolStatus').value = 'Lager'; // Neue Spulen starten als "Lager"
+ document.getElementById('spoolNumber').value = '';
+ document.getElementById('spoolModal').classList.add('active');
+}
+
+function openEditModal(id) {
+ const spool = spools.find(s => s.id === id);
+ if (!spool) return;
+
+ currentSpoolId = id;
+ document.getElementById('modalTitle').textContent = '✏️ Spule bearbeiten';
+
+ document.getElementById('spoolId').value = spool.id;
+ document.getElementById('spoolMaterial').value = spool.material_id;
+ document.getElementById('spoolVendor').value = spool.vendor_id || '';
+ document.getElementById('spoolColor').value = spool.tray_color ? '#' + spool.tray_color : '#ffffff';
+ document.getElementById('spoolColorHex').value = spool.tray_color ? '#' + spool.tray_color : '#ffffff';
+ document.getElementById('spoolWeightFull').value = spool.weight_full;
+ document.getElementById('spoolWeightEmpty').value = spool.weight_empty;
+ document.getElementById('spoolWeightRemaining').value = (toNumber(spool.weight) ?? toNumber(spool.weight_current) ?? toNumber(spool.weight_remaining)) ?? '';
+ document.getElementById('spoolManufacturerId').value = spool.manufacturer_spool_id || '';
+ document.getElementById('spoolNumber').value = spool.spool_number || '';
+
+ // Status setzen (basierend auf is_empty und status)
+ let statusValue = spool.status || 'Lager';
+ if (spool.is_empty) {
+ statusValue = 'Leer';
+ }
+ document.getElementById('spoolStatus').value = statusValue;
+
+ // Status-Dropdown sperren wenn Spule im AMS ist
+ const statusDropdown = document.getElementById('spoolStatus');
+ const statusHint = document.getElementById('spoolStatusHint');
+ const isInAMS = spool.ams_slot != null && spool.printer_id != null;
+
+ if (isInAMS) {
+ statusDropdown.disabled = true;
+ statusDropdown.style.opacity = '0.6';
+ statusDropdown.style.cursor = 'not-allowed';
+ statusHint.textContent = '🔒 Status kann nicht geändert werden (Spule ist im AMS)';
+ statusHint.style.color = 'var(--warning)';
+ } else {
+ statusDropdown.disabled = false;
+ statusDropdown.style.opacity = '1';
+ statusDropdown.style.cursor = 'pointer';
+ statusHint.textContent = '💡 Status wird bei AMS-Nutzung automatisch aktualisiert';
+ statusHint.style.color = 'var(--text-dim)';
+ }
+
+ document.getElementById('spoolModal').classList.add('active');
+}
+
+function closeModal() {
+ document.getElementById('spoolModal').classList.remove('active');
+ currentSpoolId = null;
+}
+
+function openDeleteModal(id) {
+ deleteTargetId = id;
+ document.getElementById('deleteModal').classList.add('active');
+}
+
+function closeDeleteModal() {
+ document.getElementById('deleteModal').classList.remove('active');
+ deleteTargetId = null;
+}
+
+// === SAVE SPOOL ===
+async function saveSpool(event) {
+ event.preventDefault();
+
+ const status = document.getElementById('spoolStatus').value;
+
+ // SICHERHEITSABFRAGE: Wenn Status auf "Leer" gesetzt wird
+ if (status === 'Leer') {
+ const confirmed = confirm(
+ '⚠️ ACHTUNG: Spule als LEER markieren?\n\n' +
+ 'Diese Aktion setzt die Spule als aufgebraucht.\n' +
+ 'Möchten Sie fortfahren?'
+ );
+
+ if (!confirmed) {
+ return; // Abbrechen
+ }
+ }
+
+ const weightRemaining = document.getElementById('spoolWeightRemaining').value;
+ const materialId = document.getElementById('spoolMaterial').value;
+ const colorHex = document.getElementById('spoolColor').value;
+ const trayColor = colorHex?.replace('#', '') || null;
+ const spoolNumber = document.getElementById('spoolNumber').value;
+
+ // Status-basierte Flags
+ const is_empty = (status === 'Leer');
+ const is_open = (status === 'Aktiv' || status === 'In Benutzung' || status === 'Leer');
+
+ const data = {
+ material_id: materialId,
+ weight_full: parseFloat(document.getElementById('spoolWeightFull').value),
+ weight_empty: parseFloat(document.getElementById('spoolWeightEmpty').value),
+ weight_current: weightRemaining ? parseFloat(weightRemaining) : null,
+ vendor_id: document.getElementById('spoolVendor').value || null,
+ manufacturer_spool_id: document.getElementById('spoolManufacturerId').value || null,
+ tray_color: trayColor,
+ is_open: is_open,
+ is_empty: is_empty,
+ spool_number: spoolNumber ? parseInt(spoolNumber) : null,
+ status: status || null
+ };
+
+ try {
+ let response;
+
+ if (currentSpoolId) {
+ // Update existing
+ response = await fetch(`/api/spools/${currentSpoolId}`, {
+ method: 'PUT',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(data)
+ });
+ } else {
+ // Create new
+ response = await fetch('/api/spools/', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(data)
+ });
+ }
+
+ if (response.ok) {
+ showNotification(
+ currentSpoolId ? 'Spule aktualisiert!' : 'Spule erstellt!',
+ 'success'
+ );
+ closeModal();
+ clearFilters();
+ await loadSpools();
+ } else {
+ const errorData = await response.json().catch(() => ({}));
+ const errorMsg = errorData.detail || 'Speichern fehlgeschlagen';
+ throw new Error(errorMsg);
+ }
+
+ } catch (error) {
+ console.error('Fehler beim Speichern:', error);
+ showNotification('Fehler beim Speichern', 'error');
+ }
+}
+
+// === DELETE SPOOL ===
+async function confirmDelete() {
+ if (!deleteTargetId) return;
+
+ try {
+ const response = await fetch(`/api/spools/${deleteTargetId}`, {
+ method: 'DELETE'
+ });
+
+ if (response.ok) {
+ showNotification('Spule gelöscht', 'success');
+ closeDeleteModal();
+ clearFilters();
+ await loadSpools();
+ } else {
+ throw new Error('Löschen fehlgeschlagen');
+ }
+
+ } catch (error) {
+ console.error('Fehler beim Löschen:', error);
+ showNotification('Fehler beim Löschen', 'error');
+ }
+}
+
+// === BENACHRICHTIGUNGS-SYSTEM FÜR SPULEN OHNE NUMMER ===
+async function checkUnnumberedSpools() {
+ try {
+ const response = await fetch('/api/spools/unnumbered');
+ if (!response.ok) return;
+
+ const unnumbered = await response.json();
+
+ // Filtere nur RFID-Spulen (die vom AMS kommen)
+ const rfidSpools = unnumbered.filter(s => s.tray_uuid != null);
+
+ if (rfidSpools.length > 0) {
+ showUnnumberedNotification(rfidSpools.length);
+ }
+ } catch (error) {
+ console.error('Fehler beim Laden unnummerierter Spulen:', error);
+ }
+}
+
+function showUnnumberedNotification(count) {
+ // Prüfe ob bereits eine Benachrichtigung existiert
+ if (document.getElementById('unnumberedNotification')) return;
+
+ const notification = document.createElement('div');
+ notification.id = 'unnumberedNotification';
+ notification.style.cssText = `
+ position: fixed;
+ top: 20px;
+ right: 20px;
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+ color: white;
+ padding: 16px 20px;
+ border-radius: 8px;
+ box-shadow: 0 4px 12px rgba(0,0,0,0.3);
+ z-index: 10000;
+ max-width: 350px;
+ cursor: pointer;
+ animation: slideIn 0.3s ease-out;
+ `;
+
+ notification.innerHTML = `
+
+
⚠️
+
+
+ ${count} neue Spule${count > 1 ? 'n' : ''} ohne Nummer
+
+
+ Klicken zum Nummerieren
+
+
+
→
+
+ `;
+
+ notification.addEventListener('click', () => {
+ // Setze Filter auf "Keine Nummer" und schließe Benachrichtigung
+ document.getElementById('filterStatus').value = 'no-number';
+ filterSpools();
+ notification.remove();
+ });
+
+ document.body.appendChild(notification);
+
+ // Automatisch nach 10 Sekunden ausblenden
+ setTimeout(() => {
+ if (notification.parentElement) {
+ notification.style.animation = 'slideOut 0.3s ease-in';
+ setTimeout(() => notification.remove(), 300);
+ }
+ }, 10000);
+}
+
+// CSS Animations
+const style = document.createElement('style');
+style.textContent = `
+ @keyframes slideIn {
+ from {
+ transform: translateX(400px);
+ opacity: 0;
+ }
+ to {
+ transform: translateX(0);
+ opacity: 1;
+ }
+ }
+ @keyframes slideOut {
+ from {
+ transform: translateX(0);
+ opacity: 1;
+ }
+ to {
+ transform: translateX(400px);
+ opacity: 0;
+ }
+ }
+`;
+document.head.appendChild(style);
+
+// Close modals on ESC key
+document.addEventListener('keydown', (e) => {
+ if (e.key === 'Escape') {
+ closeModal();
+ closeDeleteModal();
+ }
+});
+
+// Close modals on background click
+document.getElementById('spoolModal').addEventListener('click', (e) => {
+ if (e.target.id === 'spoolModal') closeModal();
+});
+
+document.getElementById('deleteModal').addEventListener('click', (e) => {
+ if (e.target.id === 'deleteModal') closeDeleteModal();
+});
diff --git a/frontend/templates/admin_login.html b/frontend/templates/admin_login.html
new file mode 100644
index 0000000..21bb0fe
--- /dev/null
+++ b/frontend/templates/admin_login.html
@@ -0,0 +1,502 @@
+
+
+
+
+
+ LCARS - ADMIN ACCESS
+
+
+
+
+
+
+
+
+
+
+
+
+
⚠ ACCESS DENIED ⚠
+
+ INITIATING FRACTAL ENCRYPTION SEQUENCE...
+ 0xF4A7C3E9 0x8B2D1F56 0xE932A7C4 0x7D4E8F21
+ 0x3C9B6A45 0xA18F5E73 0x6E4D2B89 0xF7C3A194
+ 0x2E8A5D67 0xB94F1C32 0x5A7E3D91 0xC6B2F458
+ ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
+ ENCRYPTION MATRIX: ROTATING PRIME ALGORITHM
+ SECURITY PROTOCOL: OMEGA-7
+ ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
+ TERMINAL LOCKED: RETRY IN 3 SECONDS...
+
+
+
+
+
+
+
+
+
SYSTEM
+
ACCESS
+
CONTROL
+
STATUS
+
SECURE
+
+
+
+
+
+
+
+
+
diff --git a/frontend/templates/admin_notifications.html b/frontend/templates/admin_notifications.html
new file mode 100644
index 0000000..4b2f339
--- /dev/null
+++ b/frontend/templates/admin_notifications.html
@@ -0,0 +1,100 @@
+{% extends "layout.html" %}
+{% set page_title = "Benachrichtigungen" %}
+{% set page_subtitle = "Globale Alerts verwalten" %}
+{% set active_page = "admin_notifications" %}
+
+{% block content %}
+
+
+
+
+
+ Nachricht
+
+
+
+
+ Typ
+
+ Success
+ Warnung
+ Error
+ Info
+
+
+
+
+ Trigger
+
+ Manuell
+
+ ✅ Job abgeschlossen
+
+ ❌ Job abgebrochen
+ ❌ Job abgebrochen (Abort)
+ ❌ Job gestoppt
+
+ ❌ Job fehlgeschlagen
+ ❌ Job Fehler
+ ❌ Job Exception
+
+ ⚠️ Material niedrig
+ Custom
+
+
+
+
+
+
+
+ Gespeicherte Notifications
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+
+{% endblock %}
diff --git a/frontend/templates/admin_panel.html b/frontend/templates/admin_panel.html
new file mode 100644
index 0000000..8c06627
--- /dev/null
+++ b/frontend/templates/admin_panel.html
@@ -0,0 +1,741 @@
+
+
+
+
+
+ FilamentHub Admin
+
+
+
+
+
+
+
+
+
+
+
+
+
⚡ Systemsteuerung
+
Admin Panel
+
Migrationen ausführen, Datenbank pflegen, Begrüßungen anpassen und Coverage direkt im Browser.
+
+ 🔒 Sicher • Auth-required
+ 🛠️ Server-Tools
+ 📊 Live-Status
+
+
+
+ 🚀 Migration
+ 💾 DB-Editor
+ 📈 Coverage
+
+
+
+
+
+
Migration
+
Alembic
+
Upgrade Head & Logs
+
Öffnen
+
+
+
Datenbank
+
SQL Editor
+
INSERT · UPDATE · DELETE
+
Öffnen
+
+
+
Coverage
+
QA
+
Run & Report
+
Öffnen
+
+
+
Willkommen
+
Popup
+
Texte & Trigger
+
Öffnen
+
+
+
Datenbank
+
Tables
+
Schema & Preview
+
Öffnen
+
+
+
+
+
+
+
+
+
+
+
+
Alembic upgrade head ausführen
+
🚀 MIGRATION STARTEN
+
+
Kein Lauf protokolliert.
+
ℹ️ Noch keine Migration durchgeführt
+
ℹ️ Übersprungen (da vorhanden)
+
+
+
+
+
+
+
+
+
+
Datensatz per ID entfernen
+
Tabelle
+
+ material
+ spool
+ printer
+ job
+ userflag
+ setting
+
+
ID
+
+
🗑️ EINTRAG LÖSCHEN
+
+
+
+
+
+
+
+
+
+
+
Text für das Willkommens-Popup
+
+
+ 💾 Speichern
+ ▶️ Popup auslösen
+
+
+
+
+
+
+
+
+
+
+
+
Führe beliebige SQL-Befehle aus (INSERT, UPDATE, DELETE, CREATE, ALTER, DROP). Vorsicht: Änderungen sind direkt wirksam!
+
+
+ ▶️ Ausführen
+ 🗑️ Clear
+
+
+
SQL-Beispiele:
+
+ UPDATE
+ INSERT
+ DELETE
+
+
+
+
+
+
+
+
+
+
+
+
+
Coverage-Auswertung ohne Terminal
+
+ ▶️ Coverage ausführen
+ 📄 Coverage-Report öffnen
+
+
+
+
+
+
+
+
+
+
+
+
Schema-Übersicht mit Spalten, Typen und Datenvorschau
+
Lädt...
+
+
+
+
+
+
+
+
+
+
Verwaltung von Debug-bezogenen Benutzereinstellungen
+
+
+
Pro-Mode Warnung
+
+ Status: Lädt...
+
+
+ Setzt die "Ich weiß was ich mache" Bestätigung zurück. Der Warndialog wird beim nächsten Aktivieren von Pro-Mode erneut angezeigt.
+
+
+ 🔄 Warnung zurücksetzen
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ID *
+
+
+
+ Label
+
+
+
+ Nachricht *
+
+
+
+
+
+
+ Aktiv
+
+
+
+ Persistent
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/frontend/templates/ams.html b/frontend/templates/ams.html
new file mode 100644
index 0000000..a0cbdaf
--- /dev/null
+++ b/frontend/templates/ams.html
@@ -0,0 +1,288 @@
+{% extends "layout.html" %}
+{% set page_title = "AMS Übersicht" %}
+{% set page_subtitle = "Live-Monitoring der Automatic Material System Einheiten" %}
+
+{% block extra_styles %}
+
+{% endblock %}
+
+{% block content %}
+
+
+
+
+
+
+
+
+
+
+
+
📦
+
+
Verfügbares Filament
+
0kg
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Spule suchen
+
+
+
+ 💡 Suche nach Nummer, Name, Hersteller oder Farbe
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Möchtest du die Spule wirklich aus dem Slot entfernen ?
+
+
+ Die Spule wird wieder als verfügbar markiert.
+
+
+
+
+ Abbrechen
+
+
+ Entfernen
+
+
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+{% endblock %}
diff --git a/frontend/templates/coverage_wrapper.html b/frontend/templates/coverage_wrapper.html
new file mode 100644
index 0000000..2b2467d
--- /dev/null
+++ b/frontend/templates/coverage_wrapper.html
@@ -0,0 +1,248 @@
+
+
+
+
+
+ Coverage Report — FilamentHub
+
+
+
+
+
+
+
+
Coverage Übersicht
+
+
— %
+
Gesamt-Coverage
+
+
+
+
+
+
+
+ Letzter Lauf: —
+
+
+
+ Vollständigen Report in neuem Tab öffnen
+
+
+
Hinweis: Tabelle im Report wird farbig markiert (grün/orange/rot).
+
+
Status
+
🧪 Alpha‑Phase
+
Coverage: ~20–40 % → völlig okay
+
+Warum?
+Architektur ist im Aufbau
+APIs ändern sich ständig
+viel Experimentieren
+Tests wären sonst dauernd kaputt
+
+Ziel in Alpha:
+App startet
+Kernlogik explodiert nicht
+Smoke‑Tests + 1–2 wichtige Services
+
+👉 30 % Coverage = absolut in Ordnung
+
+
+
+
+
+
+
+
+
+
+
diff --git a/frontend/templates/dashboard.html b/frontend/templates/dashboard.html
new file mode 100644
index 0000000..d12905a
--- /dev/null
+++ b/frontend/templates/dashboard.html
@@ -0,0 +1,471 @@
+{% extends "layout.html" %}
+{% set page_title = "Dashboard" %}
+{% set page_subtitle = "Live Übersicht aller Operationen" %}
+
+{% block extra_styles %}
+
+
+{% endblock %}
+
+{% block header_actions %}{% endblock %}
+
+{% block content %}
+
+
+
+
🟢 Online Drucker
+
–
+
0/0
+
+
+
📊 Laufende Jobs
+
–
+
Jobs in Druck
+
+
+
⏱️ Durchschn. heute
+
–
+
Druckzeit
+
+
+
📈 Filament heute
+
–
+
Verbraucht
+
+
+
+
+
+
+
Aktive Drucke
+
Aktualisiert
+
+
+
+
+
+
+
+
Nächste Jobs
+
+
Keine anstehenden Jobs
+
+
+
+
+
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+
+
+{% endblock %}
diff --git a/frontend/templates/debug_ams.html b/frontend/templates/debug_ams.html
new file mode 100644
index 0000000..51a1a4a
--- /dev/null
+++ b/frontend/templates/debug_ams.html
@@ -0,0 +1,38 @@
+{% extends "layout.html" %}
+{% set page_title = "AMS Debug View" %}
+{% set page_subtitle = "Visualisierte AMS-Daten (parsed & mapped)" %}
+
+{% block extra_styles %}
+
+{% endblock %}
+
+{% block content %}
+{% if debug_center_mode == 'pro' %}
+
+
+ Rohdaten anzeigen
+ parse_ams Ergebnis
+ Mapper Ergebnis
+
+
+
+
+
+
+
+
+
AMS Daten werden geladen ...
+
+
+{% else %}
+
+
+ AMS Debug ist nur im Pro-Modus verfügbar.
+
+
+{% endif %}
+{% endblock %}
+
+{% block extra_scripts %}
+
+{% endblock %}
diff --git a/frontend/templates/jobs.html b/frontend/templates/jobs.html
new file mode 100644
index 0000000..79123ed
--- /dev/null
+++ b/frontend/templates/jobs.html
@@ -0,0 +1,245 @@
+{% extends "layout.html" %}
+{% set page_title = "Jobs" %}
+{% set page_subtitle = "Verwalte laufende und vergangene Druckaufträge" %}
+
+{% block extra_styles %}
+
+{% endblock %}
+
+{% block header_actions %}
+
+
+ ➕ Neuer Job
+
+
+{% endblock %}
+
+{% block content %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Job Name
+ Drucker
+ Spule(n)
+ Verbrauch
+ Status
+ Dauer
+ Aktionen
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Möchtest du diesen Job wirklich löschen? Diese Aktion kann nicht rückgängig gemacht werden.
+
+ Löschen
+ Abbrechen
+
+
+
+
+
+
+
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+
+{% endblock %}
diff --git a/frontend/templates/layout.html b/frontend/templates/layout.html
new file mode 100644
index 0000000..0897d2c
--- /dev/null
+++ b/frontend/templates/layout.html
@@ -0,0 +1,123 @@
+
+
+
+
+
+ {{ title or "FilamentHub" }}
+
+
+
+ {% block extra_styles %}{% endblock %}
+
+
+ {% include "sidebar.html" %}
+
+ {# Globale Benachrichtigungen auf allen Seiten #}
+
+
+
+ {% block header %}
+ {% if active_page != "debug" %}
+
+ {% endif %}
+ {% endblock %}
+
+ {% block content %}{% endblock %}
+
+
+
+
+
+
+
+
+ Strompreis (€/kWh)
+
+
+
+ Abbrechen
+ Speichern
+
+
+
+
+
+
+
+
+ {% block extra_scripts %}{% endblock %}
+
+
diff --git a/frontend/templates/materials.html b/frontend/templates/materials.html
new file mode 100644
index 0000000..423a6d4
--- /dev/null
+++ b/frontend/templates/materials.html
@@ -0,0 +1,134 @@
+{% extends "layout.html" %}
+{% set page_title = "Materialien" %}
+{% set page_subtitle = "Verwalte Materialprofile und Eigenschaften" %}
+
+{% block extra_styles %}
+
+{% endblock %}
+
+{% block header_actions %}
+
+ Neues Material
+
+{% endblock %}
+
+{% block content %}
+
+
+
+
+ Name
+ Marke
+ Dichte
+ Durchmesser
+ Aktionen
+
+
+
+
+ Laden...
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Name *
+
+
+
+
+
+
+
+
+ Notizen
+
+
+
+
+ Abbrechen
+ Erstellen
+
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+{% endblock %}
diff --git a/frontend/templates/printers.html b/frontend/templates/printers.html
new file mode 100644
index 0000000..a52a800
--- /dev/null
+++ b/frontend/templates/printers.html
@@ -0,0 +1,100 @@
+{% extends "layout.html" %}
+{% set page_title = "Drucker" %}
+{% set page_subtitle = "Übersicht und Steuerung aller Drucker" %}
+
+{% block extra_styles %}
+
+{% endblock %}
+
+{% block header_actions %}
+
+ Aktualisieren
+
+{% endblock %}
+
+{% block content %}
+
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+{% endblock %}
diff --git a/frontend/templates/settings.html b/frontend/templates/settings.html
new file mode 100644
index 0000000..55f6ef8
--- /dev/null
+++ b/frontend/templates/settings.html
@@ -0,0 +1,38 @@
+{% extends "layout.html" %}
+{% set page_title = "Settings" %}
+{% set page_subtitle = "Theme, Sprache und weitere Optionen" %}
+
+{% block content %}
+
+
+
+
Debug
+
+
Status: unbekannt
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+{% endblock %}
diff --git a/frontend/templates/sidebar.html b/frontend/templates/sidebar.html
new file mode 100644
index 0000000..18d04dc
--- /dev/null
+++ b/frontend/templates/sidebar.html
@@ -0,0 +1,33 @@
+
+
+
diff --git a/frontend/templates/spools.html b/frontend/templates/spools.html
new file mode 100644
index 0000000..be0183b
--- /dev/null
+++ b/frontend/templates/spools.html
@@ -0,0 +1,252 @@
+{% extends "layout.html" %}
+{% set page_title = "Spulen" %}
+{% set page_subtitle = "Spulen-Bestand und Materialverwaltung" %}
+
+{% block extra_styles %}
+
+{% endblock %}
+
+{% block header_actions %}
+
+
+ ➕ Neue Spule
+
+
+{% endblock %}
+
+{% block content %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
⚠️
+
+
Warnungen
+
0
+
+ Details
+ ▼
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 💡 Hinweis: Neue Spulen sind standardmäßig verschlossen und bekommen den Status "Lager". Bei Einlage ins AMS und erstem Druck wechselt der Status automatisch zu "Aktiv".
+
+
+
+
+ Abbrechen
+
+
+ 💾 Speichern
+
+
+
+
+
+
+
+
+
+
+
+
+
Möchten Sie diese Spule wirklich löschen? Diese Aktion kann nicht rückgängig gemacht werden.
+
+
+
+
+ Abbrechen
+
+
+ 🗑️ Löschen
+
+
+
+
+
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+
+{% endblock %}
diff --git a/frontend/templates/statistics.html b/frontend/templates/statistics.html
new file mode 100644
index 0000000..88c018d
--- /dev/null
+++ b/frontend/templates/statistics.html
@@ -0,0 +1,282 @@
+{% extends "layout.html" %}
+{% set page_title = "Statistiken" %}
+{% set page_subtitle = "Charts und Kennzahlen rund um Drucker, Spulen und Jobs" %}
+
+{% block extra_styles %}
+
+{% endblock %}
+
+{% block content %}
+
+
+
Druckzt.
+
–
+
Gesamtlaufzeit
+
Längster Job: –
+
+
+
Verbrch.
+
–
+
Filament verbraucht
+
Häufigstes Material: –
+
+
+
Kosten
+
–
+
Energiekosten
+
Exakt: – · Geschätzt: –
+
+
+
Jobs
+
–
+
Druckaufträge
+
Erfolgsquote: –
+
+
+
+
+
+
Druckaktivität (90 Tage)
+
Dunkler = mehr Aktivität
+
+
+
+
+
+
Kosten-Breakdown
+
+
+ Gesamt
+ –
+
+
+ Exakt
+ –
+
+
+ Geschätzt
+ –
+
+
+ Ø pro Job
+ –
+
+
+
Strompreis: – €/kWh
+
+
+
+
Performance-Metriken
+
+
+ Ø Druckzeit
+ –
+
+
+ Ø Filament
+ –
+
+
+ Längster Job
+ –
+
+
+
+
Basierend auf allen Jobs.
+
+
+
+
+
+
+
Top 5 Materialien
+
Nach Verbrauch (kg)
+
+
+
+
Top 5 Drucker
+
Nach Laufzeit (h)
+
+
+
+
+
+
+
+
Zeitverlauf & Verteilung
+
+ 7T
+ 30T
+ 3M
+ 1J
+
+
+
+
+
+
+
Filamentverbrauch über Zeit
+
Nach Material gruppiert
+
+
+
+
+
+
+
+
Material-Verteilung
+
Verbrauch nach Typ
+
+
+
+
+
+
+
+
Druckzeit je Drucker
+
Stunden & Jobs
+
+
+
+
+
+
+
+
+ Kosten-Entwicklung
+
+
+
+
Energiekosten im Zeitverlauf
+
Tägliche Kosten & kumuliert
+
+
+
+
+
+{% endblock %}
+
+{% block extra_scripts %}
+
+
+{% endblock %}
diff --git a/logs/admin/admin_audit.log b/logs/admin/admin_audit.log
new file mode 100644
index 0000000..e69de29
diff --git a/logs/app/app.log b/logs/app/app.log
new file mode 100644
index 0000000..e69de29
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..24b2960
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+python_files = test_*.py
+testpaths = tests
+norecursedirs = tools Backup .venv
diff --git a/rebuild.sh b/rebuild.sh
new file mode 100644
index 0000000..5df3687
--- /dev/null
+++ b/rebuild.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+# FilamentHub - Vollständiger Rebuild (ohne Datenbank-Reset)
+# Löscht altes Image und baut komplett neu
+
+set -e
+
+echo "========================================="
+echo "FilamentHub - Complete Rebuild"
+echo "========================================="
+echo ""
+
+echo "1. Stoppe Container..."
+docker-compose down
+
+echo "2. Lösche altes Image..."
+docker rmi filamenthub:latest 2>/dev/null || echo " Kein altes Image gefunden"
+
+echo "3. Baue neues Image (ohne Cache)..."
+docker build --no-cache -t filamenthub .
+
+echo "4. Starte Container..."
+docker-compose up -d
+
+echo ""
+echo "========================================="
+echo "Rebuild abgeschlossen!"
+echo "========================================="
+echo ""
+echo "Container läuft jetzt mit neuem Image."
+echo ""
+echo "Nützliche Befehle:"
+echo " docker-compose logs -f # Logs anzeigen"
+echo " docker-compose ps # Status prüfen"
+echo " curl http://localhost:8085/health # Health Check"
+echo ""
+echo "App öffnen: http://localhost:8085"
+echo ""
diff --git a/requirements.txt b/requirements.txt
index 8bf782d..388a8b9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,16 @@
fastapi
-uvicorn
+uvicorn[standard]
sqlmodel
jinja2
psutil>=5.9.0
paho-mqtt>=2.0.0
httpx
alembic
+python-multipart
+pyyaml
+websockets
+wsproto
+bcrypt
+python-dotenv
+pytest
+pytest-cov
diff --git a/run.py b/run.py
index a73c1fd..c761e89 100644
--- a/run.py
+++ b/run.py
@@ -1,3 +1,5 @@
+import sys
+
import uvicorn
import logging
import yaml
@@ -6,61 +8,83 @@
from logging.handlers import TimedRotatingFileHandler, RotatingFileHandler
from utils.dummy_logger import DummyLogger
+# Load .env early so environment variables from a .env file are available
+# to modules that are imported later (e.g. admin password hash).
+try:
+ from dotenv import load_dotenv # type: ignore
+ # Force override so values in .env replace any existing OS environment variables.
+ load_dotenv(override=True)
+except Exception:
+ # dotenv not installed or .env missing — continue without failing
+ pass
+
+
+def _ensure_python_multipart_installed() -> None:
+ try:
+ import multipart # noqa: F401
+ except Exception:
+ in_venv = hasattr(sys, "base_prefix") and sys.prefix != sys.base_prefix
+ hint = (
+ "Fehlendes Paket: python-multipart.\n"
+ "Installiere es in genau der Python-Umgebung, mit der du startest.\n\n"
+ "Empfohlen (Projekt-venv): .\\.venv\\Scripts\\python.exe run.py\n"
+ "Oder installiere global: pip install python-multipart\n"
+ )
+ if in_venv:
+ hint += "(Info: Du bist bereits in einer venv, dort fehlt das Paket.)\n"
+ else:
+ hint += "(Info: Du startest gerade nicht aus der Projekt-venv.)\n"
+
+ print(hint, file=sys.stderr)
+ raise SystemExit(1)
+
-# ---------------------------------------------------------
-# CONFIG LADEN
-# ---------------------------------------------------------
def load_config():
with open("config.yaml", "r", encoding="utf-8") as f:
return yaml.safe_load(f)
-config = load_config()
+def get_server_bind(cfg):
+ host = os.getenv("HOST") or cfg.get("server", {}).get("host") or "0.0.0.0"
+ port_val = os.getenv("PORT") or cfg.get("server", {}).get("port") or 8085
+ try:
+ port = int(port_val)
+ except (TypeError, ValueError):
+ port = 8085
+ return host, port
+
+
+_ensure_python_multipart_installed()
+config = load_config()
# ---------------------------------------------------------
# LOGGING SYSTEM
# ---------------------------------------------------------
-
-# Basis-Ordner (relative Pfade → funktionieren auf Windows, Pi, Docker)
LOG_ROOT = "logs"
-
-# sicherstellen, dass der Hauptordner existiert
os.makedirs(LOG_ROOT, exist_ok=True)
-log_config = config["logging"]
-module_config = log_config["modules"]
-
-# global log level
-global_level = getattr(logging, log_config["level"].upper(), logging.INFO)
-
+log_config = config.get("logging", {})
+module_config = log_config.get("modules", {})
+global_level = getattr(logging, log_config.get("level", "INFO").upper(), logging.INFO)
def create_logger(name: str, subfolder: str, level=logging.INFO, enabled=True):
- """
- Erzeugt einen Logger mit Tagesrotation.
- Falls disabled → DummyLogger.
- """
if not enabled:
return DummyLogger()
folder = os.path.join(LOG_ROOT, subfolder)
os.makedirs(folder, exist_ok=True)
-
logfile = os.path.join(folder, f"{datetime.now().strftime('%Y-%m-%d')}.log")
handler = TimedRotatingFileHandler(
logfile,
when="midnight",
- backupCount=log_config["keep_days"],
+ backupCount=log_config.get("keep_days", 7),
encoding="utf-8",
utc=False
)
-
- formatter = logging.Formatter(
- "%(asctime)s [%(levelname)s] %(name)s - %(message)s"
- )
-
+ formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s - %(message)s")
handler.setFormatter(formatter)
console = logging.StreamHandler()
@@ -68,93 +92,63 @@ def create_logger(name: str, subfolder: str, level=logging.INFO, enabled=True):
logger = logging.getLogger(name)
logger.setLevel(level)
-
logger.addHandler(handler)
logger.addHandler(console)
-
return logger
-# ---------------------------------------------------------
-# MODULE-LOGGER ANLEGEN
-# ---------------------------------------------------------
+app_logger = create_logger("App", "app", level=global_level, enabled=module_config.get("app", {}).get("enabled", True))
+bambu_logger = create_logger("Bambu", "bambu", level=global_level, enabled=module_config.get("bambu", {}).get("enabled", True))
+error_logger = create_logger("Error", "errors", level=logging.ERROR, enabled=module_config.get("errors", {}).get("enabled", True))
+klipper_logger = create_logger("Klipper", "klipper", level=global_level, enabled=module_config.get("klipper", {}).get("enabled", False))
+
-app_logger = create_logger(
- "App",
- "app",
- level=global_level,
- enabled=module_config["app"]["enabled"]
-)
-
-bambu_logger = create_logger(
- "Bambu",
- "bambu",
- level=global_level,
- enabled=module_config["bambu"]["enabled"]
-)
-
-klipper_logger = create_logger(
- "Klipper",
- "klipper",
- level=global_level,
- enabled=module_config["klipper"]["enabled"]
-)
-
-error_logger = create_logger(
- "Error",
- "errors",
- level=logging.ERROR,
- enabled=module_config["errors"]["enabled"]
-)
-
-# MQTT Logger mit Größenbegrenzung (aus config.yaml)
def create_mqtt_logger():
- """Erstellt einen Logger für MQTT-Nachrichten mit Rotation nach Größe."""
- folder = os.path.join(LOG_ROOT, "mqtt")
+ folder = os.path.join(LOG_ROOT, "3d_drucker")
os.makedirs(folder, exist_ok=True)
-
- logfile = os.path.join(folder, "mqtt_messages.log")
-
- # Lese Konfiguration aus config.yaml
+ logfile = os.path.join(folder, "3d_drucker.log")
max_size_mb = log_config.get("max_size_mb", 10)
backup_count = log_config.get("backup_count", 3)
-
- # RotatingFileHandler mit konfigurierbarer Größe
handler = RotatingFileHandler(
logfile,
- maxBytes=max_size_mb * 1024 * 1024, # MB zu Bytes
+ maxBytes=max_size_mb * 1024 * 1024,
backupCount=backup_count,
encoding="utf-8"
)
-
formatter = logging.Formatter("%(asctime)s | %(message)s")
handler.setFormatter(formatter)
-
- logger = logging.getLogger("MQTT")
+ logger = logging.getLogger("3D_drucker")
logger.setLevel(logging.INFO)
logger.addHandler(handler)
-
return logger
+
mqtt_logger = create_mqtt_logger()
app_logger.info("FilamentHub Logging-System initialisiert.")
app_logger.info(f"Aktive Log-Module: {module_config}")
-# ---------------------------------------------------------
-# SERVER START
-# ---------------------------------------------------------
-
+# Development reload is CLI-only to keep Windows start stable:
+# uvicorn app.main:app --reload --port 8085
def start():
- app_logger.info("Starte FilamentHub API Server...")
-
- uvicorn.run(
- "app.main:app",
- host=config["server"]["host"],
- port=config["server"]["port"],
- reload=True,
- )
+ host, port = get_server_bind(config)
+ app_logger.info(f"Starting FilamentHub on {host}:{port} (reload disabled)")
+ try:
+ uvicorn.run(
+ "app.main:app",
+ host=host,
+ port=port,
+ reload=False,
+ log_level="info",
+ loop="asyncio",
+ ws="websockets"
+ )
+ except OSError as exc:
+ if exc.errno in (98, 10048):
+ app_logger.error(f"Port {port} already in use. Set PORT env to a free port or stop the other process.")
+ return
+ raise
if __name__ == "__main__":
diff --git a/services/bambu_service.py b/services/bambu_service.py
index 49aea83..973261f 100644
--- a/services/bambu_service.py
+++ b/services/bambu_service.py
@@ -1,11 +1,218 @@
-"""Bambu LAN / Cloud Service Stub.
+"""Bambu Lab MQTT Service für Live-Sync von AMS Daten, RFID, Material-Verbrauch."""
+from run import bambu_logger, error_logger
+import paho.mqtt.client as mqtt
+import json
+from typing import Optional, Callable
+from sqlmodel import Session, select
+from app.models.spool import Spool
+from app.models.material import Material
+from app.database import get_session
-Hier kommen später:
-- MQTT Anbindung
-- Slot-Parsing
-- Verbrauchslogik
-"""
+class BambuService:
+ """MQTT Service für Bambu Lab Drucker - Auto-Sync von AMS Daten."""
-def connect():
- return False
+ def __init__(self, printer_id: str, host: str, access_code: str, serial: str):
+ self.printer_id = printer_id
+ self.host = host
+ self.access_code = access_code
+ self.serial = serial
+ self.client: Optional[mqtt.Client] = None
+ self.connected = False
+
+ bambu_logger.info(f"BambuService initialisiert für {host} (Serial: {serial})")
+
+ def connect(self):
+ """MQTT Verbindung zum Bambu Lab Drucker aufbauen."""
+ try:
+ self.client = mqtt.Client(client_id=f"FilamentHub_{self.serial}")
+ self.client.username_pw_set("bblp", self.access_code)
+
+ # Callbacks
+ self.client.on_connect = self._on_connect
+ self.client.on_message = self._on_message
+ self.client.on_disconnect = self._on_disconnect
+
+ bambu_logger.info(f"Verbinde zu {self.host}:6000...")
+ self.client.connect(self.host, 6000, 60)
+ self.client.loop_start()
+
+ except Exception as e:
+ error_logger.exception(f"MQTT Verbindung fehlgeschlagen: {e}")
+
+ def disconnect(self):
+ """MQTT Verbindung trennen."""
+ if self.client:
+ self.client.loop_stop()
+ self.client.disconnect()
+ bambu_logger.info("MQTT Verbindung getrennt")
+
+ def _on_connect(self, client, userdata, flags, rc):
+ """Callback wenn MQTT verbunden."""
+ if rc == 0:
+ self.connected = True
+ bambu_logger.info(f"MQTT verbunden mit {self.host}")
+
+ # Subscribe to report topic
+ topic = f"device/{self.serial}/report"
+ client.subscribe(topic)
+ bambu_logger.info(f"Subscribed zu {topic}")
+ else:
+ error_logger.error(f"MQTT Verbindung fehlgeschlagen: RC={rc}")
+
+ def _on_disconnect(self, client, userdata, rc):
+ """Callback wenn MQTT getrennt."""
+ self.connected = False
+ bambu_logger.warning(f"MQTT Verbindung getrennt: RC={rc}")
+
+ def _on_message(self, client, userdata, msg):
+ """Callback für eingehende MQTT Messages - Parse AMS Daten."""
+ try:
+ payload = json.loads(msg.payload.decode())
+
+ # Parse AMS data (Automatic Material System)
+ if "print" in payload and "ams" in payload["print"]:
+ ams_data = payload["print"]["ams"]
+ self._process_ams_data(ams_data)
+
+ # Parse print job data
+ if "print" in payload:
+ print_data = payload["print"]
+ self._process_print_data(print_data)
+
+ except json.JSONDecodeError:
+ bambu_logger.warning("Konnte MQTT Message nicht parsen")
+ except Exception as e:
+ error_logger.exception(f"Fehler beim Verarbeiten der MQTT Message: {e}")
+
+ def _process_ams_data(self, ams_data: dict):
+ """AMS Daten verarbeiten und Spulen auto-anlegen/updaten."""
+ try:
+ # AMS kann mehrere Units haben (AMS 1, AMS 2, etc.)
+ ams_units = ams_data.get("ams", [])
+
+ for ams_unit in ams_units:
+ ams_id = ams_unit.get("id", 0)
+ trays = ams_unit.get("tray", [])
+
+ for tray in trays:
+ tray_id = tray.get("id")
+ if tray_id is None:
+ continue
+
+ # AMS Slot berechnen (0-3 pro Unit, dann 4-7 für Unit 2, etc.)
+ ams_slot = ams_id * 4 + tray_id
+
+ # RFID auslesen
+ rfid = tray.get("tray_uuid")
+
+ # Material-Info
+ tray_type = tray.get("tray_type") # PLA, ABS, PETG, etc.
+ tray_color = tray.get("tray_color") # Hex color
+
+ # Gewicht (in gramm * 1000, umrechnen)
+ remain_weight = tray.get("remain", 0) / 1000.0
+
+ bambu_logger.debug(f"AMS Slot {ams_slot}: Type={tray_type}, Color={tray_color}, RFID={rfid}, Weight={remain_weight}g")
+
+ # Spule in DB anlegen/updaten
+ self._sync_spool(
+ ams_slot=ams_slot,
+ rfid=rfid,
+ material_type=tray_type,
+ color=tray_color,
+ remaining_weight=remain_weight
+ )
+
+ except Exception as e:
+ error_logger.exception(f"Fehler beim Verarbeiten der AMS Daten: {e}")
+
+ def _process_print_data(self, print_data: dict):
+ """Print Job Daten verarbeiten - Material-Verbrauch tracken."""
+ try:
+ # Aktueller AMS Slot in Benutzung
+ ams_slot = print_data.get("ams_status")
+
+ # Filament verbraucht (mm)
+ filament_used = print_data.get("mc_print_line_number", 0)
+
+ bambu_logger.debug(f"Print Status: AMS Slot {ams_slot}, Filament used: {filament_used}mm")
+
+ # Hier könnte man Job-Tracking machen
+ # z.B. laufenden Job updaten mit aktuellem Verbrauch
+
+ except Exception as e:
+ error_logger.exception(f"Fehler beim Verarbeiten der Print Daten: {e}")
+
+ def _sync_spool(self, ams_slot: int, rfid: Optional[str], material_type: Optional[str],
+ color: Optional[str], remaining_weight: float):
+ """Spule in Datenbank anlegen oder updaten basierend auf AMS Daten."""
+ try:
+ # DB Session holen
+ session = next(get_session())
+
+ # Suche Spule mit diesem RFID oder AMS Slot
+ spool = None
+ if rfid:
+ spool = session.exec(select(Spool).where(Spool.rfid_chip_id == rfid)).first()
+
+ if not spool:
+ # Suche nach AMS Slot
+ spool = session.exec(select(Spool).where(Spool.ams_slot == ams_slot)).first()
+
+ if spool:
+ # Update existing spool
+ bambu_logger.info(f"Update Spule {spool.id} - AMS Slot {ams_slot}, Weight: {remaining_weight}g")
+ spool.ams_slot = ams_slot
+ spool.remaining_weight = remaining_weight
+ if rfid and not spool.rfid_chip_id:
+ spool.rfid_chip_id = rfid
+ session.add(spool)
+ session.commit()
+ else:
+ # Neue Spule anlegen
+ bambu_logger.info(f"Neue Spule aus AMS - Slot {ams_slot}, Type: {material_type}")
+
+ # Suche oder erstelle Material
+ material_id = None
+ if material_type:
+ material = session.exec(
+ select(Material).where(Material.material_type == material_type)
+ ).first()
+
+ if not material:
+ # Neues Material anlegen
+ material = Material(
+ name=material_type,
+ material_type=material_type,
+ brand="Bambu Lab",
+ color=color or "#CCCCCC"
+ )
+ session.add(material)
+ session.commit()
+ session.refresh(material)
+ bambu_logger.info(f"Neues Material angelegt: {material_type}")
+
+ material_id = material.id
+
+ # Neue Spule anlegen
+ new_spool = Spool(
+ material_id=material_id,
+ ams_slot=ams_slot,
+ rfid_chip_id=rfid,
+ full_weight=1000.0, # Standard 1kg Spule
+ remaining_weight=remaining_weight,
+ location=f"AMS Slot {ams_slot}"
+ )
+ session.add(new_spool)
+ session.commit()
+ bambu_logger.info(f"Neue Spule angelegt: AMS Slot {ams_slot}")
+
+ session.close()
+
+ except Exception as e:
+ error_logger.exception(f"Fehler beim Sync der Spule: {e}")
+
+ def is_connected(self) -> bool:
+ """Gibt zurück ob MQTT verbunden ist."""
+ return self.connected
diff --git a/services/klipper_service.py b/services/klipper_service.py
index ecc93f2..4433da0 100644
--- a/services/klipper_service.py
+++ b/services/klipper_service.py
@@ -1,5 +1,51 @@
-"""Klipper / Moonraker Service Stub."""
+from run import klipper_logger, error_logger
+import requests
-def connect():
- return False
+class KlipperService:
+ """
+ Service für Klipper-Drucker. Modular, robust,
+ später kompatibel mit Moonraker / KlipperStatus.
+ """
+
+ def __init__(self, host: str = None):
+ self.host = host
+ klipper_logger.info("KlipperService initialisiert.")
+
+ def is_configured(self) -> bool:
+ configured = bool(self.host)
+ klipper_logger.debug(f"Konfiguration geprüft: {configured}")
+ return configured
+
+ def get_status(self) -> dict | None:
+ if not self.is_configured():
+ klipper_logger.warning("KlipperService nicht konfiguriert.")
+ return None
+
+ try:
+ url = f"http://{self.host}/printer/info"
+ klipper_logger.info(f"Abfrage: {url}")
+
+ # Platzhalter – Moonraker API später echte Daten
+ response = requests.get(url, timeout=3)
+
+ if response.status_code != 200:
+ klipper_logger.warning(f"Klipper Error: {response.status_code}")
+ return None
+
+ data = response.json()
+ klipper_logger.debug(f"KlipperStatus: {data}")
+ return data
+
+ except Exception as e:
+ error_logger.exception(f"Klipper API Fehler: {e}")
+ return None
+
+ def send_gcode(self, gcode: str) -> bool:
+ try:
+ klipper_logger.info(f"Sende GCODE: {gcode}")
+ # später -> POST an Moonraker API
+ return True
+ except Exception as e:
+ error_logger.exception(f"GCODE Fehler: {e}")
+ return False
diff --git a/services/manual_printer_service.py b/services/manual_printer_service.py
new file mode 100644
index 0000000..334db65
--- /dev/null
+++ b/services/manual_printer_service.py
@@ -0,0 +1,33 @@
+from run import app_logger, error_logger
+
+
+class ManualService:
+ """
+ Reines manuelles Eingabemodul.
+ Nützlich, wenn:
+ - kein LAN-Modus
+ - keine API
+ - keine Automatisierung
+
+ Hier kann der Benutzer Daten selbst eintragen.
+ """
+
+ def __init__(self):
+ app_logger.info("ManualService initialisiert.")
+
+ def set_filament_usage(self, grams: float) -> bool:
+ try:
+ app_logger.info(f"Manuelle Filamentmenge gesetzt: {grams} g")
+ # Später Speicherung in Datenbank
+ return True
+ except Exception as e:
+ error_logger.exception(f"Fehler beim Setzen: {e}")
+ return False
+
+ def set_printer_status(self, status: str) -> bool:
+ try:
+ app_logger.info(f"Manueller Status: {status}")
+ return True
+ except Exception as e:
+ error_logger.exception(f"Fehler beim manuellen Status: {e}")
+ return False
diff --git a/services/manual_service.py b/services/manual_service.py
deleted file mode 100644
index 29f17dd..0000000
--- a/services/manual_service.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Manueller Modus ohne Druckeranbindung."""
-
-
-def enabled():
- return True
diff --git a/services/mqtt_protocol_detector.py b/services/mqtt_protocol_detector.py
new file mode 100644
index 0000000..9c8fe44
--- /dev/null
+++ b/services/mqtt_protocol_detector.py
@@ -0,0 +1,83 @@
+import ssl
+import json
+import threading
+from typing import Dict, Optional
+
+import paho.mqtt.client as mqtt
+
+
+class MQTTProtocolDetector:
+ """Erkennt MQTT-Protokoll (v5/311/31) für Bambu Lab Drucker mit TLS+Auth."""
+
+ def __init__(self, timeout: int = 3) -> None:
+ self.timeout = timeout
+
+ def detect(self, ip: str, api_key: str, port: int = 8883) -> Dict[str, object]:
+ protocols = [("5", mqtt.MQTTv5), ("311", mqtt.MQTTv311), ("31", mqtt.MQTTv31)]
+ last_error: Optional[str] = None
+
+ for label, proto in protocols:
+ event = threading.Event()
+ result: Dict[str, object] = {
+ "detected": False,
+ "protocol": label,
+ "tls": True,
+ "auth": True,
+ "supports_properties": False,
+ "error": None,
+ }
+ payload_holder = {}
+
+ def on_connect(client, userdata, flags, rc, properties=None):
+ if rc == 0:
+ try:
+ client.subscribe("device/+/report")
+ except Exception as e:
+ result["error"] = f"subscribe failed: {e}"
+ client.disconnect()
+ else:
+ result["error"] = f"rc={rc}"
+ client.disconnect()
+
+ def on_message(client, userdata, msg):
+ try:
+ payload_holder["msg"] = msg
+ payload_holder["json"] = json.loads(msg.payload.decode("utf-8", errors="ignore"))
+ except Exception:
+ payload_holder["json"] = None
+ event.set()
+ client.disconnect()
+
+ client = mqtt.Client(protocol=proto)
+ try:
+ client.username_pw_set("bblp", api_key)
+ client.tls_set(cert_reqs=ssl.CERT_NONE)
+ client.tls_insecure_set(True)
+ client.on_connect = on_connect
+ client.on_message = on_message
+ client.connect(ip, port, keepalive=30)
+ client.loop_start()
+ event.wait(timeout=self.timeout)
+ client.loop_stop()
+ client.disconnect()
+ if event.is_set() and "msg" in payload_holder:
+ result["detected"] = True
+ result["supports_properties"] = (proto == mqtt.MQTTv5)
+ result["raw_topic"] = payload_holder.get("msg").topic
+ result["raw_payload"] = payload_holder.get("msg").payload.decode("utf-8", errors="ignore")
+ return result
+ else:
+ last_error = result.get("error") or "timeout"
+ except Exception as e:
+ last_error = str(e)
+ try:
+ client.loop_stop()
+ except Exception:
+ pass
+ try:
+ client.disconnect()
+ except Exception:
+ pass
+ continue
+
+ return {"detected": False, "error": last_error or "no protocol matched"}
diff --git a/services/printer_mqtt_client.py b/services/printer_mqtt_client.py
new file mode 100644
index 0000000..863155c
--- /dev/null
+++ b/services/printer_mqtt_client.py
@@ -0,0 +1,182 @@
+import json
+import ssl
+from typing import Optional
+from datetime import datetime
+
+import paho.mqtt.client as mqtt
+
+from app.services.universal_mapper import UniversalMapper
+from services.printer_service import PrinterService
+
+
+class PrinterMQTTClient:
+ """MQTT Client der immer PrinterData über den UniversalMapper liefert."""
+
+ def __init__(
+ self,
+ ip: str,
+ model: str,
+ name: str,
+ mqtt_version: str,
+ printer_service: PrinterService,
+ username: str = "bblp",
+ password: Optional[str] = None,
+ debug: bool = False,
+ ) -> None:
+ self.ip = ip
+ self.model = model
+ self.name = name
+ self.username = username or "bblp"
+ self.password = password
+ self.printer_service = printer_service
+ self.debug = debug
+
+ # Track connected state for runtime checks
+ self.connected: bool = False
+
+ self.mapper = UniversalMapper(model)
+
+ protocol = {
+ "5": mqtt.MQTTv5,
+ "311": mqtt.MQTTv311,
+ "31": mqtt.MQTTv31,
+ }.get(str(mqtt_version), mqtt.MQTTv311)
+
+ self.client = mqtt.Client(client_id=name, protocol=protocol)
+
+ if password is not None:
+ self.client.username_pw_set(self.username, password)
+
+ self.client.tls_set(cert_reqs=ssl.CERT_NONE)
+ self.client.tls_insecure_set(True)
+
+ self.client.on_connect = self._on_connect
+ self.client.on_message = self._on_message
+ self.client.on_disconnect = self._on_disconnect
+
+ def set_model(self, model: str) -> None:
+ """Bei Modelwechsel Mapper neu setzen."""
+ new_model = (model or "").upper()
+ if new_model and new_model != self.model:
+ self.model = new_model
+ self.mapper = UniversalMapper(new_model)
+ if self.debug:
+ print(f"[MQTT] Mapper aktualisiert auf Modell {new_model}")
+
+ def connect(self) -> None:
+ """Mit TLS (Port 8883) verbinden und Listening starten."""
+ if self.debug:
+ print(f"[MQTT] Verbinde {self.name} ({self.ip}) als {self.username} (TLS, insecure)")
+ self.client.connect(self.ip, 8883, 60)
+ self.client.loop_start()
+
+ def _on_connect(self, client, userdata, flags, reason_code, properties=None) -> None:
+ # MQTT v5 on_connect signature: (client, userdata, flags, reason_code, properties)
+ serial = None
+ try:
+ if isinstance(userdata, dict):
+ serial = userdata.get("cloud_serial")
+ except Exception:
+ serial = None
+
+ print(f"[MQTT] CONNECT rc={reason_code} cloud_serial={serial}")
+
+ # reason_code == 0 indicates success
+ try:
+ if reason_code == 0:
+ self.connected = True
+ else:
+ self.connected = False
+ except Exception:
+ self.connected = False
+
+ if self.connected:
+ if serial:
+ try:
+ topic = f"device/{serial}/report"
+ self.client.subscribe(topic)
+ print(f"[MQTT] SUBSCRIBED {topic}")
+ except Exception as e:
+ print(f"[MQTT] SUBSCRIBE FAILED cloud_serial={serial} error={e}")
+ else:
+ print(f"[MQTT] CONNECTED but no cloud_serial; not subscribing")
+ else:
+ print(f"[MQTT] CONNECT FAILED rc={reason_code} cloud_serial={serial}")
+
+ def _on_disconnect(self, client, userdata, rc) -> None:
+ # Always log disconnects for debugging
+ print(f"[MQTT] DISCONNECT rc={rc}")
+ try:
+ client.reconnect()
+ except Exception as e:
+ print(f"[MQTT] Reconnect fehlgeschlagen: {e}")
+
+ def _on_message(self, client, userdata, msg) -> None:
+ topic = getattr(msg, "topic", "")
+ payload_bytes = getattr(msg, "payload", b"")
+ try:
+ raw = json.loads(payload_bytes.decode("utf-8"))
+ except Exception as e:
+ print(f"[MQTT] JSON Fehler topic={topic} error={e}")
+ return
+
+ # extract cloud_serial from topic device//...
+ serial = None
+ try:
+ parts = (topic or "").split("/")
+ if len(parts) > 1 and parts[0] == "device":
+ serial = parts[1]
+ except Exception:
+ serial = None
+
+ print(f"[MQTT] MESSAGE topic={topic} cloud_serial={serial} size={len(payload_bytes)}")
+
+ # Mark as connected on first receipt
+ try:
+ if serial:
+ from datetime import datetime, timezone
+
+ ts = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+ try:
+ self.printer_service.set_connected(serial, True, ts)
+ except Exception as e:
+ print(f"[MQTT] set_connected failed for {serial}: {e}")
+ except Exception:
+ pass
+
+ try:
+ mapped = self.mapper.map(raw)
+ if mapped.model and mapped.model != self.model:
+ self.set_model(mapped.model)
+ # Use cloud_serial as logical key for printer updates
+ if serial:
+ self.printer_service.update_printer(serial, mapped)
+ else:
+ print(f"[MQTT] MESSAGE without cloud_serial; ignored")
+ except Exception as e:
+ print(f"[MQTT] Mapping/Update Fehler: {e}")
+ # Update global runtime state so status() returns connected when reports arrive
+ try:
+ if serial:
+ try:
+ # import at runtime to avoid circular imports
+ from app.services import mqtt_runtime
+
+ ts = None
+ try:
+ from datetime import datetime, timezone
+
+ ts = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+ except Exception:
+ ts = None
+
+ # mark runtime connected and update last_seen/connected_since if first
+ if ts:
+ mqtt_runtime._runtime_state["last_seen"] = ts
+ if not mqtt_runtime._runtime_state.get("connected"):
+ mqtt_runtime._runtime_state["connected"] = True
+ mqtt_runtime._runtime_state["connected_since"] = ts
+ except Exception:
+ pass
+ except Exception:
+ pass
diff --git a/services/printer_service.py b/services/printer_service.py
new file mode 100644
index 0000000..6ae2d76
--- /dev/null
+++ b/services/printer_service.py
@@ -0,0 +1,91 @@
+from typing import Dict, Optional, Any, cast, Union
+from datetime import datetime
+
+from app.services.printer_data import PrinterData
+
+
+class PrinterService:
+ """Zentraler In-Memory-Speicher für Druckerdaten (PrinterData).
+
+ Keyed by cloud_serial (string). No automatic registration by client_id.
+ """
+
+ def __init__(self) -> None:
+ # keys: cloud_serial -> { name, model, printer_id, data, last_update, capabilities }
+ # Use Any for inner dict values; cast when returning typed PrinterData
+ self.printers: Dict[str, Dict[str, Any]] = {}
+
+ def register_printer(self, key: str, name: str, model: str, printer_id: str, source: str = "unknown") -> None:
+ if not key:
+ print(f"[PrinterService] register_printer called without cloud_serial; source={source}")
+ return
+ self.printers[key] = {
+ "name": name,
+ "model": model,
+ "printer_id": printer_id,
+ "data": None,
+ "last_update": None,
+ "capabilities": {},
+ "registered_via": source,
+ "connected": False,
+ "last_seen": None,
+ }
+
+ def update_printer(self, key: str, data: PrinterData) -> None:
+ if not key:
+ print(f"[PrinterService] update_printer called without cloud_serial; skipping update")
+ return
+ if key not in self.printers:
+ # Do not auto-register by client_id anymore
+ print(f"[PrinterService] Unbekannter cloud_serial '{key}', update ignored (no auto-register)")
+ return
+ self.printers[key]["data"] = data
+ self.printers[key]["last_update"] = datetime.utcnow().isoformat()
+
+ def get(self, key: str) -> Optional[PrinterData]:
+ entry = self.printers.get(key)
+ if not entry:
+ return None
+ return cast(Optional[PrinterData], entry.get("data"))
+
+ def get_all(self) -> Dict[str, Optional[PrinterData]]:
+ return {k: cast(Optional[PrinterData], v.get("data")) for k, v in self.printers.items()}
+
+ def update_capabilities(self, key: str, caps: Dict[str, bool]) -> None:
+ if key in self.printers:
+ self.printers[key]["capabilities"] = caps
+
+ def set_connected(self, key: str, connected: bool, last_seen: Optional[str] = None) -> None:
+ if not key:
+ return
+ if key not in self.printers:
+ # do not auto-register here
+ print(f"[PrinterService] set_connected called for unknown cloud_serial '{key}', ignored")
+ return
+ self.printers[key]["connected"] = bool(connected)
+ if last_seen:
+ self.printers[key]["last_seen"] = last_seen
+ # Also mirror last_update for backward compatibility
+ self.printers[key]["last_update"] = last_seen
+
+ def get_status(self, key: str, timeout_seconds: int = 15) -> Dict[str, Optional[Union[bool, str]]]:
+ entry = self.printers.get(key)
+ if not entry:
+ return {"connected": False, "last_seen": None}
+ last_seen = entry.get("last_seen")
+ connected = bool(entry.get("connected", False))
+ # timeout logic: if last_seen older than threshold, treat as disconnected
+ if last_seen:
+ try:
+ from datetime import datetime, timezone
+
+ last_dt = datetime.fromisoformat(last_seen.replace("Z", "+00:00"))
+ now = datetime.now(timezone.utc)
+ delta = (now - last_dt).total_seconds()
+ if delta > float(timeout_seconds):
+ connected = False
+ except Exception:
+ # if parsing fails, keep stored connected flag
+ pass
+
+ return {"connected": connected, "last_seen": last_seen}
diff --git a/temp_live_state.json b/temp_live_state.json
new file mode 100644
index 0000000..bb040b1
--- /dev/null
+++ b/temp_live_state.json
@@ -0,0 +1 @@
+{"00M09A372601070":{"device":"00M09A372601070","ts":"2025-12-27T20:06:44.907380","payload":{"print":{"3D":{"layer_num":0,"total_layer_num":0},"ams":{"ams":[{"dry_time":0,"humidity":"3","humidity_raw":"32","id":"0","info":"1001","temp":"20.5","tray":[{"bed_temp":"0","bed_temp_type":"0","cali_idx":-1,"cols":["000000FF"],"ctype":0,"drying_temp":"55","drying_time":"8","id":"0","nozzle_temp_max":"230","nozzle_temp_min":"190","remain":15,"state":11,"tag_uid":"346089D400000100","total_len":330000,"tray_color":"000000FF","tray_diameter":"1.75","tray_id_name":"A00-K0","tray_info_idx":"GFA00","tray_sub_brands":"PLA Basic","tray_type":"PLA","tray_uuid":"883B9B6136D44528A6D3488631B79FCF","tray_weight":"1000","xcam_info":"803E803EE803E803CDCC4C3F"},{"bed_temp":"35","bed_temp_type":"1","cali_idx":-1,"cols":["00AE42FF"],"ctype":0,"drying_temp":"55","drying_time":"8","id":"1","nozzle_temp_max":"230","nozzle_temp_min":"190","remain":61,"state":11,"tag_uid":"BC0BE5A600000100","total_len":330000,"tray_color":"00AE42FF","tray_diameter":"1.75","tray_id_name":"A00-G6","tray_info_idx":"GFA00","tray_sub_brands":"PLA Basic","tray_type":"PLA","tray_uuid":"A9F739E72BF243249B77BC807537BED1","tray_weight":"1000","xcam_info":"88138813E803E8039A99193F"},{"bed_temp":"35","bed_temp_type":"1","cali_idx":-1,"cols":["F4EE2AFF"],"ctype":0,"drying_temp":"55","drying_time":"8","id":"2","nozzle_temp_max":"230","nozzle_temp_min":"190","remain":65,"state":11,"tag_uid":"0ABEB37000000100","total_len":330000,"tray_color":"F4EE2AFF","tray_diameter":"1.75","tray_id_name":"A00-Y0","tray_info_idx":"GFA00","tray_sub_brands":"PLA Basic","tray_type":"PLA","tray_uuid":"B7EFE1B588CF45E78D89BCC84DE92642","tray_weight":"1000","xcam_info":"8813100EE803E8039A99193F"},{"bed_temp":"0","bed_temp_type":"0","cali_idx":-1,"cols":["C12E1FFF"],"ctype":0,"drying_temp":"55","drying_time":"8","id":"3","nozzle_temp_max":"230","nozzle_temp_min":"190","remain":20,"state":11,"tag_uid":"AA21EEAF00000100","total_len":330000,"tray_color":"C12E1FFF","tray_diameter":"1.75","tray_id_name":"A00-R0","tray_info_idx":"GFA00","tray_sub_brands":"PLA Basic","tray_type":"PLA","tray_uuid":"04AED7007EB34A4F959F71B8468FA8A4","tray_weight":"1000","xcam_info":"D007D007E803E8036666663F"}]}],"ams_exist_bits":"1","ams_exist_bits_raw":"1","cali_id":0,"cali_stat":0,"insert_flag":true,"power_on_flag":true,"tray_exist_bits":"f","tray_is_bbl_bits":"f","tray_now":"255","tray_pre":"255","tray_read_done_bits":"f","tray_reading_bits":"0","tray_tar":"255","unbind_ams_stat":0,"version":64},"ams_rfid_status":0,"ams_status":0,"ap_err":0,"aux":"280100C","aux_part_fan":true,"batch_id":0,"bed_target_temper":0.0,"bed_temper":20.0,"big_fan1_speed":"0","big_fan2_speed":"0","cali_version":0,"canvas_id":0,"care":[{"id":"cr","info":"20631064"},{"id":"ls","info":"1A00"}],"cfg":"1FE29B","command":"push_status","cooling_fan_speed":"0","design_id":"","device":{"bed":{"info":{"temp":20},"state":0},"bed_temp":20,"cam":{"laser":{"cond":253,"state":0}},"ctc":{"info":{"temp":25},"state":0},"ext_tool":{"calib":2,"low_prec":true,"mount":0,"th_temp":0,"type":""},"extruder":{"info":[{"filam_bak":[],"hnow":0,"hpre":0,"htar":0,"id":0,"info":8,"snow":65535,"spre":65535,"star":65535,"stat":0,"temp":22}],"state":1},"fan":0,"laser":{"power":0},"nozzle":{"exist":1,"info":[{"diameter":0.4,"id":0,"tm":0,"type":"HX01","wear":0}],"state":0},"plate":{"base":1,"cali2d_id":"","cur_id":"","mat":1,"tar_id":""},"type":1},"err":"0","fail_reason":"0","fan_gear":0,"file":"","force_upgrade":false,"fun":"20011A30F9CFF","gcode_file":"","gcode_file_prepare_percent":"0","gcode_state":"IDLE","heatbreak_fan_speed":"0","hms":[],"home_flag":-1067065960,"hw_switch_state":0,"info":{"temp":25},"ipcam":{"agora_service":"disable","brtc_service":"enable","bs_state":0,"ipcam_dev":"1","ipcam_record":"enable","laser_preview_res":7,"mode_bits":2,"resolution":"1080p","rtsp_url":"rtsps://192.168.178.41:322/streaming/live/1","timelapse":"disable","tl_store_hpd_type":2,"tl_store_path_type":2,"tutk_server":"disable"},"job":{"cur_stage":{"idx":0,"state":0},"stage":[]},"job_attr":3,"job_id":"","layer_num":0,"lights_report":[{"mode":"on","node":"chamber_light"},{"mode":"flashing","node":"work_light"}],"mapping":[],"mc_action":0,"mc_err":0,"mc_percent":0,"mc_print_error_code":"0","mc_print_stage":"1","mc_print_sub_stage":0,"mc_remaining_time":0,"mc_stage":1,"model_id":"","net":{"conf":16,"info":[{"ip":699574464,"mask":16777215},{"ip":0,"mask":0}]},"nozzle_diameter":"0.4","nozzle_target_temper":0.0,"nozzle_temper":22.0,"nozzle_type":"HX01","online":{"ahb":false,"ext":false,"version":4},"percent":0,"plate_cnt":0,"plate_id":0,"plate_idx":0,"prepare_per":0,"print_error":0,"print_gcode_action":0,"print_real_action":0,"print_type":"","profile_id":"","project_id":"","queue":0,"queue_est":0,"queue_number":0,"queue_sts":0,"queue_total":0,"remain_time":0,"s_obj":[],"sdcard":true,"sequence_id":"2021","spd_lvl":2,"spd_mag":100,"stat":"7A58000","state":0,"stg":[],"stg_cur":-1,"subtask_id":"","subtask_name":"","task_id":"","total_layer_num":0,"upgrade_state":{"ahb_new_version_number":"","ams_new_version_number":"","consistency_request":false,"dis_state":1,"err_code":0,"ext_new_version_number":"","force_upgrade":false,"idx":4,"idx2":1070046502,"lower_limit":"00.00.00.00","message":" upgrade init is not ready, can not handle dds request!","module":"","new_version_state":1,"ota_new_version_number":"01.11.02.00","progress":"0","sequence_id":0,"sn":"00M09A372601070","status":"IDLE"},"upload":{"file_size":0,"finish_size":0,"message":"Good","oss_url":"","progress":0,"sequence_id":"0903","speed":0,"status":"idle","task_id":"","time_remaining":0,"trouble_id":""},"ver":"20000","vir_slot":[{"bed_temp":"0","bed_temp_type":"0","cali_idx":-1,"cols":["00000000"],"ctype":0,"drying_temp":"0","drying_time":"0","id":"255","nozzle_temp_max":"0","nozzle_temp_min":"0","remain":0,"tag_uid":"0000000000000000","total_len":330000,"tray_color":"00000000","tray_diameter":"1.75","tray_id_name":"","tray_info_idx":"","tray_sub_brands":"","tray_type":"","tray_uuid":"00000000000000000000000000000000","tray_weight":"0","xcam_info":"000000000000000000000000"}],"vt_tray":{"bed_temp":"0","bed_temp_type":"0","cali_idx":-1,"cols":["00000000"],"ctype":0,"drying_temp":"0","drying_time":"0","id":"255","nozzle_temp_max":"0","nozzle_temp_min":"0","remain":0,"tag_uid":"0000000000000000","total_len":330000,"tray_color":"00000000","tray_diameter":"1.75","tray_id_name":"","tray_info_idx":"","tray_sub_brands":"","tray_type":"","tray_uuid":"00000000000000000000000000000000","tray_weight":"0","xcam_info":"000000000000000000000000"},"wifi_signal":"-42dBm","xcam":{"allow_skip_parts":false,"buildplate_marker_detector":false,"first_layer_inspector":false,"halt_print_sensitivity":"high","print_halt":true,"printing_monitor":true,"spaghetti_detector":true},"xcam_status":"0"}}}}
\ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
index e69de29..75d816b 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+"""Test package helpers."""
diff --git a/tests/__pycache__/__init__.cpython-313.pyc b/tests/__pycache__/__init__.cpython-313.pyc
deleted file mode 100644
index 2195ca4..0000000
Binary files a/tests/__pycache__/__init__.cpython-313.pyc and /dev/null differ
diff --git a/tests/__pycache__/test_health.cpython-313-pytest-9.0.1.pyc b/tests/__pycache__/test_health.cpython-313-pytest-9.0.1.pyc
deleted file mode 100644
index 5088b36..0000000
Binary files a/tests/__pycache__/test_health.cpython-313-pytest-9.0.1.pyc and /dev/null differ
diff --git a/tests/ams_test_data.py b/tests/ams_test_data.py
new file mode 100644
index 0000000..cfdfd9a
--- /dev/null
+++ b/tests/ams_test_data.py
@@ -0,0 +1,84 @@
+"""
+Synthetische Testdaten für AMS-Parser/-Mapper.
+"""
+
+SINGLE_AMS_JSON = {
+ "ams": {
+ "modules": [
+ {
+ "ams_id": 0,
+ "active_tray": 1,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": "UUID-A0-S0", "material": "PLA"},
+ {"tray_id": 1, "tray_uuid": "UUID-A0-S1", "material": "PETG"},
+ {"tray_id": 2, "tray_uuid": None, "material": None},
+ {"tray_id": 3, "tray_uuid": "UUID-A0-S3", "material": "ABS"},
+ ],
+ }
+ ]
+ }
+}
+
+MULTI_AMS_JSON = {
+ "ams": {
+ "modules": [
+ {
+ "ams_id": 0,
+ "active_tray": 1,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": "UUID-A0-S0", "material": "PLA"},
+ {"tray_id": 1, "tray_uuid": "UUID-A0-S1", "material": "PETG"},
+ {"tray_id": 2, "tray_uuid": None, "material": None},
+ {"tray_id": 3, "tray_uuid": "UUID-A0-S3", "material": "ABS"},
+ ],
+ },
+ {
+ "ams_id": 1,
+ "active_tray": 2,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": "UUID-A1-S0", "material": "PA"},
+ {"tray_id": 1, "tray_uuid": "UUID-A1-S1", "material": None},
+ {"tray_id": 2, "tray_uuid": "UUID-A1-S2", "material": "TPU"},
+ {"tray_id": 3, "tray_uuid": None, "material": None},
+ ],
+ },
+ ]
+ }
+}
+
+EDGE_AMS_JSON = {
+ "ams": {
+ "modules": [
+ {
+ "ams_id": 2,
+ "active_tray": None,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": None, "material": None},
+ {"tray_id": 1, "tray_uuid": "UUID-E1", "material": None},
+ {"tray_id": 2, "tray_uuid": "UUID-E2", "material": "PLA"},
+ {"tray_id": 3, "tray_uuid": None, "material": None},
+ ],
+ }
+ ]
+ }
+}
+
+OLD_FORMAT_AMS_JSON = {
+ "ams": {
+ "tray_0": {"tray_id": 0, "tray_uuid": "UUID-OLD-0", "material": "PLA"},
+ "tray_1": {"tray_id": 1, "tray_uuid": "UUID-OLD-1", "material": "PETG"},
+ "tray_2": {"tray_id": 2, "tray_uuid": "UUID-OLD-2", "material": None},
+ "tray_3": {"tray_id": 3, "tray_uuid": "UUID-OLD-3", "material": "ABS"},
+ "active_tray": 1,
+ }
+}
+
+EMPTY_AMS_JSON = [
+ {},
+ {"ams": None},
+ {"print": {}},
+]
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..2b84199
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,198 @@
+import pytest
+import os
+import sys
+from pathlib import Path
+from sqlmodel import SQLModel, create_engine
+from sqlalchemy.sql.elements import BinaryExpression
+
+test_root = Path(__file__).resolve().parents[1]
+sys.path.insert(0, str(test_root))
+
+from tests.helpers import ensure_admin_password_hash
+
+ensure_admin_password_hash()
+
+# Test database path
+TEST_DB_PATH = os.path.join(test_root, "data", "test_filamenthub.db")
+
+# Ensure test DB path is used by the application before app imports engine
+os.environ["FILAMENTHUB_DB_PATH"] = TEST_DB_PATH
+
+from app.database import engine
+
+import app.models.job # ensure SQLModel metadata registered
+import app.models.material
+import app.models.printer
+import app.models.spool
+import app.models.settings
+
+
+class Field:
+ def __init__(self, name):
+ self.name = name
+
+ def __eq__(self, other) -> bool:
+ return self.name == getattr(other, "name", None)
+
+
+class DummySpool:
+ tag_uid = Field("tag_uid")
+ tray_uuid = Field("tray_uuid")
+ ams_slot = Field("ams_slot")
+
+ def __init__(self, **kwargs):
+ self.material_id = kwargs.get("material_id")
+ self.printer_id = kwargs.get("printer_id")
+ self.ams_slot = kwargs.get("ams_slot")
+ self.last_slot = kwargs.get("last_slot")
+ self.tag_uid = kwargs.get("tag_uid")
+ self.tray_uuid = kwargs.get("tray_uuid")
+ self.tray_color = kwargs.get("tray_color")
+ self.tray_type = kwargs.get("tray_type")
+ self.remain_percent = kwargs.get("remain_percent", 0.0)
+ self.weight_current = kwargs.get("weight_current")
+ self.weight_full = kwargs.get("weight_full")
+ self.weight_empty = kwargs.get("weight_empty")
+ self.last_seen = kwargs.get("last_seen")
+ self.first_seen = kwargs.get("first_seen")
+ self.used_count = kwargs.get("used_count", 0)
+ self.label = kwargs.get("label")
+
+
+class DummyMaterial:
+ name = Field("name")
+ brand = Field("brand")
+
+ def __init__(self, **kwargs):
+ self.id = kwargs.get("id", "mat-1")
+ self.name = kwargs.get("name")
+ self.brand = kwargs.get("brand")
+ self.color = kwargs.get("color")
+ self.density = kwargs.get("density")
+ self.diameter = kwargs.get("diameter")
+
+
+class FakeResult:
+ def __init__(self, items):
+ self.items = items
+
+ def first(self):
+ return self.items[0] if self.items else None
+
+
+class FakeSelect:
+ def __init__(self, model):
+ self.model = model
+ self.filters = []
+
+ def where(self, condition):
+ self.filters.append(condition)
+ return self
+
+
+def fake_select(model):
+ return FakeSelect(model)
+
+
+class FakeSession:
+ def __init__(self, spools, materials):
+ self._spools = spools
+ self._materials = materials
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc, tb):
+ return False
+
+ def exec(self, select_obj):
+ if select_obj.model is DummyMaterial:
+ return FakeResult(self._materials)
+ if select_obj.model is DummySpool:
+ items = self._spools
+ for flt in select_obj.filters:
+ if isinstance(flt, tuple) and flt[0] == "eq":
+ _, field, expected = flt
+ items = [s for s in items if getattr(s, field, None) == expected]
+ elif isinstance(flt, BinaryExpression):
+ key = flt.left.key
+ value = getattr(flt.right, "value", None)
+ items = [s for s in items if getattr(s, key, None) == value]
+ return FakeResult(items)
+ return FakeResult([])
+
+ def add(self, obj):
+ if isinstance(obj, DummySpool) and obj not in self._spools:
+ self._spools.append(obj)
+ if isinstance(obj, DummyMaterial) and obj not in self._materials:
+ self._materials.append(obj)
+
+ def commit(self):
+ return None
+
+ def refresh(self, obj):
+ return None
+
+ def rollback(self):
+ return None
+
+
+@pytest.fixture
+def fake_session_env(monkeypatch):
+ """
+ Monkeypatch ams_sync Session/select/Spool/Material to in-memory fakes.
+ Returns (spools, materials) lists for inspection.
+ """
+ from app.services import ams_sync
+
+ spools = []
+ materials = []
+
+ monkeypatch.setattr(ams_sync, "Session", lambda engine=None: FakeSession(spools, materials))
+ monkeypatch.setattr(ams_sync, "select", fake_select)
+ monkeypatch.setattr(ams_sync, "Spool", DummySpool)
+ monkeypatch.setattr(ams_sync, "Material", DummyMaterial)
+ monkeypatch.setattr(ams_sync, "engine", None)
+
+ return spools, materials
+
+
+@pytest.fixture(autouse=True, scope="session")
+def reset_db():
+ # Testdatenbank löschen, falls vorhanden
+ if os.path.exists(TEST_DB_PATH):
+ os.remove(TEST_DB_PATH)
+ # Tabellen neu anlegen
+ SQLModel.metadata.create_all(engine)
+ yield
+ # Nach den Tests optional wieder löschen
+ try:
+ engine.dispose()
+ except Exception:
+ pass
+ if os.path.exists(TEST_DB_PATH):
+ os.remove(TEST_DB_PATH)
+
+
+@pytest.fixture
+def db_session():
+ """Provide a simple Session fixture tests can opt into.
+
+ Note: This does not automatically wrap sessions created directly
+ with `Session(engine)` in tests — consider migrating tests to use
+ this fixture or implement a per-test engine/connection strategy.
+ """
+ from sqlmodel import Session as _Session
+
+ sess = _Session(engine)
+ try:
+ yield sess
+ finally:
+ try:
+ sess.rollback()
+ except Exception:
+ pass
+ try:
+ sess.close()
+ except Exception:
+ pass
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 0000000..1a2add6
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,15 @@
+import os
+
+import bcrypt
+
+TEST_ADMIN_PASSWORD = "filamenthub-test-admin"
+TEST_ADMIN_HASH_ROUNDS = 4
+
+
+def ensure_admin_password_hash():
+ """Setzt einen reproduzierbaren Admin-Hash für Tests, damit wir das Passwort kennen."""
+ hashed = bcrypt.hashpw(
+ TEST_ADMIN_PASSWORD.encode("utf-8"),
+ bcrypt.gensalt(rounds=TEST_ADMIN_HASH_ROUNDS),
+ ).decode("utf-8")
+ os.environ["ADMIN_PASSWORD_HASH"] = hashed
diff --git a/tests/mqtt_test.py b/tests/mqtt_test.py
new file mode 100644
index 0000000..1a30e34
--- /dev/null
+++ b/tests/mqtt_test.py
@@ -0,0 +1,45 @@
+
+import paho.mqtt.client as mqtt
+import ssl
+
+broker = "192.168.178.41"
+
+port = 8885
+# Thema nach Vorgabe, Seriennummer bitte ersetzen:
+serial_number = "00M09A372601070" # <-- hier echte Seriennummer eintragen
+# englisches Thema zum Testen:
+topic = f"device/{serial_number}/report"
+message = "Testnachricht"
+username = "bblp"
+password = "" # <-- Passwort aus Drucker-Netzwerkeinstellungen
+
+
+client = mqtt.Client(protocol=mqtt.MQTTv311, callback_api_version=mqtt.CallbackAPIVersion.VERSION2)
+client.username_pw_set(username, password)
+# TLS explizit ohne Zertifikatsprüfung
+client.tls_set(cert_reqs=ssl.CERT_NONE)
+client.tls_insecure_set(True)
+
+# Callback-Funktionen für Logging
+def on_connect(client, userdata, flags, rc):
+ print(f"Verbindung hergestellt mit Code {rc}")
+
+def on_publish(client, userdata, mid):
+ print(f"Nachricht veröffentlicht, mid={mid}")
+
+def on_log(client, userdata, level, buf):
+ print(f"LOG: {buf}")
+
+client.on_connect = on_connect
+client.on_publish = on_publish
+client.on_log = on_log
+
+try:
+ client.connect(broker, port)
+ client.loop_start()
+ result = client.publish(topic, message)
+ result.wait_for_publish()
+ client.loop_stop()
+ client.disconnect()
+except Exception as e:
+ print(f"Fehler: {e}")
diff --git a/tests/printer_data.py.py b/tests/printer_data.py.py
new file mode 100644
index 0000000..0ab33b8
--- /dev/null
+++ b/tests/printer_data.py.py
@@ -0,0 +1,42 @@
+class PrinterData:
+ """Einheitliches Ausgabeformat für ALLE Bambu Lab Drucker."""
+
+ def __init__(self):
+ self.state = None
+ self.progress = None
+ self.sub_state = None
+
+ self.temperature = {
+ "nozzle": None,
+ "bed": None,
+ "chamber": None
+ }
+
+ self.fan = {
+ "part_cooling": None,
+ "aux": None,
+ "chamber": None
+ }
+
+ self.layer = {
+ "current": None,
+ "total": None
+ }
+
+ self.speed_mode = None
+
+ self.light = {
+ "state": None,
+ "brightness": None
+ }
+
+ self.ams = None
+
+ self.job = {
+ "file": None,
+ "time_elapsed": None,
+ "time_remaining": None
+ }
+
+ self.error = None
+ self.extra = {} # Alle unbekannten Felder
diff --git a/tests/printer_mapper.py b/tests/printer_mapper.py
new file mode 100644
index 0000000..7b51ec4
--- /dev/null
+++ b/tests/printer_mapper.py
@@ -0,0 +1,100 @@
+from printer_data import PrinterData
+
+class UniversalMapper:
+ """Universal Mapper für ALLE BambuLab Drucker."""
+
+ def __init__(self, model: str):
+ self.model = model.upper()
+
+ def map(self, data: dict) -> PrinterData:
+ out = PrinterData()
+
+ # ======= STATUS =======
+ if mc := data.get("mc_print"):
+ out.state = mc.get("stage")
+ out.progress = mc.get("progress")
+ out.sub_state = mc.get("sub_stage")
+
+ out.layer["current"] = mc.get("layer_num")
+ out.layer["total"] = mc.get("total_layers")
+
+ out.job["file"] = mc.get("file_name")
+ out.job["time_elapsed"] = mc.get("time_elapsed")
+ out.job["time_remaining"] = mc.get("time_remaining")
+
+ out.error = mc.get("error_code")
+ out.speed_mode = mc.get("print_speed_mode")
+
+ elif pr := data.get("print"):
+ out.state = pr.get("gcode_state")
+ out.progress = pr.get("progress")
+
+ out.layer["current"] = pr.get("layer_num")
+ out.layer["total"] = pr.get("total_layer")
+
+ out.job["file"] = pr.get("file")
+ out.job["time_elapsed"] = pr.get("time_elapsed")
+ out.job["time_remaining"] = pr.get("time_remaining")
+
+ out.error = pr.get("error_code")
+ out.speed_mode = pr.get("speed_level")
+
+ # ======= TEMPERATURE =======
+ if heater := data.get("heater"):
+ out.temperature["nozzle"] = heater.get("nozzle_temper") or heater.get("nozzle_temp")
+ out.temperature["bed"] = heater.get("bed_temper") or heater.get("bed_temp")
+ out.temperature["chamber"] = heater.get("chamber_temper") or heater.get("chamber_temp")
+
+ if temp := data.get("temperature"): # H2D
+ out.temperature["nozzle"] = temp.get("nozzle")
+ out.temperature["bed"] = temp.get("bed")
+ out.temperature["chamber"] = temp.get("chamber")
+
+ # ======= FANS =======
+ if cooling := data.get("cooling"):
+ out.fan["part_cooling"] = cooling.get("fan_1_speed") or cooling.get("fan_speed")
+ out.fan["aux"] = cooling.get("fan_2_speed")
+ out.fan["chamber"] = cooling.get("fan_3_speed")
+
+ if fan := data.get("fan"):
+ out.fan["part_cooling"] = fan.get("speed")
+
+ if f := data.get("cooling_fan"): # H2D
+ out.fan["part_cooling"] = f.get("toolhead_fan")
+ out.fan["chamber"] = f.get("chamber_fan")
+
+ # ======= LIGHT =======
+ if light := data.get("light"):
+ out.light["state"] = light.get("light_state") or light.get("on")
+ out.light["brightness"] = light.get("light_strength") or light.get("strength")
+
+ # ======= AMS =======
+ if ams := data.get("ams"):
+ out.ams = ams
+ if fil := data.get("filament"):
+ out.ams = fil
+ if matsys := data.get("material_system"): # H2D
+ out.ams = matsys
+
+ # ======= ERROR =======
+ if err := data.get("error"):
+ out.error = err
+
+ # ======= JOB (H2D) =======
+ if job := data.get("job"):
+ out.job["file"] = job.get("name")
+ out.job["time_elapsed"] = job.get("elapsed")
+ out.job["time_remaining"] = job.get("remaining")
+
+ # ======= EXTRA FELDER =======
+ known = {
+ "mc_print", "print", "heater", "cooling", "fan",
+ "light", "ams", "filament", "temperature",
+ "material_system", "job", "error"
+ }
+
+ for k, v in data.items():
+ if k not in known:
+ out.extra[k] = v
+
+ return out
diff --git a/tests/printer_mqtt_client.py b/tests/printer_mqtt_client.py
new file mode 100644
index 0000000..1f24c8d
--- /dev/null
+++ b/tests/printer_mqtt_client.py
@@ -0,0 +1,50 @@
+import ssl
+import json
+import logging
+import paho.mqtt.client as mqtt
+from printer_mapper import UniversalMapper
+
+
+class PrinterMQTTClient:
+
+ def __init__(self, ip, model, name, mqtt_version, printer_service):
+ self.ip = ip
+ self.model = model
+ self.name = name
+ self.printer_service = printer_service
+
+ self.mapper = UniversalMapper(model)
+
+ protocol = {
+ "5": mqtt.MQTTv5,
+ "311": mqtt.MQTTv311,
+ "31": mqtt.MQTTv31
+ }.get(str(mqtt_version), mqtt.MQTTv311)
+
+ self.client = mqtt.Client(client_id=name, protocol=protocol)
+
+ self.client.tls_set(cert_reqs=ssl.CERT_NONE)
+ self.client.tls_insecure_set(True)
+
+ self.client.on_connect = self._on_connect
+ self.client.on_message = self._on_message
+
+ def connect(self):
+ print(f"[MQTT] Verbinde zu {self.name} ({self.ip})")
+ self.client.connect(self.ip, 8883, 60)
+ self.client.loop_start()
+
+ def _on_connect(self, client, userdata, flags, rc):
+ print(f"[MQTT] {self.name} verbunden (RC={rc})")
+ self.client.subscribe("device/+/report")
+
+ def _on_message(self, client, userdata, msg):
+ logger = logging.getLogger(__name__)
+ try:
+ raw = json.loads(msg.payload.decode("utf-8"))
+ except Exception as exc:
+ logger.debug("[MQTT] JSON Fehler beim Parsen der Payload: %s", exc)
+ return
+
+ mapped = self.mapper.map(raw)
+ self.printer_service.update_printer(self.name, mapped)
diff --git a/tests/printer_service.py b/tests/printer_service.py
new file mode 100644
index 0000000..495538f
--- /dev/null
+++ b/tests/printer_service.py
@@ -0,0 +1,21 @@
+class PrinterService:
+ def __init__(self):
+ self.printers = {}
+
+ def register(self, name, model, ip, mqtt_version):
+ self.printers[name] = {
+ "model": model,
+ "ip": ip,
+ "mqtt_version": mqtt_version,
+ "data": None
+ }
+
+ def update_printer(self, name, data):
+ self.printers[name]["data"] = data
+ print(f"[SERVICE] {name} → State: {data.state}, Progress: {data.progress}")
+
+ def get(self, name):
+ return self.printers[name]["data"]
+
+ def get_all(self):
+ return {n: d["data"] for n, d in self.printers.items()}
diff --git a/tests/test_admin_config_routes.py b/tests/test_admin_config_routes.py
new file mode 100644
index 0000000..3fcf625
--- /dev/null
+++ b/tests/test_admin_config_routes.py
@@ -0,0 +1,86 @@
+from fastapi.testclient import TestClient
+from sqlmodel import Session
+
+from app.database import engine
+from app.models.material import Material
+from app.routes.config_routes import DEFAULT_CONFIG
+from app.main import app
+from tests.helpers import TEST_ADMIN_PASSWORD
+
+
+def _admin_client():
+ client = TestClient(app)
+ resp = client.post("/api/admin/login", data={"password": TEST_ADMIN_PASSWORD})
+ assert resp.status_code == 200
+ token = resp.cookies.get("admin_token")
+ assert token
+ client.cookies.set("admin_token", token)
+ return client
+
+
+def test_admin_delete_material_success():
+ client = TestClient(app)
+ payload = {
+ "name": "delete-me",
+ "brand": "test",
+ "density": 1.24,
+ "diameter": 1.75,
+ }
+ create_resp = client.post("/api/materials/", json=payload)
+ assert create_resp.status_code == 201
+ material_id = create_resp.json()["id"]
+
+ admin = _admin_client()
+ resp = admin.post("/api/admin/delete", json={"table": "material", "id": material_id})
+ assert resp.status_code == 200
+ assert resp.json()["success"]
+
+ with Session(engine) as session:
+ assert session.get(Material, material_id) is None
+
+
+def test_admin_delete_requires_auth():
+ client = TestClient(app)
+ resp = client.post("/api/admin/delete", json={"table": "material", "id": "x"})
+ assert resp.status_code == 401
+
+
+def test_admin_delete_unknown_table():
+ admin = _admin_client()
+ resp = admin.post("/api/admin/delete", json={"table": "unknown", "id": "x"})
+ assert resp.status_code == 200
+ assert resp.json()["success"] is False
+ assert "Unbekannte Tabelle" in resp.json()["error"]
+
+
+def test_admin_delete_missing_values():
+ admin = _admin_client()
+ resp = admin.post("/api/admin/delete", json={"table": "", "id": ""})
+ assert resp.status_code == 200
+ json_body = resp.json()
+ assert json_body["success"] is False
+ assert "Tabelle und ID erforderlich" in json_body["error"]
+
+
+def test_config_update_rejects_empty_payload():
+ client = TestClient(app)
+ resp = client.put("/api/config", json={})
+ assert resp.status_code == 400
+ assert resp.json()["detail"] == "No valid keys provided"
+
+
+def test_config_update_respects_logging_validity():
+ client = TestClient(app)
+ resp = client.put(
+ "/api/config",
+ json={
+ "logging": {"level": "NOTLEVEL", "keep_days": -5},
+ "debug": {"system_health": {"warn_latency_ms": 50, "error_latency_ms": 40}},
+ },
+ )
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["logging"]["level"] == DEFAULT_CONFIG["logging"]["level"]
+ assert body["logging"]["keep_days"] == DEFAULT_CONFIG["logging"]["keep_days"]
+ assert body["debug"]["system_health"]["warn_latency_ms"] == DEFAULT_CONFIG["debug"]["system_health"]["warn_latency_ms"]
+ assert body["debug"]["system_health"]["error_latency_ms"] == DEFAULT_CONFIG["debug"]["system_health"]["error_latency_ms"]
diff --git a/tests/test_ams_api.py b/tests/test_ams_api.py
new file mode 100644
index 0000000..d2e84f8
--- /dev/null
+++ b/tests/test_ams_api.py
@@ -0,0 +1,97 @@
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+import pytest
+
+import app.services.live_state as live_state_module
+from app.routes import ams_routes
+
+
+@pytest.fixture()
+def client(monkeypatch):
+ sample_payload = {
+ "ams": {
+ "ams": [
+ {
+ "ams_id": 1,
+ "temp": 22.5,
+ "humidity": 55,
+ "tray": [
+ {
+ "tray_id": 0,
+ "tray_uuid": "uuid-1",
+ "tag_uid": "tag-1",
+ "remain": 0.75,
+ "total_len": 123
+ }
+ ]
+ }
+ ]
+ }
+ }
+
+ in_memory = {
+ "DEV123": {
+ "device": "DEV123",
+ "ts": "2025-12-28T00:00:00Z",
+ "payload": sample_payload,
+ },
+ "DEV_EMPTY": {
+ "device": "DEV_EMPTY",
+ "ts": "2025-12-28T00:00:01Z",
+ "payload": {},
+ },
+ }
+
+ monkeypatch.setattr(live_state_module, "get_all_live_state", lambda: in_memory)
+ monkeypatch.setattr(live_state_module, "get_live_state", lambda d: in_memory.get(d))
+
+ app = FastAPI()
+ app.include_router(ams_routes.router)
+ return TestClient(app)
+
+
+def test_list_ams(client):
+ resp = client.get("/api/ams/")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert "devices" in data
+ devices = data["devices"]
+ assert any(d.get("device_serial") == "DEV123" for d in devices)
+ dev = next(d for d in devices if d.get("device_serial") == "DEV123")
+ assert dev.get("ts") == "2025-12-28T00:00:00Z"
+ assert dev.get("online") is True
+ ams_units = dev.get("ams_units")
+ assert isinstance(ams_units, list)
+ assert len(ams_units) == 1
+ unit = ams_units[0]
+ assert unit.get("ams_id") == 1
+ assert unit.get("temp") == 22.5
+ assert unit.get("humidity") == 55
+ trays = unit.get("trays")
+ assert isinstance(trays, list)
+ assert len(trays) == 1
+ tray = trays[0]
+ assert tray.get("slot") == 0
+ assert tray.get("tray_uuid") == "uuid-1"
+ assert tray.get("tag_uid") == "tag-1"
+ assert tray.get("remain_percent") == 0.75
+
+
+def test_get_single_device(client):
+ resp = client.get("/api/ams/DEV123")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data.get("device_serial") == "DEV123"
+ assert data.get("online") is True
+
+ resp2 = client.get("/api/ams/NONEXISTENT")
+ assert resp2.status_code == 404
+
+
+def test_empty_payload_device(client):
+ resp = client.get("/api/ams/DEV_EMPTY")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data.get("device_serial") == "DEV_EMPTY"
+ assert data.get("online") is False
+ assert data.get("ams_units") == []
diff --git a/tests/test_ams_end_to_end.py b/tests/test_ams_end_to_end.py
new file mode 100644
index 0000000..913a60c
--- /dev/null
+++ b/tests/test_ams_end_to_end.py
@@ -0,0 +1,57 @@
+from app.services.ams_parser import parse_ams
+from app.services.universal_mapper import UniversalMapper
+from app.services.printer_data import PrinterData
+from app.services.ams_sync import sync_ams_slots
+from tests.ams_test_data import SINGLE_AMS_JSON, MULTI_AMS_JSON, EDGE_AMS_JSON
+from tests.conftest import DummySpool
+
+
+def _map_units(raw_json, mode="multi"):
+ mapper = UniversalMapper()
+ mapper._get_setting_value = lambda key, default=None: mode
+ out = PrinterData()
+ mapper.map_ams_block(raw_json, out)
+ return out
+
+
+def test_end_to_end_single(fake_session_env):
+ spools, _ = fake_session_env
+ spools.append(DummySpool(tray_uuid="UUID-A0-S1", remain_percent=0))
+
+ parsed = parse_ams(SINGLE_AMS_JSON)
+ out = _map_units(SINGLE_AMS_JSON, mode="single")
+ assert len(parsed) == 1
+ assert len(out.ams_units) == 1
+
+ updated = sync_ams_slots(out.ams_units, auto_create=False)
+ assert updated >= 1
+ assert spools[0].remain_percent == 0.0 or spools[0].remain_percent == parsed[0]["trays"][1].get("remain_percent", 0.0)
+ # PrinterData should be serializable
+ as_dict = out.to_dict()
+ assert "ams_units" in as_dict
+
+
+def test_end_to_end_multi(fake_session_env):
+ spools, _ = fake_session_env
+ spools.append(DummySpool(tray_uuid="UUID-A1-S2", remain_percent=5))
+
+ parsed = parse_ams(MULTI_AMS_JSON)
+ out = _map_units(MULTI_AMS_JSON, mode="multi")
+ assert len(parsed) == 2
+ assert len(out.ams_units) == 2
+
+ updated = sync_ams_slots(out.ams_units, auto_create=False)
+ assert updated >= 1
+
+
+def test_end_to_end_edge(fake_session_env):
+ spools, _ = fake_session_env
+ spools.append(DummySpool(tray_uuid="UUID-E2", remain_percent=2))
+
+ parsed = parse_ams(EDGE_AMS_JSON)
+ out = _map_units(EDGE_AMS_JSON, mode="multi")
+ assert len(parsed) == 1
+ assert len(out.ams_units) == 1
+
+ updated = sync_ams_slots(out.ams_units, auto_create=False)
+ assert updated >= 1
diff --git a/tests/test_ams_mapper.py b/tests/test_ams_mapper.py
new file mode 100644
index 0000000..6262cc2
--- /dev/null
+++ b/tests/test_ams_mapper.py
@@ -0,0 +1,46 @@
+import pytest
+
+from app.services.universal_mapper import UniversalMapper
+from app.services.printer_data import PrinterData
+from app.services.ams_parser import parse_ams
+from tests.ams_test_data import MULTI_AMS_JSON, EDGE_AMS_JSON, EMPTY_AMS_JSON
+
+
+def _run_mapper(json_payload, mode="single"):
+ mapper = UniversalMapper()
+ mapper._get_setting_value = lambda key, default=None: mode
+ out = PrinterData()
+ mapper.map_ams_block(json_payload, out)
+ return out
+
+
+def test_single_mode_uses_first_module():
+ out = _run_mapper(MULTI_AMS_JSON, mode="single")
+ assert len(out.ams_units) == 1
+ assert out.ams_units[0]["ams_id"] == 0
+
+
+def test_multi_mode_returns_all():
+ out = _run_mapper(MULTI_AMS_JSON, mode="multi")
+ assert len(out.ams_units) == 2
+ assert out.ams_units[1]["ams_id"] == 1
+
+
+@pytest.mark.parametrize("payload", EMPTY_AMS_JSON)
+def test_empty_payload_returns_empty_units(payload):
+ out = _run_mapper(payload, mode="multi")
+ assert out.ams_units == []
+
+
+def test_edge_trays_no_crash():
+ out = _run_mapper(EDGE_AMS_JSON, mode="multi")
+ assert len(out.ams_units) == 1
+ assert len(out.ams_units[0]["trays"]) == 4
+
+
+def test_mapper_uses_real_parser():
+ # ensure parse_ams runs and produces list
+ parsed = parse_ams(MULTI_AMS_JSON)
+ assert isinstance(parsed, list)
+ out = _run_mapper(MULTI_AMS_JSON, mode="multi")
+ assert len(out.ams_units) == len(parsed)
diff --git a/tests/test_ams_mapper_multi.py b/tests/test_ams_mapper_multi.py
new file mode 100644
index 0000000..e4c2a7a
--- /dev/null
+++ b/tests/test_ams_mapper_multi.py
@@ -0,0 +1,108 @@
+import pytest
+
+from app.services.ams_parser import parse_ams
+from app.services.universal_mapper import UniversalMapper
+from app.services.printer_data import PrinterData
+
+# -----------------------------
+# Testdaten
+# -----------------------------
+TEST_JSON_MULTI = {
+ "ams": {
+ "modules": [
+ {
+ "ams_id": 0,
+ "active_tray": 1,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": "UUID-A0-S0", "material": "PLA"},
+ {"tray_id": 1, "tray_uuid": "UUID-A0-S1", "material": "PETG"},
+ {"tray_id": 2, "tray_uuid": None, "material": None},
+ {"tray_id": 3, "tray_uuid": "UUID-A0-S3", "material": "ABS"},
+ ],
+ },
+ {
+ "ams_id": 1,
+ "active_tray": 2,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": "UUID-A1-S0", "material": "PA"},
+ {"tray_id": 1, "tray_uuid": "UUID-A1-S1", "material": None},
+ {"tray_id": 2, "tray_uuid": "UUID-A1-S2", "material": "TPU"},
+ {"tray_id": 3, "tray_uuid": None, "material": None},
+ ],
+ },
+ ]
+ }
+}
+
+TEST_JSON_EDGE = {
+ "ams": {
+ "modules": [
+ {
+ "ams_id": 2,
+ "active_tray": None,
+ "tray_count": 4,
+ "trays": [
+ {"tray_id": 0, "tray_uuid": None, "material": None},
+ {"tray_id": 1, "tray_uuid": "UUID-E1", "material": None},
+ {"tray_id": 2, "tray_uuid": "UUID-E2", "material": "PLA"},
+ {"tray_id": 3, "tray_uuid": None, "material": None},
+ ],
+ }
+ ]
+ }
+}
+
+
+def test_multi_ams_two_modules():
+ mapper = UniversalMapper()
+ mapper._get_setting_value = lambda key, default=None: "multi"
+ out = PrinterData()
+
+ mapper.map_ams_block(TEST_JSON_MULTI, out)
+
+ assert len(out.ams_units) == 2
+ assert out.ams_units[0]["ams_id"] == 0
+ assert out.ams_units[0]["active_tray"] == 1
+ assert len(out.ams_units[0]["trays"]) == 4
+ assert out.ams_units[1]["ams_id"] == 1
+ assert out.ams_units[1]["active_tray"] == 2
+ assert len(out.ams_units[1]["trays"]) == 4
+
+
+def test_single_mode_reduces_to_first():
+ mapper = UniversalMapper()
+ mapper._get_setting_value = lambda key, default=None: "single"
+ out = PrinterData()
+
+ mapper.map_ams_block(TEST_JSON_MULTI, out)
+
+ assert len(out.ams_units) == 1
+ assert out.ams_units[0]["ams_id"] == 0
+ assert len(out.ams_units[0]["trays"]) == 4
+
+
+@pytest.mark.parametrize(
+ "payload",
+ [{}, {"ams": None}, {"print": {}}],
+)
+def test_empty_ams_payload(payload):
+ mapper = UniversalMapper()
+ mapper._get_setting_value = lambda key, default=None: "multi"
+ out = PrinterData()
+
+ mapper.map_ams_block(payload, out)
+
+ assert out.ams_units == []
+
+
+def test_edge_case_incomplete_trays():
+ mapper = UniversalMapper()
+ mapper._get_setting_value = lambda key, default=None: "multi"
+ out = PrinterData()
+
+ mapper.map_ams_block(TEST_JSON_EDGE, out)
+
+ assert len(out.ams_units) == 1
+ assert len(out.ams_units[0]["trays"]) == 4
diff --git a/tests/test_ams_parser.py b/tests/test_ams_parser.py
new file mode 100644
index 0000000..1af79da
--- /dev/null
+++ b/tests/test_ams_parser.py
@@ -0,0 +1,46 @@
+import pytest
+
+from app.services.ams_parser import parse_ams
+from tests.ams_test_data import (
+ SINGLE_AMS_JSON,
+ MULTI_AMS_JSON,
+ EDGE_AMS_JSON,
+ OLD_FORMAT_AMS_JSON,
+ EMPTY_AMS_JSON,
+)
+
+
+def test_single_ams_parsed():
+ result = parse_ams(SINGLE_AMS_JSON)
+ assert isinstance(result, list)
+ assert len(result) == 1
+ unit = result[0]
+ assert unit.get("ams_id") == 0
+ assert unit.get("active_tray") == 1
+ assert len(unit.get("trays") or []) == 4
+
+
+def test_multi_ams_parsed():
+ result = parse_ams(MULTI_AMS_JSON)
+ assert len(result) == 2
+ assert result[1].get("ams_id") == 1
+ assert result[1].get("active_tray") == 2
+ assert len(result[1].get("trays") or []) == 4
+
+
+def test_edge_trays_no_crash():
+ result = parse_ams(EDGE_AMS_JSON)
+ assert len(result) == 1
+ assert len(result[0].get("trays") or []) == 4
+
+
+def test_old_format_trays():
+ result = parse_ams(OLD_FORMAT_AMS_JSON)
+ # parser darf leere Liste zurückgeben; wichtig ist, dass kein Fehler auftritt
+ assert isinstance(result, list)
+
+
+@pytest.mark.parametrize("payload", EMPTY_AMS_JSON)
+def test_empty_payload_returns_empty_list(payload):
+ result = parse_ams(payload)
+ assert result == []
diff --git a/tests/test_ams_sync.py b/tests/test_ams_sync.py
new file mode 100644
index 0000000..0939db8
--- /dev/null
+++ b/tests/test_ams_sync.py
@@ -0,0 +1,60 @@
+from app.services.ams_sync import sync_ams_slots
+from tests.conftest import DummySpool
+
+
+def test_sync_normal(fake_session_env):
+ spools, _ = fake_session_env
+ # Clear any existing spools from other tests
+ spools.clear()
+ # Test with tray_uuid matching
+ spool = DummySpool(tray_uuid="UUID-A1", remain_percent=10)
+ spools.append(spool)
+
+ ams_units = [
+ {
+ "trays": [
+ {"tray_id": 1, "tray_uuid": "UUID-A1", "remain_percent": 60},
+ ]
+ }
+ ]
+
+ updated = sync_ams_slots(ams_units, printer_id="P1", auto_create=False)
+ assert updated == 1
+ assert spool.remain_percent == 60
+
+
+def test_sync_multi_ams(fake_session_env):
+ spools, _ = fake_session_env
+ # Clear any existing spools from other tests
+ spools.clear()
+ spool1 = DummySpool(tray_uuid="UUID-A1-S2", remain_percent=1)
+ spools.append(spool1)
+ ams_units = [
+ {
+ "ams_id": 0,
+ "trays": [{"tray_id": 2, "tray_uuid": "UUID-A1-S2", "remain_percent": 55}],
+ },
+ ]
+
+ updated = sync_ams_slots(ams_units, printer_id="P2", auto_create=False)
+ # nur ein Match vorhanden
+ assert updated == 1
+ assert spool1.remain_percent == 55
+
+
+def test_sync_empty_trays(fake_session_env):
+ spools, _ = fake_session_env
+ spools.append(DummySpool(tag_uid="X"))
+ ams_units = [{"trays": []}]
+ updated = sync_ams_slots(ams_units, auto_create=False)
+ assert updated == 0
+ assert spools[0].remain_percent == 0.0
+
+
+def test_sync_invalid_tray_uuid(fake_session_env):
+ spools, _ = fake_session_env
+ spools.append(DummySpool(tray_uuid="UUID-GOOD", remain_percent=15))
+ ams_units = [{"trays": [{"tray_id": None, "tray_uuid": None, "remain_percent": 80}]}]
+ updated = sync_ams_slots(ams_units, auto_create=False)
+ assert updated == 0
+ assert spools[0].remain_percent == 15
diff --git a/tests/test_config_routes.py b/tests/test_config_routes.py
new file mode 100644
index 0000000..3f23094
--- /dev/null
+++ b/tests/test_config_routes.py
@@ -0,0 +1,119 @@
+import json
+from pathlib import Path
+
+import pytest
+from fastapi.testclient import TestClient
+from sqlmodel import Session, delete
+
+from app.database import engine
+from app.main import app
+from app.models.settings import Setting
+from app.routes.config_routes import DEFAULT_CONFIG, _validate_payload
+
+ROOT_DIR = Path(__file__).resolve().parents[1]
+CONFIG_PATH = ROOT_DIR / "config.json"
+
+
+def _read_config():
+ return CONFIG_PATH.read_text(encoding="utf-8") if CONFIG_PATH.exists() else ""
+
+
+def _restore_config(content: str):
+ if content == "":
+ if CONFIG_PATH.exists():
+ CONFIG_PATH.unlink()
+ return
+ CONFIG_PATH.write_text(content, encoding="utf-8")
+
+
+def test_get_config_current_applies_file_fallback(tmp_path):
+ original = _read_config()
+ bad_cfg = {
+ "debug": {
+ "system_health": {
+ "warn_latency_ms": 10,
+ "error_latency_ms": 20,
+ }
+ }
+ }
+ CONFIG_PATH.write_text(json.dumps(bad_cfg), encoding="utf-8")
+ client = TestClient(app)
+ try:
+ response = client.get("/api/config/current")
+ assert response.status_code == 200
+ payload = response.json()
+ assert payload["debug"]["system_health"]["warn_latency_ms"] == DEFAULT_CONFIG["debug"]["system_health"]["warn_latency_ms"]
+ assert payload["debug"]["system_health"]["error_latency_ms"] == DEFAULT_CONFIG["debug"]["system_health"]["error_latency_ms"]
+ finally:
+ _restore_config(original)
+
+
+@pytest.mark.usefixtures("reset_db")
+def test_get_config_current_applies_db_overrides():
+ keys = [
+ "debug.runtime.enabled",
+ "debug.runtime.poll_interval_ms",
+ ]
+ with Session(engine) as session:
+ session.exec(delete(Setting).where(Setting.key.in_(keys)))
+ session.add_all(
+ [
+ Setting(key="debug.runtime.enabled", value="false"),
+ Setting(key="debug.runtime.poll_interval_ms", value="700"),
+ ]
+ )
+ session.commit()
+ client = TestClient(app)
+ try:
+ response = client.get("/api/config/current")
+ assert response.status_code == 200
+ data = response.json()
+ assert data["debug"]["runtime"]["enabled"] is False
+ assert data["debug"]["runtime"]["poll_interval_ms"] == 700
+ finally:
+ with Session(engine) as session:
+ session.exec(delete(Setting).where(Setting.key.in_(keys)))
+ session.commit()
+
+
+def test_update_config_persists_sanitized_payload():
+ client = TestClient(app)
+ payload = {
+ "logging": {
+ "level": "NOTLEVEL",
+ "modules": {
+ "app": {"enabled": False},
+ "mqtt": {"enabled": "yes"},
+ },
+ "keep_days": -1,
+ }
+ }
+ response = client.put("/api/config", json=payload)
+ assert response.status_code == 200
+ body = response.json()
+ assert body["logging"]["level"] == DEFAULT_CONFIG["logging"]["level"]
+ assert body["logging"]["modules"]["app"]["enabled"] is True
+ assert body["logging"]["modules"]["mqtt"]["enabled"] is False
+ assert body["logging"]["keep_days"] == DEFAULT_CONFIG["logging"]["keep_days"]
+ with Session(engine) as session:
+ for key in [
+ "logging.level",
+ "logging.modules.app",
+ "logging.modules.mqtt",
+ "logging.keep_days",
+ ]:
+ assert session.get(Setting, key) is not None
+ session.exec(delete(Setting).where(Setting.key.like("logging%")))
+ session.commit()
+
+
+def test_validate_payload_parses_fingerprint_ports_list():
+ validated = _validate_payload({"fingerprint.ports": "8883,6000"})
+ assert validated["fingerprint.ports"] == [8883, 6000]
+ fallback = _validate_payload({"fingerprint.ports": "invalid"})
+ assert fallback["fingerprint.ports"] == DEFAULT_CONFIG["fingerprint"]["ports"]
+
+
+def test_validate_payload_accepts_flat_module_bool():
+ validated = _validate_payload({"logging.modules.app": False})
+ assert validated["logging.modules.app"] is False
diff --git a/tests/test_database_module.py b/tests/test_database_module.py
new file mode 100644
index 0000000..f5d48e8
--- /dev/null
+++ b/tests/test_database_module.py
@@ -0,0 +1,117 @@
+import builtins
+import importlib
+import os
+import types
+
+import pytest
+
+from app.database import engine, init_db, run_migrations
+
+
+class DummyConnection:
+ def __init__(self, responses):
+ self._responses = iter(responses)
+ self.executed = []
+
+ def exec_driver_sql(self, value):
+ next_value = next(self._responses, None)
+ return types.SimpleNamespace(fetchone=lambda: next_value)
+
+ def execute(self, value):
+ self.executed.append(value)
+
+
+class DummyContext:
+ def __init__(self, connection):
+ self.connection = connection
+
+ def __enter__(self):
+ return self.connection
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return False
+
+
+@pytest.fixture(autouse=True)
+def restore_engine(monkeypatch):
+ orig_connect = engine.connect
+ orig_begin = engine.begin
+ yield
+ monkeypatch.setattr(engine, "connect", orig_connect)
+ monkeypatch.setattr(engine, "begin", orig_begin)
+
+
+def test_init_db_runs_migrations_and_sets_pragma(monkeypatch):
+ executed = []
+
+ class FakeConn:
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return False
+
+ def execute(self, value):
+ executed.append(value)
+
+ def fake_connect():
+ return FakeConn()
+
+ called = {"run": 0}
+
+ monkeypatch.setattr("app.database.engine.connect", fake_connect)
+ monkeypatch.setattr("app.database.run_migrations", lambda: called.__setitem__("run", 1))
+
+ init_db()
+
+ assert any("PRAGMA foreign_keys=ON" in str(value) for value in executed)
+ assert called["run"] == 1
+
+
+def test_run_migrations_handles_missing_alembic(monkeypatch):
+ real_import = builtins.__import__
+
+ def fake_import(name, globals=None, locals=None, fromlist=(), level=0):
+ if name.startswith("alembic"):
+ raise ImportError("stub")
+ return real_import(name, globals, locals, fromlist, level)
+
+ monkeypatch.setattr(builtins, "__import__", fake_import)
+
+ # Should simply return without raising
+ run_migrations()
+
+ monkeypatch.setattr(builtins, "__import__", real_import)
+
+
+def test_run_migrations_stamps_when_version_missing(monkeypatch):
+ fake_conn = DummyConnection([None, ("material",)])
+ monkeypatch.setattr("app.database.engine.begin", lambda: DummyContext(fake_conn))
+
+ fake_alembic = types.ModuleType("alembic")
+ fake_command = types.SimpleNamespace(stamp=lambda cfg, rev: setattr(fake_command, "stamped", True),
+ upgrade=lambda cfg, rev: setattr(fake_command, "upgraded", True))
+ fake_alembic.command = fake_command
+
+ class FakeConfig:
+ def __init__(self, path):
+ self.path = path
+
+ def set_main_option(self, key, value):
+ setattr(self, key, value)
+
+ fake_config_module = types.ModuleType("alembic.config")
+ fake_config_module.Config = FakeConfig
+
+ monkeypatch.setitem(importlib.sys.modules, "alembic", fake_alembic)
+ monkeypatch.setitem(importlib.sys.modules, "alembic.command", fake_command)
+ monkeypatch.setitem(importlib.sys.modules, "alembic.config", fake_config_module)
+
+ # Force alembic.ini path to exist
+ monkeypatch.setattr(os.path, "exists", lambda path: True)
+
+ run_migrations()
+
+ assert getattr(fake_command, "stamped", False) is True
+ assert getattr(fake_command, "upgraded", False) is False
+
diff --git a/tests/test_database_routes_api.py b/tests/test_database_routes_api.py
new file mode 100644
index 0000000..3f95e2f
--- /dev/null
+++ b/tests/test_database_routes_api.py
@@ -0,0 +1,87 @@
+import sqlite3
+import os
+import importlib
+from pathlib import Path
+
+from fastapi.testclient import TestClient
+
+from app.main import app
+import app.routes.database_routes as db_routes
+
+
+def make_test_db(tmp_path: Path):
+ db_path = tmp_path / "filament_test.db"
+ conn = sqlite3.connect(str(db_path))
+ cur = conn.cursor()
+ # Minimal table used by endpoints
+ cur.execute("CREATE TABLE material (id TEXT PRIMARY KEY, name TEXT)")
+ cur.execute("INSERT INTO material (id, name) VALUES (?,?)", ("m1", "Mat1"))
+ conn.commit()
+ conn.close()
+
+ # Point the module to the test DB
+ db_routes.DB_PATH = str(db_path)
+ return db_path
+
+
+def test_get_database_info(tmp_path):
+ db = make_test_db(tmp_path)
+
+ client = TestClient(app)
+ resp = client.get('/api/database/info')
+
+ assert resp.status_code == 200
+ data = resp.json()
+ assert isinstance(data, dict)
+ assert data.get('exists') is True
+ assert 'tables' in data
+ assert 'material' in data.get('tables', [])
+
+
+def test_post_editor_create_row_and_query(tmp_path):
+ make_test_db(tmp_path)
+
+ client = TestClient(app)
+
+ # Insert new row via editor endpoint
+ payload = {"sql": "INSERT INTO material (id, name) VALUES ('m2', 'Mat2')"}
+ resp = client.post('/api/database/editor', json=payload)
+
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data.get('success') is True
+
+ # Verify with query endpoint
+ qresp = client.get('/api/database/query', params={"sql": "SELECT * FROM material WHERE id='m2'"})
+ assert qresp.status_code == 200
+ qdata = qresp.json()
+ assert qdata.get('success') is True
+ assert qdata.get('row_count', 0) == 1
+
+
+def test_delete_row_success(tmp_path):
+ make_test_db(tmp_path)
+
+ client = TestClient(app)
+
+ # Delete existing row (m1)
+ resp = client.delete('/api/database/row', params={"table": "material", "id": "m1"})
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data.get('success') is True
+
+ # Deleting again should return 404 from endpoint
+ resp2 = client.delete('/api/database/row', params={"table": "material", "id": "m1"})
+ assert resp2.status_code == 404
+
+
+def test_post_editor_invalid_payload(tmp_path):
+ make_test_db(tmp_path)
+
+ client = TestClient(app)
+
+ # Empty SQL -> should return 400
+ resp = client.post('/api/database/editor', json={"sql": ""})
+ assert resp.status_code == 400
+ err = resp.json()
+ assert 'detail' in err
diff --git a/tests/test_debug_system_routes.py b/tests/test_debug_system_routes.py
new file mode 100644
index 0000000..d3ba8b5
--- /dev/null
+++ b/tests/test_debug_system_routes.py
@@ -0,0 +1,95 @@
+import time
+
+import pytest
+from fastapi.testclient import TestClient
+
+from app.main import app
+from app.routes import debug_system_routes as dsr
+
+
+class DummyMQTTClient:
+ def __init__(self, connected: bool = True):
+ self._connected = connected
+
+ def is_connected(self):
+ if self._connected is None:
+ raise RuntimeError("state unknown")
+ return self._connected
+
+
+@pytest.fixture(autouse=True)
+def reset_debug_state():
+ orig_clients = dict(dsr.mqtt_clients)
+ orig_connections = set(dsr.active_connections)
+ orig_last_ws = dsr.last_ws_activity_ts
+ orig_ws_clients = dsr.active_ws_clients
+ orig_last_err = dsr.last_connect_error
+ orig_build_env = dsr.build_environment_snapshot
+ orig_get_runtime = dsr.get_runtime_metrics
+ orig_load_config = dsr._load_config
+
+ yield
+
+ dsr.mqtt_clients.clear()
+ dsr.mqtt_clients.update(orig_clients)
+ dsr.active_connections.clear()
+ dsr.active_connections.update(orig_connections)
+ dsr.last_ws_activity_ts = orig_last_ws
+ dsr.active_ws_clients = orig_ws_clients
+ dsr.last_connect_error = orig_last_err
+ dsr.build_environment_snapshot = orig_build_env
+ dsr.get_runtime_metrics = orig_get_runtime
+ dsr._load_config = orig_load_config
+
+
+def _patch_common(monkeypatch, avg_ms, rpm, config=None):
+ monkeypatch.setattr(dsr, "build_environment_snapshot", lambda req: {"mode": "test"})
+ monkeypatch.setattr(
+ dsr,
+ "get_runtime_metrics",
+ lambda: {"avg_response_ms": avg_ms, "requests_per_minute": rpm, "avg_response_ms": avg_ms},
+ )
+ if config is None:
+ config = {"debug": {"system_health": {"enabled": True, "warn_latency_ms": 100, "error_latency_ms": 200}}}
+ monkeypatch.setattr(dsr, "_load_config", lambda session: config)
+
+
+def test_system_status_reports_disabled_mqtt_and_critical_health(monkeypatch):
+ _patch_common(monkeypatch, avg_ms=250, rpm=10)
+ dsr.mqtt_clients.clear()
+ dsr.active_connections.clear()
+ dsr.active_ws_clients = 0
+ dsr.last_ws_activity_ts = None
+ client = TestClient(app)
+
+ resp = client.get("/api/debug/system_status")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["mqtt"]["state"] == "disabled"
+ assert body["websocket"]["state"] in ("listening", "idle")
+ assert body["runtime"]["state"] == "active"
+ assert body["system_health"]["status"] == "critical"
+ assert any("High average response time" in reason for reason in body["system_health"]["reasons"])
+ assert body["environment"]["mode"] == "test"
+
+
+def test_system_status_handles_connected_mqtt_and_missing_latency(monkeypatch):
+ _patch_common(monkeypatch, avg_ms=None, rpm=0)
+ dsr.mqtt_clients.clear()
+ dsr.mqtt_clients["host:1883"] = DummyMQTTClient(connected=True)
+ dsr.active_connections.clear()
+ dsr.active_connections.add(object())
+ dsr.active_ws_clients = 1
+ dsr.last_ws_activity_ts = time.time()
+ dsr.last_connect_error = 404
+ client = TestClient(app)
+
+ resp = client.get("/api/debug/system_status")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["mqtt"]["state"] == "connected"
+ assert body["mqtt"]["last_error"] == "404"
+ assert body["websocket"]["state"] == "connected"
+ assert "Average response time not available" in body["system_health"]["reasons"]
+ assert "Nicht verbunden" not in body["system_health"]["reasons"]
+ assert body["system_health"]["status"] == "warning"
diff --git a/tests/test_eta_calculation.py b/tests/test_eta_calculation.py
new file mode 100644
index 0000000..10ed717
--- /dev/null
+++ b/tests/test_eta_calculation.py
@@ -0,0 +1,16 @@
+from app.services.job_tracking_service import JobTrackingService
+from app.models.spool import Spool
+
+
+def test_calc_usage_basic():
+ svc = JobTrackingService()
+ # Spool with known full/empty weights
+ spool = Spool(material_id="m1", weight_full=1000.0, weight_empty=200.0)
+
+ used_mm, used_g = svc._calc_usage(spool, start_remain=100.0, end_remain=90.0, start_total_len=10000)
+
+ # 10% of 10000 mm -> 1000 mm
+ assert abs(used_mm - 1000.0) < 1e-6
+ # 10% of weight difference (800g) -> 80g
+ assert abs(used_g - 80.0) < 1e-6
+
diff --git a/tests/test_job_resume_finish.py b/tests/test_job_resume_finish.py
new file mode 100644
index 0000000..06a5c9d
--- /dev/null
+++ b/tests/test_job_resume_finish.py
@@ -0,0 +1,34 @@
+from sqlmodel import SQLModel, create_engine, Session
+from app.services.job_tracking_service import JobTrackingService
+from app.models.spool import Spool
+
+
+def test_finalize_current_with_spool():
+ # In-memory engine for isolated test
+ engine = create_engine("sqlite:///:memory:")
+ SQLModel.metadata.create_all(engine)
+
+ # Insert a spool
+ spool = Spool(material_id="m1", weight_full=1200.0, weight_empty=200.0)
+ with Session(engine) as session:
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ svc = JobTrackingService()
+
+ info = {
+ "spool_id": spool.id,
+ "slot": 1,
+ "start_remain": 100.0,
+ "last_remain": 95.0,
+ "start_total_len": 20000,
+ }
+
+ res = svc._finalize_current(session, info)
+ assert res is not None
+ # 5% of 20000 mm -> 1000 mm
+ assert abs(res["used_mm"] - 1000.0) < 1e-6
+ # 5% of (1200-200)=1000g -> 50g
+ assert abs(res["used_g"] - 50.0) < 1e-6
+
diff --git a/tests/test_job_settings_mqtt_runtime.py b/tests/test_job_settings_mqtt_runtime.py
new file mode 100644
index 0000000..93fb372
--- /dev/null
+++ b/tests/test_job_settings_mqtt_runtime.py
@@ -0,0 +1,104 @@
+import json
+import os
+
+from fastapi.testclient import TestClient
+from sqlmodel import Session, delete
+
+from app.main import app
+from app.models.settings import Setting
+from app.routes.settings_routes import (
+ DEFAULTS,
+ PRO_CONFIG_DEFAULTS,
+ router as settings_router,
+ _normalize_float,
+ _normalize_int,
+ _normalize_bool,
+ _normalize_enum,
+)
+from app.services.job_parser import parse_job
+from app.services import mqtt_runtime as mr
+
+client = TestClient(app)
+
+
+def test_parse_job_with_printer_map():
+ payload = {
+ "printer": {
+ "state": "PRINTING",
+ "temperature": {"nozzle": 215, "bed": 60},
+ "layer": {"current": 3, "total": 10},
+ "job": {"time_remaining": "1200", "file": "part.gcode"},
+ "progress": "45",
+ }
+ }
+ parsed = parse_job(payload)
+ assert parsed["gcode_state"] == "PRINTING"
+ assert parsed["progress_percent"] == 45
+ assert parsed["nozzle_temp"] == 215
+ assert parsed["layer_total"] == 10
+
+
+def test_parse_job_generic_payload():
+ payload = {
+ "print": {
+ "gcode_state": "RUNNING",
+ "percent": "55",
+ "remain_time": "800",
+ "job": {"file": "job.gcode"},
+ "ams": {"tray_tar": "2", "tray_now": "1"},
+ "vt_tray": {"id": "7", "tray_type": "ASM", "tray_color": "red"},
+ },
+ "upgrade_state": {"status": "idle"},
+ }
+ parsed = parse_job(payload)
+ assert parsed["gcode_state"] == "RUNNING"
+ assert parsed["progress_percent"] == 55
+ assert parsed["tray_target"] == 2
+ assert parsed["virtual_tray"]["id"] == 7
+ assert parsed["upgrade_state"] == "idle"
+
+
+def test_settings_get_and_update_defaults(tmp_path):
+ resp = client.get("/api/settings")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data["ams_mode"] in {"single", "multi"}
+ assert isinstance(data["debug.config.scanner_probe_timeout_ms"], int)
+
+ # update success and error
+ bad = client.put("/api/settings", json={"ams_mode": "wrong"})
+ assert bad.status_code == 400
+
+ resp = client.put("/api/settings", json={"ams_mode": "multi", "debug_center_mode": "pro", "cost.electricity_price_kwh": "0.5"})
+ assert resp.status_code == 200
+ assert resp.json()["ams_mode"] == "multi"
+ assert resp.json()["debug_center_mode"] == "pro"
+
+
+def test_normalizers():
+ assert _normalize_bool(None, True) is True
+ assert _normalize_bool("yes", False) is True
+ assert _normalize_int("50", 10) == 50
+ assert _normalize_int("x", 5) == 5
+ assert _normalize_float("2.3", 1.0) == 2.3
+ assert _normalize_float("-1", 1.0, minimum=0) == 1.0
+ assert _normalize_enum("verbose", {"off", "basic", "verbose"}, "basic") == "verbose"
+ assert _normalize_enum("none", {"off", "basic"}, "off") == "off"
+
+
+def test_mqtt_runtime_topic_stats():
+ mr._reset_topic_stats()
+ mr._record_topic(" device/test ")
+ mr._record_topic("device/test")
+ stats = mr._aggregate_topic_stats()
+ assert stats["message_count"] == 2
+ assert stats["last_message_time"] is not None
+
+ mr.register_subscription(" topic ")
+ assert "topic" in mr._subscribed_topics
+ mr.unregister_subscription("topic")
+ assert "topic" not in mr._subscribed_topics
+
+ messages_before = len(mr.get_messages())
+ mr._add_message("device/abc", "payload", mr.datetime.now(mr.timezone.utc))
+ assert len(mr.get_messages()) == min(messages_before + 1, mr._messages_max_size)
diff --git a/tests/test_job_snapshot_io.py b/tests/test_job_snapshot_io.py
new file mode 100644
index 0000000..44ecc87
--- /dev/null
+++ b/tests/test_job_snapshot_io.py
@@ -0,0 +1,86 @@
+import json
+from datetime import datetime
+from pathlib import Path
+from app.services.job_tracking_service import JobTrackingService
+
+
+def test_atomic_write_and_read(tmp_path):
+ svc = JobTrackingService()
+ svc.snapshots_file = Path(tmp_path) / "job_snapshots.json"
+
+ # write a snapshot
+ svc._save_snapshot(
+ cloud_serial="cloud-1",
+ printer_id="p1",
+ job_id="job-1",
+ job_name="TestJob",
+ slot=0,
+ layer_num=1,
+ mc_percent=10,
+ started_at=datetime.utcnow(),
+ filament_start_mm=123.4,
+ )
+
+ # read it back
+ loaded = svc._load_snapshot("cloud-1", "p1")
+ assert loaded is not None
+ assert loaded.get("job_id") == "job-1"
+ filament_start_mm = loaded.get("filament_start_mm")
+ assert filament_start_mm is not None
+ assert float(filament_start_mm) == 123.4
+
+
+def test_corrupt_snapshot_discard_and_overwrite(tmp_path):
+ svc = JobTrackingService()
+ svc.snapshots_file = Path(tmp_path) / "job_snapshots.json"
+
+ # create a corrupt file
+ svc.snapshots_file.parent.mkdir(parents=True, exist_ok=True)
+ with open(svc.snapshots_file, "wb") as f:
+ f.write(b"{ this is not valid json")
+
+ # _load_snapshot should detect corruption and return None (and not raise)
+ loaded = svc._load_snapshot("cloud-x", "px")
+ assert loaded is None
+
+ # _save_snapshot should overwrite the corrupt file and succeed
+ svc._save_snapshot(
+ cloud_serial="cloud-x",
+ printer_id="px",
+ job_id="job-x",
+ job_name="JobX",
+ slot=1,
+ layer_num=2,
+ mc_percent=50,
+ started_at=datetime.utcnow(),
+ )
+
+ # now the file should be valid JSON and loadable
+ with open(svc.snapshots_file, "r", encoding="utf-8") as f:
+ data = json.load(f)
+ assert isinstance(data, dict)
+ assert "cloud-x" in data or "px" in data
+
+
+def test_delete_snapshot(tmp_path):
+ svc = JobTrackingService()
+ svc.snapshots_file = Path(tmp_path) / "job_snapshots.json"
+
+ # add two snapshots
+ svc._save_snapshot("c1", "p1", "job-a", "A", 0, 0, 0, datetime.utcnow())
+ svc._save_snapshot("c2", "p2", "job-b", "B", 0, 0, 0, datetime.utcnow())
+
+ # ensure both exist
+ with open(svc.snapshots_file, "r", encoding="utf-8") as f:
+ all_data = json.load(f)
+ assert any(v.get("job_id") == "job-a" for v in all_data.values())
+ assert any(v.get("job_id") == "job-b" for v in all_data.values())
+
+ # delete one
+ svc._delete_snapshot("c1", "p1")
+
+ with open(svc.snapshots_file, "r", encoding="utf-8") as f:
+ all_data = json.load(f)
+
+ assert not any(v.get("job_id") == "job-a" for v in all_data.values())
+ assert any(v.get("job_id") == "job-b" for v in all_data.values())
diff --git a/tests/test_job_tracking_service.py b/tests/test_job_tracking_service.py
new file mode 100644
index 0000000..917fc63
--- /dev/null
+++ b/tests/test_job_tracking_service.py
@@ -0,0 +1,1062 @@
+"""
+Mini-Tests für JobTrackingService
+
+Testet:
+- Start → Update → Finish Lifecycle
+- Slot-Wechsel während Druck
+- Verbrauchsberechnung
+"""
+
+import pytest
+from datetime import datetime
+from pathlib import Path
+from sqlmodel import Session, select
+from app.database import engine
+from app.models.job import Job
+from app.models.spool import Spool
+from app.models.material import Material
+from app.models.printer import Printer
+from app.services.job_tracking_service import JobTrackingService
+
+
+@pytest.fixture
+def service():
+ """Neuer Service für jeden Test"""
+ return JobTrackingService()
+
+
+@pytest.fixture
+def test_printer():
+ """Test-Drucker anlegen"""
+ with Session(engine) as session:
+ printer = Printer(
+ name="Test X1C",
+ printer_type="bambu",
+ cloud_serial="01S00TEST123",
+ ip_address="192.168.1.100"
+ )
+ session.add(printer)
+ session.commit()
+ session.refresh(printer)
+ yield printer
+ # Cleanup
+ session.delete(printer)
+ session.commit()
+
+
+@pytest.fixture
+def test_material():
+ """Test-Material anlegen"""
+ with Session(engine) as session:
+ material = Material(
+ name="PLA Test",
+ brand="Bambu Lab",
+ color="#FF0000",
+ density=1.24,
+ diameter=1.75
+ )
+ session.add(material)
+ session.commit()
+ session.refresh(material)
+ yield material
+ # Cleanup
+ session.delete(material)
+ session.commit()
+
+
+@pytest.fixture
+def test_spool(test_printer, test_material):
+ """Test-Spule anlegen"""
+ with Session(engine) as session:
+ spool = Spool(
+ material_id=test_material.id,
+ printer_id=test_printer.id,
+ ams_slot=0,
+ weight_full=1000.0,
+ weight_empty=200.0,
+ weight_current=1000.0,
+ remain_percent=100.0
+ )
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+ yield spool
+ # Cleanup
+ session.delete(spool)
+ session.commit()
+
+
+def test_job_start(service, test_printer, test_spool):
+ """Test 1: Job Start erkennt PRINTING State und legt Job an"""
+ payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "test_model.3mf",
+ "ams": {
+ "tray_now": 0,
+ "tray_tar": 0
+ }
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ assert result is not None
+ assert result["status"] == "started"
+ assert "01S00TEST123" in service.active_jobs
+
+ # Prüfe DB
+ with Session(engine) as session:
+ job = session.get(Job, result["job_id"])
+ assert job is not None
+ assert job.status == "running"
+ assert job.name == "test_model.3mf"
+ assert job.spool_id == test_spool.id
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+
+def test_job_update_verbrauch(service, test_printer, test_spool):
+ """Test 2: Job Update berechnet Verbrauch korrekt"""
+ # Start Job
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "verbrauch_test.3mf",
+ "layer_num": 0,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_start = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_start
+ )
+ job_id = result["job_id"]
+
+ # Update: layer_num >= 1, Filament-Tracking startet
+ update_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 1,
+ "filament_used_mm": 1000.0, # Primärquelle
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_update = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_update
+ )
+
+ # Update 2: Verbrauch steigt
+ update_payload2 = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 5,
+ "filament_used_mm": 5000.0, # Primärquelle
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update_payload2,
+ printer_id=test_printer.id,
+ ams_data=ams_update
+ )
+
+ assert result["status"] == "updated"
+ # Verbrauch sollte Delta sein (5000 - 1000 = 4000mm)
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ assert job.filament_used_mm > 0, "Verbrauch sollte berechnet sein"
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+
+def test_job_finish(service, test_printer, test_spool):
+ """Test 3: Job Finish finalisiert Verbrauch und Status"""
+ # Start
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "finish_test.3mf",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # Finish: State FINISH mit Verbrauch
+ finish_payload = {
+ "print": {
+ "gcode_state": "FINISH",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_finish = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 75, # 25% verbraucht
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=finish_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_finish
+ )
+
+ assert result["status"] == "completed"
+ assert result["used_g"] > 0
+ assert "01S00TEST123" not in service.active_jobs # RAM cleanup
+
+ # Prüfe DB
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ assert job.status == "completed"
+ assert job.finished_at is not None
+ assert job.filament_used_g > 0
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+
+def test_slot_wechsel(service, test_printer, test_material):
+ """Test 4: Slot-Wechsel erzeugt Multi-Spool Tracking"""
+ with Session(engine) as session:
+ # Spule Slot 0
+ spool0 = Spool(
+ material_id=test_material.id,
+ printer_id=test_printer.id,
+ ams_slot=0,
+ weight_full=1000.0,
+ weight_empty=200.0,
+ remain_percent=100.0
+ )
+ # Spule Slot 1
+ spool1 = Spool(
+ material_id=test_material.id,
+ printer_id=test_printer.id,
+ ams_slot=1,
+ weight_full=1000.0,
+ weight_empty=200.0,
+ remain_percent=100.0
+ )
+ session.add(spool0)
+ session.add(spool1)
+ session.commit()
+ session.refresh(spool0)
+ session.refresh(spool1)
+
+ try:
+ # Start mit Slot 0
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "multi_color.3mf",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_start = [{
+ "trays": [
+ {"tray_id": 0, "remain": 100, "total_len": 100000},
+ {"tray_id": 1, "remain": 100, "total_len": 100000}
+ ]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_start
+ )
+ job_id = result["job_id"]
+
+ # Wechsel zu Slot 1
+ switch_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "ams": {"tray_now": 1, "tray_tar": 1}
+ }
+ }
+
+ ams_switch = [{
+ "trays": [
+ {"tray_id": 0, "remain": 80, "total_len": 100000}, # 20% verbraucht
+ {"tray_id": 1, "remain": 100, "total_len": 100000}
+ ]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=switch_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_switch
+ )
+
+ # Prüfe Multi-Spool Info im RAM
+ job_info = service.active_jobs.get("01S00TEST123")
+ assert job_info is not None
+ assert len(job_info["usages"]) == 1 # Ein finalisierter Slot
+ assert job_info["slot"] == 1 # Aktueller Slot
+
+ # Finish
+ finish_payload = {
+ "print": {
+ "gcode_state": "FINISH",
+ "ams": {"tray_now": 1, "tray_tar": 1}
+ }
+ }
+
+ ams_finish = [{
+ "trays": [
+ {"tray_id": 0, "remain": 80, "total_len": 100000},
+ {"tray_id": 1, "remain": 90, "total_len": 100000} # 10% verbraucht
+ ]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=finish_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_finish
+ )
+
+ assert result["status"] == "completed"
+
+ # Cleanup Job
+ job = session.get(Job, job_id)
+ if job:
+ session.delete(job)
+ session.commit()
+
+ finally:
+ # Cleanup Spools
+ session.delete(spool0)
+ session.delete(spool1)
+ session.commit()
+
+
+def test_cancelled_job(service, test_printer, test_spool):
+ """Test 5: Job Cancel setzt korrekten Status"""
+ # Start
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "cancelled_test.3mf",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # Cancel
+ cancel_payload = {
+ "print": {
+ "gcode_state": "CANCELLED",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=cancel_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ assert result["status"] == "cancelled"
+
+ # Cleanup
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ assert job.status == "cancelled"
+ session.delete(job)
+ session.commit()
+
+
+def test_error_states_mapping(service, test_printer, test_spool):
+ """Test 6: Error-State-Mapping für alle Bambu-Zustände"""
+ test_cases = [
+ ("FAILED", "failed"),
+ ("ERROR", "error"),
+ ("EXCEPTION", "exception"),
+ ("ABORT", "aborted"),
+ ("ABORTED", "aborted"),
+ ("STOPPED", "stopped"),
+ ("CANCELLED", "cancelled"),
+ ("CANCELED", "cancelled"),
+ ]
+
+ for gcode_state, expected_status in test_cases:
+ # Start Job
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": f"test_{gcode_state}.3mf",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # End mit Error State
+ end_payload = {
+ "print": {
+ "gcode_state": gcode_state,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=end_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ assert result["status"] == expected_status, f"State {gcode_state} sollte {expected_status} ergeben"
+
+ # Cleanup
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ if job:
+ session.delete(job)
+ session.commit()
+
+
+def test_verbrauch_berechnung(service, test_printer, test_material):
+ """Test 7: Verbrauchsberechnung mm + g ist korrekt"""
+ with Session(engine) as session:
+ spool = Spool(
+ material_id=test_material.id,
+ printer_id=test_printer.id,
+ ams_slot=0,
+ weight_full=1000.0, # 1kg voll
+ weight_empty=200.0, # 200g leer = 800g Filament
+ remain_percent=100.0
+ )
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ try:
+ # Start bei 100%
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "calc_test.3mf",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_start = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000 # 100m
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_start
+ )
+ job_id = result["job_id"]
+
+ # Finish bei 75% = 25% verbraucht
+ finish_payload = {
+ "print": {
+ "gcode_state": "FINISH",
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_finish = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 75, # 25% verbraucht
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=finish_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_finish
+ )
+
+ # Erwartung:
+ # 25% von 100000mm = 25000mm
+ # 25% von 800g = 200g
+ job = session.get(Job, job_id)
+ assert job.filament_used_mm == pytest.approx(25000, abs=10)
+ assert job.filament_used_g == pytest.approx(200, abs=5)
+
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+ finally:
+ session.delete(spool)
+ session.commit()
+
+
+def test_remain_increase_bug(service, test_printer, test_material):
+ """Test 8: Bambu Lab Bug - remain-Wert steigt (soll ignoriert werden)"""
+ with Session(engine) as session:
+ spool = Spool(
+ material_id=test_material.id,
+ printer_id=test_printer.id,
+ ams_slot=2,
+ weight_full=1000.0,
+ weight_empty=200.0,
+ remain_percent=100.0
+ )
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ try:
+ # Start bei 67%
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "remain_bug_test.3mf",
+ "layer_num": 0, # Noch kein Layer
+ "ams": {"tray_now": 2, "tray_tar": 2}
+ }
+ }
+
+ ams_start = [{
+ "trays": [{
+ "tray_id": 2,
+ "remain": 67,
+ "total_len": 330000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_start
+ )
+ job_id = result["job_id"]
+
+ # Update: layer_num >= 1 erreicht, Filament-Tracking startet
+ update_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 1, # Erster Layer
+ "filament_used_mm": 1000.0, # Primärquelle
+ "ams": {"tray_now": 2, "tray_tar": 2}
+ }
+ }
+
+ ams_update = [{
+ "trays": [{
+ "tray_id": 2,
+ "remain": 67,
+ "total_len": 330000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_update
+ )
+
+ # Prüfe: filament_start_mm sollte gesetzt sein
+ job_info = service.active_jobs.get("01S00TEST123")
+ assert job_info["filament_start_mm"] == 1000.0, "filament_start_mm sollte bei layer_num=1 gesetzt werden"
+ assert job_info["filament_started"] is True, "filament_started Flag sollte gesetzt sein"
+
+ # Update: remain STEIGT auf 72% (Bug in Bambu Lab Firmware)
+ update_payload2 = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 5,
+ "filament_used_mm": 2000.0, # Primärquelle weiterhin verfügbar
+ "ams": {"tray_now": 2, "tray_tar": 2}
+ }
+ }
+
+ ams_update2 = [{
+ "trays": [{
+ "tray_id": 2,
+ "remain": 72, # GESTIEGEN! (67 -> 72)
+ "total_len": 330000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update_payload2,
+ printer_id=test_printer.id,
+ ams_data=ams_update2
+ )
+
+ # Prüfe: Verbrauch sollte aus Primärquelle berechnet werden (2000 - 1000 = 1000mm)
+ # remain-Anstieg wird ignoriert, da Primärquelle verfügbar ist
+ job = session.get(Job, job_id)
+ assert job.filament_used_mm == pytest.approx(1000.0, abs=10), "Verbrauch sollte aus Primärquelle berechnet werden"
+
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+ finally:
+ session.delete(spool)
+ session.commit()
+
+
+def test_filament_start_at_layer_1(service, test_printer, test_spool):
+ """Test 9: Filament-Tracking startet erst bei layer_num >= 1"""
+ # Start Job (layer_num = 0)
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "layer_test.3mf",
+ "layer_num": 0, # Noch kein Layer
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # Prüfe: filament_start_mm sollte noch None sein
+ job_info = service.active_jobs.get("01S00TEST123")
+ assert job_info["filament_start_mm"] is None, "filament_start_mm sollte bei layer_num=0 noch None sein"
+ assert job_info["filament_started"] is False, "filament_started sollte noch False sein"
+
+ # Update: layer_num >= 1 erreicht
+ update_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 1, # Erster Layer
+ "filament_used_mm": 5000.0, # Primärquelle
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ # Prüfe: filament_start_mm sollte jetzt gesetzt sein
+ job_info = service.active_jobs.get("01S00TEST123")
+ assert job_info["filament_start_mm"] == 5000.0, "filament_start_mm sollte bei layer_num=1 gesetzt werden"
+ assert job_info["filament_started"] is True, "filament_started sollte True sein"
+
+ # Prüfe DB
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ assert job.filament_start_mm == 5000.0, "filament_start_mm sollte in DB gespeichert sein"
+ assert job.filament_used_mm == 0.0, "Verbrauch sollte noch 0 sein (kein Delta)"
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+
+def test_filament_delta_calculation(service, test_printer, test_spool):
+ """Test 10: Delta-Berechnung ab layer_num >= 1"""
+ # Start Job
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "delta_test.3mf",
+ "layer_num": 0,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # Layer 1: Filament-Tracking startet
+ update1_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 1,
+ "filament_used_mm": 1000.0, # Startwert
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update1_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ # Layer 5: Verbrauch sollte Delta sein
+ update2_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 5,
+ "filament_used_mm": 5000.0, # Aktueller Wert
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update2_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ # Prüfe: Verbrauch sollte Delta sein (5000 - 1000 = 4000mm)
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ assert job.filament_used_mm == pytest.approx(4000.0, abs=10), "Verbrauch sollte Delta sein (5000 - 1000)"
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+
+def test_filament_fallback_calculation(service, test_printer, test_spool):
+ """Test 11: Fallback-Berechnung wenn print.filament_used_mm fehlt"""
+ # Start Job
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "fallback_test.3mf",
+ "layer_num": 0,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000 # 100m
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # Layer 1: Kein filament_used_mm, aber total_len vorhanden → Fallback
+ update_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 1,
+ # Kein filament_used_mm!
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ # Prüfe: Fallback sollte verwendet werden
+ job_info = service.active_jobs.get("01S00TEST123")
+ assert job_info["filament_start_mm"] is not None, "filament_start_mm sollte aus Fallback berechnet sein"
+ assert job_info["using_fallback"] is True, "using_fallback Flag sollte gesetzt sein"
+ # Fallback: total_len * (1 - remain/100) = 100000 * (1 - 100/100) = 0
+ assert job_info["filament_start_mm"] == pytest.approx(0.0, abs=1), "Fallback sollte 0 sein bei 100% remain"
+
+ # Cleanup
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ if job:
+ session.delete(job)
+ session.commit()
+
+
+def test_filament_switch_to_primary(service, test_printer, test_spool):
+ """Test 12: Wechsel von Fallback zu Primärquelle"""
+ # Start Job
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "switch_test.3mf",
+ "layer_num": 0,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # Layer 1: Fallback (kein filament_used_mm)
+ update1_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 1,
+ # Kein filament_used_mm
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update1_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ job_info = service.active_jobs.get("01S00TEST123")
+ assert job_info["using_fallback"] is True, "Fallback sollte aktiv sein"
+
+ # Layer 5: Primärquelle wird verfügbar
+ update2_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 5,
+ "filament_used_mm": 5000.0, # Primärquelle jetzt verfügbar
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update2_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ # Prüfe: Wechsel zur Primärquelle
+ job_info = service.active_jobs.get("01S00TEST123")
+ assert job_info["using_fallback"] is False, "Fallback sollte deaktiviert sein"
+ # Verbrauch sollte aus Primärquelle berechnet werden
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ # Verbrauch = 5000 - filament_start_mm (aus Fallback)
+ assert job.filament_used_mm >= 0, "Verbrauch sollte berechnet sein"
+ # Cleanup
+ session.delete(job)
+ session.commit()
+
+
+def test_snapshot_restore_filament_state(test_printer, test_spool):
+ """Test 13: Snapshot-Restore übernimmt filament_start_mm und Flags"""
+ snapshot_path = Path("data/job_snapshots.json")
+ if snapshot_path.exists():
+ snapshot_path.unlink()
+
+ service = JobTrackingService()
+
+ # Start Job
+ start_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "subtask_name": "restore_test.3mf",
+ "layer_num": 0,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ ams_data = [{
+ "trays": [{
+ "tray_id": 0,
+ "remain": 100,
+ "total_len": 100000
+ }]
+ }]
+
+ result = service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=start_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+ job_id = result["job_id"]
+
+ # Update: layer_num >= 1, Primärquelle vorhanden
+ update_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 1,
+ "mc_percent": 5,
+ "filament_used_mm": 1000.0,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=update_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ assert snapshot_path.exists()
+
+ # Simuliere Neustart: neuer Service
+ restored_service = JobTrackingService()
+ restore_payload = {
+ "print": {
+ "gcode_state": "PRINTING",
+ "layer_num": 2,
+ "mc_percent": 6,
+ "ams": {"tray_now": 0, "tray_tar": 0}
+ }
+ }
+
+ result = restored_service.process_message(
+ cloud_serial="01S00TEST123",
+ parsed_payload=restore_payload,
+ printer_id=test_printer.id,
+ ams_data=ams_data
+ )
+
+ assert result["status"] == "restored"
+ job_info = restored_service.active_jobs.get("01S00TEST123")
+ assert job_info is not None
+ assert job_info["filament_started"] is True
+ assert job_info["filament_start_mm"] == 1000.0
+ assert job_info["using_fallback"] is False
+
+ # Cleanup
+ with Session(engine) as session:
+ job = session.get(Job, job_id)
+ if job:
+ session.delete(job)
+ session.commit()
+ if snapshot_path.exists():
+ snapshot_path.unlink()
diff --git a/tests/test_low_coverage_quickwins.py b/tests/test_low_coverage_quickwins.py
new file mode 100644
index 0000000..0dd20e2
--- /dev/null
+++ b/tests/test_low_coverage_quickwins.py
@@ -0,0 +1,75 @@
+from fastapi.testclient import TestClient
+from uuid import uuid4
+
+from app.main import app
+from app.database import get_session
+from tests.helpers import TEST_ADMIN_PASSWORD
+
+
+def _fake_empty_printer_session():
+ class DummyQuery:
+ def filter(self, *args, **kwargs):
+ return self
+
+ def all(self):
+ return []
+
+ class DummySession:
+ def query(self, *args, **kwargs):
+ return DummyQuery()
+
+ yield DummySession()
+
+
+def test_admin_login_success_sets_cookie():
+ client = TestClient(app)
+ response = client.post("/api/admin/login", data={"password": TEST_ADMIN_PASSWORD})
+ assert response.status_code == 200
+ assert response.json().get("success") is True
+ assert response.cookies.get("admin_token")
+
+
+def test_admin_login_without_password_fails():
+ client = TestClient(app)
+ response = client.post("/api/admin/login", data={})
+ assert response.status_code == 401
+ assert response.json().get("success") is False
+
+
+def test_admin_greeting_requires_auth():
+ """Greeting-Endpoint ist jetzt öffentlich lesbar (bewusste Änderung)"""
+ client = TestClient(app)
+ response = client.get("/api/admin/greeting")
+ # GEÄNDERT: Greeting ist jetzt öffentlich lesbar (kein admin_required)
+ assert response.status_code == 200
+ assert "greeting_text" in response.json()
+
+
+def test_create_material_duplicate_name_returns_conflict():
+ client = TestClient(app)
+ name = f"PLA Black {uuid4().hex}"
+ payload = {
+ "name": name,
+ "brand": "QuickTest",
+ "density": 1.24,
+ "diameter": 1.75,
+ }
+ first = client.post("/api/materials/", json=payload)
+ assert first.status_code == 201
+ second = client.post("/api/materials/", json=payload)
+ assert second.status_code == 409
+ assert "Material existiert bereits" in second.json().get("detail", "")
+
+
+def test_mqtt_topic_suggest_fallback_when_no_printers(monkeypatch):
+ app.dependency_overrides[get_session] = _fake_empty_printer_session
+ client = TestClient(app)
+ try:
+ response = client.get("/api/mqtt/topics/suggest")
+ assert response.status_code == 200
+ data = response.json()
+ assert "bambu_lab" in data
+ assert "device/+/report" in data["bambu_lab"]
+ assert "common" in data and "#"
+ finally:
+ app.dependency_overrides.pop(get_session, None)
diff --git a/tests/test_mapper.py b/tests/test_mapper.py
new file mode 100644
index 0000000..a13eb81
--- /dev/null
+++ b/tests/test_mapper.py
@@ -0,0 +1,78 @@
+import os, sys, json, ssl
+import paho.mqtt.client as mqtt
+
+# Projektroot einbinden
+ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+from app.services.universal_mapper import UniversalMapper
+
+# ==== HIER DEINE DRUCKERDATEN EINTRAGEN ====
+PRINTER_IP = "192.168.178.41" # <- deine X1C IP
+USERNAME = "bblp" # <- fest
+API_KEY = "" # <- dein Key
+MODEL = "X1C"
+# ===========================================
+# Datei, in die wir die erste Payload speichern
+OUTPUT_FILE = os.path.join("tests", "fixtures", "x1c_idle.json")
+
+mapper = UniversalMapper(MODEL)
+_received_once = False
+
+
+def on_connect(client, userdata, flags, rc):
+ print("[MQTT] Verbunden, rc =", rc)
+ client.subscribe("device/+/report")
+ print("[MQTT] Warte auf erste Live-Daten...")
+
+
+def on_message(client, userdata, msg):
+ global _received_once
+ if _received_once:
+ return # nur erste Message interessiert uns
+
+ _received_once = True
+
+ print("\n=== ERSTE RAW PAYLOAD ===")
+ try:
+ raw = json.loads(msg.payload.decode("utf-8"))
+ except Exception as e:
+ print("[ERROR] JSON decode:", e)
+ client.disconnect()
+ return
+
+ # Schön anzeigen
+ print(json.dumps(raw, indent=2, ensure_ascii=False))
+
+ # In Datei speichern
+ os.makedirs(os.path.dirname(OUTPUT_FILE), exist_ok=True)
+ with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
+ json.dump(raw, f, indent=2, ensure_ascii=False)
+ print(f"\n[INFO] Payload in {OUTPUT_FILE} gespeichert.")
+
+ # Direkt auch gemappte Daten anzeigen
+ mapped = mapper.map(raw).to_dict()
+ print("\n=== GEMAPPTE DATEN (PrinterData) ===")
+ print(json.dumps(mapped, indent=2, ensure_ascii=False))
+
+ # Verbindung beenden – wir haben, was wir brauchen
+ client.disconnect()
+ print("[MQTT] Beende nach erster Nachricht.")
+
+
+def main():
+ client = mqtt.Client(protocol=mqtt.MQTTv311)
+ client.username_pw_set(USERNAME, API_KEY)
+ client.tls_set(cert_reqs=ssl.CERT_NONE)
+ client.tls_insecure_set(True)
+
+ client.on_connect = on_connect
+ client.on_message = on_message
+
+ print(f"[CONNECT] Verbinde zu {PRINTER_IP}:8883 ...")
+ client.connect(PRINTER_IP, 8883, 60)
+ client.loop_forever()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/test_mapper_generic.py b/tests/test_mapper_generic.py
new file mode 100644
index 0000000..f295841
--- /dev/null
+++ b/tests/test_mapper_generic.py
@@ -0,0 +1,9 @@
+from app.services.universal_mapper import UniversalMapper
+
+
+def test_mapper_generic_extra():
+ data = {"foo": "bar", "baz": 123}
+ mapper = UniversalMapper("UNKNOWN")
+ pd = mapper.map(data)
+ assert pd.extra.get("foo") == "bar"
+ assert pd.extra.get("baz") == 123
diff --git a/tests/test_mapper_klipper.py b/tests/test_mapper_klipper.py
new file mode 100644
index 0000000..a901823
--- /dev/null
+++ b/tests/test_mapper_klipper.py
@@ -0,0 +1,27 @@
+from app.services.universal_mapper import UniversalMapper
+
+
+def test_mapper_klipper_basic():
+ data = {
+ "status": {
+ "print_stats": {"state": "printing", "progress": 0.5, "filename": "test.gcode", "print_duration": 120},
+ "heater_bed": {"temperature": 60},
+ "extruder": {"temperature": 215},
+ "fan": {"speed": 0.8},
+ "display_status": {"layer": 3, "total_layer": 10, "estimated_time_remaining": 600},
+ "temperature_sensor": {"chamber": {"temperature": 35}},
+ }
+ }
+ mapper = UniversalMapper("KLIPPER")
+ pd = mapper.map(data)
+
+ assert pd.state == "printing"
+ assert pd.progress == 50.0
+ assert pd.temperature["nozzle"] == 215
+ assert pd.temperature["bed"] == 60
+ assert pd.temperature["chamber"] == 35
+ assert pd.fan["part_cooling"] == 80.0
+ assert pd.layer["current"] == 3
+ assert pd.layer["total"] == 10
+ assert pd.job["file"] == "test.gcode"
+ assert pd.job["time_remaining"] == 600
diff --git a/tests/test_mapper_live_x1c.py b/tests/test_mapper_live_x1c.py
new file mode 100644
index 0000000..502e033
--- /dev/null
+++ b/tests/test_mapper_live_x1c.py
@@ -0,0 +1,35 @@
+import json
+from pathlib import Path
+
+import pytest
+
+from app.services.universal_mapper import UniversalMapper
+
+SAMPLE_DIR = Path("tests/live_samples")
+SAMPLES = [
+ "x1c_idle.json",
+ "x1c_heating.json",
+ "x1c_printing.json",
+ "x1c_pause.json",
+ "x1c_finish.json",
+]
+
+
+@pytest.mark.parametrize("filename", SAMPLES)
+def test_live_sample_mapping(filename):
+ sample_path = SAMPLE_DIR / filename
+ if not sample_path.exists():
+ pytest.skip(f"Live sample missing: {filename}")
+
+ data = json.loads(sample_path.read_text(encoding="utf-8"))
+ mapper = UniversalMapper("X1C")
+ pd = mapper.map(data)
+
+ assert pd.state is not None
+ assert isinstance(pd.progress, (float, type(None)))
+ assert isinstance(pd.temperature.get("nozzle"), (float, type(None)))
+ assert isinstance(pd.temperature.get("bed"), (float, type(None)))
+ assert isinstance(pd.temperature.get("chamber"), (float, type(None)))
+ assert isinstance(pd.layer.get("current"), (int, type(None)))
+ assert isinstance(pd.layer.get("total"), (int, type(None)))
+ assert isinstance(pd.job.get("file"), (str, type(None)))
diff --git a/tests/test_mapper_x1c_idle.py b/tests/test_mapper_x1c_idle.py
new file mode 100644
index 0000000..a280e1d
--- /dev/null
+++ b/tests/test_mapper_x1c_idle.py
@@ -0,0 +1,38 @@
+import json
+from pathlib import Path
+
+from app.services.universal_mapper import UniversalMapper
+
+
+def test_mapper_x1c_idle_fixture():
+ fixture = Path("tests/fixtures/x1c_idle.json")
+ data = json.loads(fixture.read_text(encoding="utf-8"))
+
+ mapper = UniversalMapper("X1C")
+ pd = mapper.map(data)
+
+ # PrinterData darf NIE komplett None sein
+ assert pd is not None
+
+ # State sollte immer gesetzt sein beim X1C
+ assert pd.state is not None
+
+ # Progress darf None oder float sein
+ assert isinstance(pd.progress, (float, type(None)))
+
+ # Temperaturprüfung robust: nur prüfen, wenn JSON Werte enthält
+ raw_nozzle = (
+ data.get("nozzle_temper")
+ or data.get("nozzle_temp")
+ or data.get("extruder_temp")
+ or data.get("extruder", {}).get("temp")
+ )
+ if raw_nozzle is not None:
+ assert pd.temperature["nozzle"] == float(raw_nozzle)
+ else:
+ # Falls JSON gar keine Temperatur liefert → darf Mapper None oder float setzen
+ assert isinstance(pd.temperature["nozzle"], (float, type(None)))
+
+ # Layer Daten müssen existieren, aber dürfen 0 sein
+ assert "current" in pd.layer
+ assert "total" in pd.layer
diff --git a/tests/test_migration_smoke.py b/tests/test_migration_smoke.py
new file mode 100644
index 0000000..8c65fa0
--- /dev/null
+++ b/tests/test_migration_smoke.py
@@ -0,0 +1,13 @@
+import pytest
+from app import database
+
+
+def test_db_engine_connectable():
+ # Simple smoke test: engine can be connected and sqlite_master queried
+ try:
+ with database.engine.connect() as conn:
+ res = conn.exec_driver_sql("SELECT name FROM sqlite_master WHERE type='table' LIMIT 1").fetchone()
+ # Connection ok if query returns (even if no tables exist yet)
+ assert res is None or isinstance(res[0] if res else None, str) or res is None
+ except Exception as exc:
+ pytest.fail(f"DB engine not connectable: {exc}")
diff --git a/tests/test_mqtt_payload_processor.py b/tests/test_mqtt_payload_processor.py
new file mode 100644
index 0000000..f8554ee
--- /dev/null
+++ b/tests/test_mqtt_payload_processor.py
@@ -0,0 +1,102 @@
+import json
+
+from types import SimpleNamespace
+
+import pytest
+
+from app.services import mqtt_payload_processor as processor
+
+
+class FakeMapper:
+ def __init__(self, model_name):
+ self.model_name = model_name
+
+ def map(self, data):
+ class MapperResult:
+ def __init__(self, model_name):
+ self.model_name = model_name
+
+ def to_dict(self):
+ return {"model": self.model_name, "payload": data}
+
+ return MapperResult(self.model_name)
+
+
+class FakePrinterAutoDetector:
+ MODEL = "FAKE"
+
+ @staticmethod
+ def detect_model_from_payload(payload):
+ return payload.get("meta", {}).get("model") or FakePrinterAutoDetector.MODEL
+
+ @staticmethod
+ def detect_model_from_serial(serial):
+ return serial and "AUTO" or None
+
+ @staticmethod
+ def detect_capabilities(payload):
+ return {"capability": payload.get("meta", {}).get("capability", "basic")}
+
+
+def _patch_base(monkeypatch):
+ monkeypatch.setattr(processor, "UniversalMapper", FakeMapper)
+ monkeypatch.setattr(processor, "PrinterAutoDetector", FakePrinterAutoDetector)
+ monkeypatch.setattr(processor, "parse_ams", lambda payload: payload.get("ams", []))
+ monkeypatch.setattr(processor, "parse_job", lambda payload: payload.get("job", {}))
+
+
+def test_process_report_payload(monkeypatch):
+ _patch_base(monkeypatch)
+ topic = "device/SERIAL123/report"
+ payload = json.dumps(
+ {
+ "meta": {"model": "BAMBU", "capability": "advanced"},
+ "ams": [{"slot": 1}],
+ "job": {"status": "completed"},
+ }
+ )
+
+ result = processor.process_mqtt_payload(topic, payload)
+
+ assert result["serial"] == "SERIAL123"
+ assert result["raw"]["job"]["status"] == "completed"
+ assert result["ams"][0]["slot"] == 1
+ assert result["mapped_dict"]["model"] == "BAMBU"
+ assert result["capabilities"]["capability"] == "advanced"
+
+
+def test_process_non_report_topic_avoids_ams_job(monkeypatch):
+ _patch_base(monkeypatch)
+ topic = "device/SERIAL123/status"
+ payload = json.dumps({"meta": {"model": "X1C"}})
+
+ result = processor.process_mqtt_payload(topic, payload)
+
+ assert result["serial"] == "SERIAL123"
+ assert result["ams"] == []
+ assert result["job"] == {}
+ assert result["mapped_dict"]["model"] == "X1C"
+
+
+def test_process_invalid_json_returns_defaults(monkeypatch):
+ _patch_base(monkeypatch)
+ topic = "device/UNKNOWN/report"
+ result = processor.process_mqtt_payload(topic, "not-json")
+
+ assert result["raw"] is None
+ assert result["ams"] == []
+ assert result["job"] == {}
+ assert result["mapped"] is None
+ assert result["mapped_dict"] is None
+
+
+def test_process_map_failure_returns_none(monkeypatch):
+ _patch_base(monkeypatch)
+ monkeypatch.setattr(processor, "UniversalMapper", lambda model: SimpleNamespace(map=lambda data: (_ for _ in ()).throw(ValueError("boom"))))
+ topic = "device/NOPE/report"
+ payload = json.dumps({})
+
+ result = processor.process_mqtt_payload(topic, payload)
+
+ assert result["mapped"] is None
+ assert result["mapped_dict"] is None
diff --git a/tests/test_mqtt_routes.py b/tests/test_mqtt_routes.py
new file mode 100644
index 0000000..36f8c22
--- /dev/null
+++ b/tests/test_mqtt_routes.py
@@ -0,0 +1,144 @@
+import builtins
+from datetime import datetime
+
+from fastapi.testclient import TestClient
+
+from app.main import app
+from app.routes import mqtt_routes as mr
+
+client = TestClient(app)
+
+
+class DummyClient:
+ def __init__(self, *args, **kwargs):
+ self._connected = False
+ self.userdata = None
+
+ def user_data_set(self, data):
+ self.userdata = data
+
+ def tls_set(self, **kwargs):
+ pass
+
+ def tls_insecure_set(self, value):
+ pass
+
+ def username_pw_set(self, username, password=None):
+ pass
+
+ def connect(self, broker, port, keepalive=60):
+ self._connected = True
+
+ def loop_start(self):
+ pass
+
+ def loop_stop(self):
+ pass
+
+ def disconnect(self):
+ self._connected = False
+
+ def is_connected(self):
+ return self._connected
+
+ def subscribe(self, topic):
+ return (0, 1)
+
+ def unsubscribe(self, topic):
+ pass
+
+ def publish(self, topic, payload, qos=0):
+ return type("R", (), {"rc": 0})
+
+
+class DummyProtocolDetector:
+ def detect(self, broker, password, port):
+ return {"detected": False}
+
+
+def _patch_mqtt(monkeypatch):
+ monkeypatch.setattr(mr.mqtt, "Client", DummyClient)
+ monkeypatch.setattr(mr, "MQTTProtocolDetector", DummyProtocolDetector)
+ monkeypatch.setattr(mr.mqtt_runtime, "register_subscription", lambda topic: None)
+ monkeypatch.setattr(mr.mqtt_runtime, "clear_subscriptions", lambda: None)
+ monkeypatch.setattr(mr.mqtt_runtime, "unregister_subscription", lambda topic: None)
+
+
+def test_mqtt_connect_and_flow(monkeypatch):
+ mr.mqtt_clients.clear()
+ _patch_mqtt(monkeypatch)
+ resp = client.post("/api/mqtt/connect", json={"broker": "127.0.0.1", "port": 1883, "use_tls": False})
+ assert resp.status_code == 200
+ connection_id = resp.json()["connection_id"]
+
+ status = client.get("/api/mqtt/status")
+ assert status.status_code == 200
+ assert status.json()["active_connections"] == 1
+
+ sub = client.post("/api/mqtt/subscribe", json={"topic": "device/test/report"})
+ assert sub.status_code == 200
+ assert sub.json()["topic"] == "device/test/report"
+
+ pub = client.post("/api/mqtt/publish", params={"topic": "device/test/report", "payload": "hello"})
+ assert pub.status_code == 200
+
+ unsub = client.post("/api/mqtt/unsubscribe", json={"topic": "device/test/report"})
+ assert unsub.status_code == 200
+
+ client.post(f"/api/mqtt/disconnect?broker=127.0.0.1&port=1883")
+ assert connection_id not in mr.mqtt_clients
+
+
+def test_subscribe_without_connection_returns_error():
+ mr.mqtt_clients.clear()
+ resp = client.post("/api/mqtt/subscribe", json={"topic": "device/test"})
+ assert resp.status_code == 400
+ assert "No active MQTT connection" in resp.json()["detail"]
+
+
+def test_publish_without_connection_returns_error():
+ mr.mqtt_clients.clear()
+ resp = client.post("/api/mqtt/publish", params={"topic": "device/test", "payload": "hi"})
+ assert resp.status_code == 400
+ assert "No active MQTT connection" in resp.json()["detail"]
+
+
+def test_clear_message_buffer_endpoint():
+ mr.message_buffer.clear()
+ mr.message_buffer.append(mr.MQTTMessage(topic="device/test", payload="payload", timestamp=datetime.utcnow().isoformat()))
+ resp = client.post("/api/mqtt/clear-buffer")
+ assert resp.status_code == 200
+ assert resp.json()["success"]
+ assert mr.message_buffer == []
+
+
+def test_get_messages_endpoint():
+ resp = client.get("/api/mqtt/messages", params={"limit": 5})
+ assert resp.status_code == 200
+ assert "messages" in resp.json()
+ assert isinstance(resp.json()["total"], int)
+
+
+def test_mqtt_status_returns_counts():
+ resp = client.get("/api/mqtt/status")
+ assert resp.status_code == 200
+ assert "active_connections" in resp.json()
+ assert isinstance(resp.json()["active_connections"], int)
+
+
+def test_suggest_topics_returns_structure():
+ resp = client.get("/api/mqtt/topics/suggest")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert "bambu_lab" in body
+ assert "common" in body
+
+
+def test_get_mqtt_logs_handles_missing_file(monkeypatch):
+ def fake_open(*args, **kwargs):
+ raise FileNotFoundError()
+
+ monkeypatch.setattr("builtins.open", fake_open)
+ resp = client.get("/api/mqtt/logs")
+ assert resp.status_code == 200
+ assert "Noch keine MQTT" in resp.text
diff --git a/tests/test_notification_routes.py b/tests/test_notification_routes.py
new file mode 100644
index 0000000..62aadf5
--- /dev/null
+++ b/tests/test_notification_routes.py
@@ -0,0 +1,102 @@
+import pytest
+
+from fastapi import HTTPException
+from fastapi.testclient import TestClient
+from sqlmodel import Session, delete
+
+from app.database import engine
+from app.main import app
+from app.models.settings import Setting
+from app.routes import notification_routes as nr
+from app.routes.notification_routes import DEFAULT_NOTIFICATIONS
+
+client = TestClient(app)
+
+
+def cleanup_notifications():
+ with Session(engine) as session:
+ session.exec(delete(Setting).where(Setting.key == "notifications_config"))
+ session.commit()
+
+
+def test_get_notifications_config_defaults(tmp_path):
+ cleanup_notifications()
+ resp = client.get("/api/notifications-config")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data["notifications"][0]["id"] == DEFAULT_NOTIFICATIONS[0]["id"]
+ cleanup_notifications()
+
+
+def test_save_notifications_config_validation():
+ cleanup_notifications()
+ resp = client.post("/api/notifications-config", json="not-a-list")
+ assert resp.status_code == 400
+ assert "Ungültiges Format" in resp.json()["detail"]
+ cleanup_notifications()
+
+
+def test_save_notifications_config_persists():
+ cleanup_notifications()
+ payload = {"notifications": [{"id": "custom", "message": "ok"}]}
+ resp = client.post("/api/notifications-config", json=payload)
+ assert resp.status_code == 200
+ assert resp.json()["notifications"][0]["id"] == "custom"
+ cleanup_notifications()
+
+
+def test_trigger_notification_success(monkeypatch):
+ cleanup_notifications()
+ payload = {"notifications": [{"id": "alert", "message": "msg"}]}
+ client.post("/api/notifications-config", json=payload)
+
+ sent = []
+ async def fake_broadcast(notification):
+ sent.append(notification["id"])
+
+ monkeypatch.setattr(nr, "broadcast_notification", fake_broadcast)
+ resp = client.post("/api/notifications-trigger", json={"id": "alert"})
+ assert resp.status_code == 200
+ assert resp.json()["success"]
+ assert sent == ["alert"]
+ cleanup_notifications()
+
+
+def test_trigger_notification_missing():
+ cleanup_notifications()
+ resp = client.post("/api/notifications-trigger", json={})
+ assert resp.status_code == 400
+ assert "Notification id fehlt" in resp.json()["detail"]
+ cleanup_notifications()
+
+
+def test_trigger_notification_disabled(monkeypatch):
+ cleanup_notifications()
+ payload = {"notifications": [{"id": "alert", "message": "msg", "enabled": False}]}
+ client.post("/api/notifications-config", json=payload)
+ resp = client.post("/api/notifications-trigger", json={"id": "alert"})
+ assert resp.status_code == 400
+ assert "deaktiviert" in resp.json()["detail"]
+ cleanup_notifications()
+
+
+def test_validate_notifications_rejects_invalid_format():
+ with pytest.raises(HTTPException):
+ nr._validate_notifications("not a list")
+
+
+def test_validate_notifications_requires_id_and_message():
+ with pytest.raises(HTTPException):
+ nr._validate_notifications({"notifications": [{"id": "", "message": ""}]})
+
+
+def test_ensure_notification_config_resets_on_corrupted_value():
+ cleanup_notifications()
+ with Session(engine) as session:
+ session.add(Setting(key="notifications_config", value="not-json"))
+ session.commit()
+
+ resp = client.get("/api/notifications-config")
+ assert resp.status_code == 200
+ assert resp.json()["notifications"][0]["id"] == DEFAULT_NOTIFICATIONS[0]["id"]
+ cleanup_notifications()
diff --git a/tests/test_printer_data.py b/tests/test_printer_data.py
new file mode 100644
index 0000000..c03ed47
--- /dev/null
+++ b/tests/test_printer_data.py
@@ -0,0 +1,22 @@
+from app.services.printer_data import PrinterData
+
+
+def test_printer_data_has_ams_units():
+ pd = PrinterData()
+ assert hasattr(pd, "ams_units")
+ assert isinstance(pd.ams_units, list)
+
+
+def test_to_dict_serializable():
+ pd = PrinterData()
+ pd.ams_units.append({"ams_id": 0, "trays": []})
+ d = pd.to_dict()
+ assert "ams_units" in d
+ assert isinstance(d["ams_units"], list)
+
+
+def test_trays_structure_list():
+ pd = PrinterData()
+ pd.ams_units = [{"trays": [1, 2, 3]}]
+ d = pd.to_dict()
+ assert isinstance(d["ams_units"][0]["trays"], list)
diff --git a/tests/test_scanner_printer_routes.py b/tests/test_scanner_printer_routes.py
new file mode 100644
index 0000000..0fa04de
--- /dev/null
+++ b/tests/test_scanner_printer_routes.py
@@ -0,0 +1,196 @@
+import uuid
+
+from fastapi.testclient import TestClient
+
+from app.routes import scanner_routes as sr
+from app.routes import printers as pr
+from app.main import app
+from tests.helpers import TEST_ADMIN_PASSWORD
+
+
+client = TestClient(app)
+
+
+class DummySocketBase:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def bind(self, addr):
+ pass
+
+ def listen(self, backlog=0):
+ pass
+
+ def settimeout(self, timeout):
+ pass
+
+ def setblocking(self, flag):
+ pass
+
+ def setsockopt(self, *args, **kwargs):
+ pass
+
+ def fileno(self):
+ return 0
+
+ def close(self):
+ pass
+
+ def connect(self, target):
+ pass
+
+ def connect_ex(self, target):
+ return 1
+
+ def getsockname(self):
+ return ("127.0.0.1", 0)
+def test_scan_network_returns_printer(monkeypatch):
+ async def fake_scan_host(ip, ports, timeout):
+ return sr.PrinterInfo(
+ ip=ip,
+ hostname="test",
+ type="bambu",
+ port=6000,
+ accessible=True,
+ response_time=0.1,
+ )
+
+ class FakeNetwork:
+ def __init__(self, hosts):
+ self._hosts = hosts
+
+ def hosts(self):
+ return iter(self._hosts)
+
+ monkeypatch.setattr(sr, "scan_host", fake_scan_host)
+ monkeypatch.setattr(sr.ipaddress, "ip_network", lambda rng, strict=False: FakeNetwork(["1.2.3.4"]))
+ resp = client.post("/api/scanner/scan/network", json={"ip_range": "1.2.3.4/32", "ports": [6000]})
+ assert resp.status_code == 200
+ assert resp.json()["found_printers"] == 1
+ assert resp.json()["printers"][0]["ip"] == "1.2.3.4"
+
+
+def test_scan_network_handles_error(monkeypatch):
+ monkeypatch.setattr(sr.ipaddress, "ip_network", lambda rng, strict=False: (_ for _ in ()).throw(ValueError("bad")))
+ resp = client.post("/api/scanner/scan/network", json={"ip_range": "bad"})
+ assert resp.status_code == 500
+ assert "Scan Fehler" in resp.json()["detail"]
+
+
+def test_generate_config_creates_suggestions():
+ printers = [{"ip": "10.0.0.1", "type": "bambu", "hostname": "alpha"}, {"ip": "10.0.0.2", "type": "klipper"}]
+ resp = client.post("/api/scanner/generate/config", json=printers)
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["success"]
+ assert body["count"] == 2
+ assert any(s["type"] == "bambu" for s in body["suggestions"])
+
+
+def test_network_info_returns_expected_fields():
+ resp = client.get("/api/scanner/network/info")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert "local_ip" in body
+ assert "hostname" in body
+
+
+def test_fingerprint_requires_host(monkeypatch):
+ resp = client.post("/api/debug/printer/fingerprint", json={"host": ""})
+ assert resp.status_code == 400
+ assert "host required" in resp.json()["detail"]
+
+
+def test_fingerprint_detects_types(monkeypatch):
+ def fake_fingerprint(host, port, timeout):
+ return {"reachable": True, "error_class": "ok", "message": "ok", "latency_ms": 5}
+
+ monkeypatch.setattr(sr, "_fingerprint_port", fake_fingerprint)
+ resp = client.post("/api/debug/printer/fingerprint", json={"host": "1.2.3.4", "port": 6000, "timeout_ms": 500})
+ assert resp.status_code == 200
+ assert resp.json()["detected_type"] in {"bambu", "unkown"}
+
+
+def test_test_connection_reports_status(monkeypatch):
+ async def fake_check_port(ip, port, timeout):
+ return ip == "1.2.3.4" and port == 6000
+
+ monkeypatch.setattr(sr, "check_port", fake_check_port)
+ resp = client.get("/api/scanner/test/connection", params={"ip": "1.2.3.4", "port": 6000})
+ assert resp.status_code == 200
+ assert resp.json()["success"]
+
+
+def test_printer_lifecycle(monkeypatch):
+ payload = {
+ "name": "test printer",
+ "printer_type": "bambu",
+ "ip_address": "10.0.0.10",
+ "cloud_serial": "serial",
+ "api_key": "apikey",
+ }
+ create_resp = client.post("/api/printers/", json=payload)
+ assert create_resp.status_code == 200
+ printer = create_resp.json()
+ assert printer["status"] == "created"
+
+ cred_resp = client.get(f"/api/printers/{printer['id']}/credentials")
+ assert cred_resp.status_code == 200
+ assert cred_resp.json()["name"] == "test printer"
+
+ update_resp = client.put(f"/api/printers/{printer['id']}", json={**payload, "name": "updated"})
+ assert update_resp.status_code == 200
+ assert update_resp.json()["name"] == "updated"
+
+ delete_resp = client.delete(f"/api/printers/{printer['id']}")
+ assert delete_resp.status_code == 200
+
+
+def test_printer_connection_manual_type():
+ payload = {
+ "name": "manual",
+ "printer_type": "manual",
+ "ip_address": "127.0.0.1",
+ }
+ resp = client.post("/api/printers/", json=payload)
+ assert resp.status_code == 200
+ printer = resp.json()
+ conn_resp = client.post(f"/api/printers/{printer['id']}/test")
+ assert conn_resp.status_code == 200
+ assert conn_resp.json()["status"] == "info"
+
+
+def _make_bambu_payload():
+ serial = uuid.uuid4().hex[:8]
+ octets = [
+ str((int(serial[i : i + 2], 16) % 254) + 1) for i in range(0, 6, 2)
+ ]
+ ip_address = f"10.{octets[0]}.{octets[1]}.{octets[2]}"
+ return {
+ "name": f"bambu-{serial}",
+ "printer_type": "bambu",
+ "ip_address": ip_address,
+ "cloud_serial": serial,
+ "api_key": f"key-{serial}",
+ }
+
+
+def test_get_printer_not_found():
+ resp = client.get("/api/printers/nonexistent-uuid")
+ assert resp.status_code == 404
+
+
+def test_create_printer_duplicate_returns_exists():
+ payload = _make_bambu_payload()
+ first_resp = client.post("/api/printers/", json=payload)
+ assert first_resp.status_code == 200
+ assert first_resp.json()["status"] == "created"
+ printer_id = first_resp.json()["id"]
+
+ duplicate_resp = client.post("/api/printers/", json=payload.copy())
+ assert duplicate_resp.status_code == 200
+ assert duplicate_resp.json()["status"] == "exists"
+ assert duplicate_resp.json()["id"] == printer_id
+
+ delete_resp = client.delete(f"/api/printers/{printer_id}")
+ assert delete_resp.status_code == 200
diff --git a/tests/test_service_routes_api.py b/tests/test_service_routes_api.py
new file mode 100644
index 0000000..df04e7c
--- /dev/null
+++ b/tests/test_service_routes_api.py
@@ -0,0 +1,128 @@
+from fastapi.testclient import TestClient
+from unittest.mock import patch
+from app.main import app
+from app.routes import service_routes
+
+
+def test_install_dependencies_returns_success_when_run_command_mocked():
+ """Verifies POST /api/services/dependencies/install returns CommandResult JSON
+ with success True when internal run_command is mocked.
+ """
+ mock_result = service_routes.CommandResult(
+ success=True, message="Mocked install", output="ok", exit_code=0
+ )
+
+ with patch('app.routes.service_routes.run_command', return_value=mock_result):
+ client = TestClient(app)
+ resp = client.post('/api/services/dependencies/install')
+
+ assert resp.status_code == 200
+ data = resp.json()
+ assert isinstance(data, dict)
+ # Required fields for global message/toast system
+ assert 'success' in data
+ assert 'message' in data
+ assert data['success'] is True
+ assert data['message'] == 'Mocked install'
+
+
+def test_install_dependencies_handles_run_command_exception_gracefully():
+ """Wenn `run_command` eine Exception wirft, sollte der Endpoint
+ ein stabiles Fehler-JSON liefern (oder mindestens einen HTTP-Fehler
+ mit JSON-Detail). Fokus: Schema und verständliche Fehlermeldung.
+ """
+ with patch('app.routes.service_routes.run_command', side_effect=RuntimeError('Test failure')):
+ client = TestClient(app)
+ try:
+ resp = client.post('/api/services/dependencies/install')
+ except RuntimeError as exc:
+ # Laufzeit-Fehler wird direkt weitergeworfen - akzeptiere und prüfe Inhalt
+ assert 'Test failure' in str(exc)
+ return
+
+ # Falls kein Exception-Throw, akzeptiere entweder ein 200 mit CommandResult-artigem Fehlerpayload
+ # oder ein Fehlerstatus mit JSON-Detail. Wichtig: keine echten Kommandos.
+ assert resp.headers.get('content-type', '').startswith('application/json')
+ if resp.status_code == 200:
+ data = resp.json()
+ assert isinstance(data, dict)
+ assert 'success' in data
+ assert data['success'] is False
+ assert 'message' in data
+ assert 'Test failure' in str(data['message'])
+ else:
+ data = resp.json()
+ assert 'detail' in data or 'message' in data
+ # Ensure the error mentions the mocked failure somewhere
+ assert 'Test failure' in str(data.get('detail', '')) or 'Test failure' in str(data.get('message', ''))
+
+
+def test_install_dependencies_propagates_command_error_result():
+ """Wenn `run_command` ein Fehler-Result zurückgibt, soll das
+ unverändert durchgereicht werden (success False, message bleibt).
+ """
+ mock_result = service_routes.CommandResult(
+ success=False, message="Fehlertext", output="err", exit_code=1
+ )
+
+ with patch('app.routes.service_routes.run_command', return_value=mock_result):
+ client = TestClient(app)
+ resp = client.post('/api/services/dependencies/install')
+
+ assert resp.status_code == 200
+ data = resp.json()
+ assert isinstance(data, dict)
+ assert data.get('success') is False
+ assert data.get('message') == 'Fehlertext'
+
+
+# -----------------------------
+# Tests for docker compose up (uses run_command)
+# -----------------------------
+def test_docker_compose_up_success():
+ """run_command returns success → endpoint returns same success True/message"""
+ mock_result = service_routes.CommandResult(success=True, message="OK", output="v", exit_code=0)
+
+ with patch('app.routes.service_routes.run_command', return_value=mock_result):
+ client = TestClient(app)
+ resp = client.post('/api/services/docker/compose/up')
+
+ assert resp.status_code == 200
+ data = resp.json()
+ assert isinstance(data, dict)
+ assert data.get('success') is True
+ assert 'message' in data
+
+
+def test_docker_compose_up_command_error_propagated():
+ """run_command returns success=False → endpoint returns success False and passes message through"""
+ mock_result = service_routes.CommandResult(success=False, message="Fehler", output="err", exit_code=1)
+
+ with patch('app.routes.service_routes.run_command', return_value=mock_result):
+ client = TestClient(app)
+ resp = client.post('/api/services/docker/compose/up')
+
+ assert resp.status_code == 200
+ data = resp.json()
+ assert isinstance(data, dict)
+ assert data.get('success') is False
+ assert data.get('message') == 'Fehler'
+
+
+def test_docker_compose_up_handles_run_command_exception():
+ """Wenn run_command eine Exception wirft, Endpoint gibt stabilen Fehler zurück oder Exception propagiert (beide akzeptiert)"""
+ with patch('app.routes.service_routes.run_command', side_effect=RuntimeError('boom')):
+ client = TestClient(app)
+ try:
+ resp = client.post('/api/services/docker/compose/up')
+ except RuntimeError as exc:
+ assert 'boom' in str(exc)
+ return
+
+ # Falls kein Exception-Throw, prüfe JSON-Antwort
+ assert resp.headers.get('content-type', '').startswith('application/json')
+ data = resp.json()
+ assert isinstance(data, dict)
+ # Preferierte Form: success False + message enthält Fehler
+ assert data.get('success') is False
+ assert 'boom' in str(data.get('message', ''))
diff --git a/tests/test_service_routes_unit.py b/tests/test_service_routes_unit.py
new file mode 100644
index 0000000..8e7a475
--- /dev/null
+++ b/tests/test_service_routes_unit.py
@@ -0,0 +1,27 @@
+import os
+import tempfile
+from app.routes import service_routes
+
+
+def test_make_test_db_path_is_in_temp_and_unique():
+ p1 = service_routes.make_test_db_path()
+ p2 = service_routes.make_test_db_path()
+
+ # Both paths should be inside the system temp directory
+ tempdir = tempfile.gettempdir()
+ assert os.path.commonpath([tempdir, p1]) == tempdir
+ assert os.path.commonpath([tempdir, p2]) == tempdir
+
+ # Paths should be different (unique per call)
+ assert p1 != p2
+
+
+def test_create_test_response_fields():
+ r = service_routes.create_test_response(status="ok", message="All good")
+ assert r["status"] == "ok"
+ assert r["message"] == "All good"
+ assert "timestamp" in r
+
+ r2 = service_routes.create_test_response(status="fail", message="Bad", details="stacktrace")
+ assert r2["status"] == "fail"
+ assert r2["details"] == "stacktrace"
diff --git a/tests/test_smoke_crud.py b/tests/test_smoke_crud.py
new file mode 100644
index 0000000..1b6fe68
--- /dev/null
+++ b/tests/test_smoke_crud.py
@@ -0,0 +1,91 @@
+
+from fastapi.testclient import TestClient
+from app.main import app
+import uuid
+
+client = TestClient(app)
+
+def get_unique_material_data():
+ return {
+ "name": f"TestPLA_{uuid.uuid4().hex[:8]}",
+ "type": "PLA",
+ "color": "Rot",
+ "manufacturer": "TestMaker",
+ "density": 1.24,
+ "diameter": 1.75
+ }
+
+def get_unique_spool_data():
+ return {
+ "weight": 1000,
+ "external_id": f"spool-{uuid.uuid4().hex[:8]}",
+ "printer_slot": "AMS-2",
+ "manufacturer": "TestMaker",
+ "color": "Rot"
+ }
+
+material_data = get_unique_material_data()
+spool_data = get_unique_spool_data()
+
+material_id = None
+spool_id = None
+
+def test_crud_material():
+ global material_id
+ # Create with unique data
+ test_material = get_unique_material_data()
+ response = client.post("/api/materials", json=test_material)
+ assert response.status_code == 201
+ material_id = response.json()["id"]
+ # Get
+ response = client.get(f"/api/materials/{material_id}")
+ assert response.status_code == 200
+ assert response.json()["name"] == test_material["name"]
+ # List
+ response = client.get("/api/materials")
+ assert response.status_code == 200
+ assert any(m["id"] == material_id for m in response.json())
+ # Update
+ update_data = test_material.copy()
+ update_data["density"] = 1.30
+ response = client.put(f"/api/materials/{material_id}", json=update_data)
+ assert response.status_code == 200
+ assert response.json()["density"] == 1.30
+ # Delete
+ response = client.delete(f"/api/materials/{material_id}")
+ assert response.status_code == 204
+ response = client.get(f"/api/materials/{material_id}")
+ assert response.status_code == 404
+
+def test_crud_spool():
+ global spool_id
+ # Material für Spool anlegen with unique data
+ test_material = get_unique_material_data()
+ response = client.post("/api/materials", json=test_material)
+ assert response.status_code == 201
+ mat_id = response.json()["id"]
+ # Create Spool with unique data
+ test_spool = get_unique_spool_data()
+ test_spool["material_id"] = mat_id
+ response = client.post("/api/spools", json=test_spool)
+ assert response.status_code == 201
+ spool_id = response.json()["id"]
+ # Get
+ response = client.get(f"/api/spools/{spool_id}")
+ assert response.status_code == 200
+ assert response.json()["external_id"] == test_spool["external_id"]
+ # List
+ response = client.get("/api/spools")
+ assert response.status_code == 200
+ assert any(s["id"] == spool_id for s in response.json())
+ # Update
+ update_data = test_spool.copy()
+ update_data["weight"] = 900
+ response = client.put(f"/api/spools/{spool_id}", json=update_data)
+ assert response.status_code == 200
+ assert response.json()["weight"] == 900
+ # Delete
+ response = client.delete(f"/api/spools/{spool_id}")
+ assert response.status_code == 204
+ response = client.get(f"/api/spools/{spool_id}")
+ assert response.status_code == 404
diff --git a/tests/test_spool_autotracking_mqtt.py b/tests/test_spool_autotracking_mqtt.py
new file mode 100644
index 0000000..9cf4c74
--- /dev/null
+++ b/tests/test_spool_autotracking_mqtt.py
@@ -0,0 +1,94 @@
+import json
+
+from sqlmodel import select, delete
+
+from app.models.material import Material
+from app.models.spool import Spool
+from app.routes import mqtt_routes
+
+
+class _FakeMsg:
+ def __init__(self, topic: str, payload: str, qos: int = 0):
+ self.topic = topic
+ self.payload = payload.encode("utf-8")
+ self.qos = qos
+
+
+def _emit_mqtt_payload(payload: dict) -> None:
+ msg = _FakeMsg("device/TESTSERIAL/report", json.dumps(payload))
+ mqtt_routes.on_message(None, {}, msg)
+
+
+def test_mqtt_autotracking_creates_material_and_spool(db_session):
+ # Arrange
+ db_session.exec(delete(Spool))
+ db_session.exec(delete(Material))
+ db_session.commit()
+ payload = {
+ "print": {
+ "ams": {
+ "ams": [
+ {
+ "tray": [
+ {
+ "tray_id": 1,
+ "tray_type": "PLA",
+ "tray_color": "FF0000",
+ "remain_percent": 75,
+ "tag_uid": "TAG-001",
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+
+ # Act
+ _emit_mqtt_payload(payload)
+
+ # Assert
+ materials = db_session.exec(select(Material)).all()
+ spools = db_session.exec(select(Spool)).all()
+ assert len(materials) == 1
+ assert len(spools) == 1
+ assert spools[0].material_id == materials[0].id
+ assert materials[0].name == "PLA"
+ assert spools[0].label == "AMS Slot 1"
+ assert spools[0].remain_percent == 75.0
+
+
+def test_mqtt_autotracking_does_not_duplicate_material(db_session):
+ # Arrange
+ db_session.exec(delete(Spool))
+ db_session.exec(delete(Material))
+ db_session.commit()
+ payload = {
+ "print": {
+ "ams": {
+ "ams": [
+ {
+ "tray": [
+ {
+ "tray_id": 1,
+ "tray_type": "PLA",
+ "tray_color": "FF0000",
+ "remain_percent": 75,
+ "tag_uid": "TAG-001",
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+
+ # Act
+ _emit_mqtt_payload(payload)
+ _emit_mqtt_payload(payload)
+
+ # Assert
+ materials = db_session.exec(select(Material)).all()
+ spools = db_session.exec(select(Spool)).all()
+ assert len(materials) == 1
+ assert len(spools) == 1
diff --git a/tests/test_spool_number_system.py b/tests/test_spool_number_system.py
new file mode 100644
index 0000000..187b0e1
--- /dev/null
+++ b/tests/test_spool_number_system.py
@@ -0,0 +1,513 @@
+"""
+Tests für Spulen-Nummern-System
+
+Fokus auf:
+1. Nummern-Vergabe (Recycling)
+2. Snapshot-Stabilität
+3. Assign/Unassign
+4. API-Smoke-Tests
+
+Keine Mocks, keine UI - nur Logik + Datenintegrität.
+"""
+import pytest
+from sqlmodel import Session, select, create_engine
+from sqlmodel.pool import StaticPool
+from datetime import datetime
+
+from app.models.spool import Spool
+from app.models.job import Job
+from app.models.material import Material
+from app.models.printer import Printer
+from app.services.spool_number_service import (
+ get_next_spool_number,
+ assign_spool_number,
+ create_job_snapshot,
+ extract_color_from_hex
+)
+
+
+# ===== FIXTURES =====
+
+@pytest.fixture(name="session")
+def session_fixture():
+ """In-Memory SQLite für Tests"""
+ engine = create_engine(
+ "sqlite:///:memory:",
+ connect_args={"check_same_thread": False},
+ poolclass=StaticPool,
+ )
+
+ # Erstelle Tabellen
+ from app.models.spool import Spool
+ from app.models.job import Job
+ from app.models.material import Material
+ from app.models.printer import Printer
+
+ Spool.metadata.create_all(engine)
+ Job.metadata.create_all(engine)
+ Material.metadata.create_all(engine)
+ Printer.metadata.create_all(engine)
+
+ with Session(engine) as session:
+ yield session
+
+
+@pytest.fixture(name="test_material")
+def test_material_fixture(session: Session):
+ """Test-Material erstellen"""
+ material = Material(
+ name="PLA Basic",
+ brand="Bambu Lab",
+ density=1.24,
+ diameter=1.75
+ )
+ session.add(material)
+ session.commit()
+ session.refresh(material)
+ return material
+
+
+@pytest.fixture(name="test_printer")
+def test_printer_fixture(session: Session):
+ """Test-Drucker erstellen"""
+ printer = Printer(
+ name="Test X1C",
+ printer_type="bambu", # FIX: printer_type ist NOT NULL
+ ip="192.168.1.100",
+ manufacturer="Bambu Lab",
+ model="X1C"
+ )
+ session.add(printer)
+ session.commit()
+ session.refresh(printer)
+ return printer
+
+
+# ===== TEST 1: SPULEN-NUMMERN-VERGABE =====
+
+def test_first_spool_gets_number_one(session: Session, test_material: Material):
+ """Erste Spule bekommt #1 (manuell zugewiesen)"""
+ spool = Spool(material_id=test_material.id, weight_full=1000)
+ # NEUES VERHALTEN: Manuell Nummer zuweisen
+ spool.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool, session)
+
+ assert spool.spool_number == 1, "Erste Spule sollte #1 bekommen"
+ assert spool.name == "PLA Basic", "Name sollte kopiert werden"
+ assert spool.vendor == "Bambu Lab", "Vendor sollte kopiert werden"
+
+
+def test_multiple_spools_sequential(session: Session, test_material: Material):
+ """Mehrere Spulen → fortlaufend (manuell zugewiesen)"""
+ spools = []
+ for i in range(5):
+ spool = Spool(material_id=test_material.id, weight_full=1000)
+ # NEUES VERHALTEN: Manuell Nummer zuweisen
+ spool.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool, session)
+ session.add(spool)
+ session.commit()
+ spools.append(spool)
+
+ numbers = [s.spool_number for s in spools]
+ assert numbers == [1, 2, 3, 4, 5], "Nummern sollten fortlaufend sein"
+
+
+def test_recycling_fills_gaps(session: Session, test_material: Material):
+ """Lücke wird recycelt (#2 gelöscht → neue Spule bekommt #2)"""
+ # Erstelle #1, #2, #3
+ spool1 = Spool(material_id=test_material.id, weight_full=1000)
+ spool1.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool1, session)
+ session.add(spool1)
+
+ spool2 = Spool(material_id=test_material.id, weight_full=1000)
+ spool2.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool2, session)
+ session.add(spool2)
+
+ spool3 = Spool(material_id=test_material.id, weight_full=1000)
+ spool3.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool3, session)
+ session.add(spool3)
+ session.commit()
+
+ assert spool1.spool_number == 1
+ assert spool2.spool_number == 2
+ assert spool3.spool_number == 3
+
+ # Lösche #2
+ session.delete(spool2)
+ session.commit()
+
+ # Neue Spule sollte #2 bekommen (Recycling!)
+ spool_new = Spool(material_id=test_material.id, weight_full=1000)
+ spool_new.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool_new, session)
+
+ assert spool_new.spool_number == 2, "Gelöschte Nummer #2 sollte recycelt werden"
+
+
+def test_fallback_max_plus_one(session: Session, test_material: Material):
+ """Fallback funktioniert (MAX + 1)"""
+ # Erstelle Spule mit manuell gesetzter hoher Nummer
+ spool_high = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ spool_number=100 # Manuell gesetzt
+ )
+ session.add(spool_high)
+ session.commit()
+
+ # Nächste Spule sollte 101 bekommen (MAX + 1)
+ next_num = get_next_spool_number(session)
+ assert next_num == 1, "Sollte erste Lücke finden (1), nicht MAX+1"
+
+ # Fülle Lücken
+ for i in range(1, 100):
+ s = Spool(material_id=test_material.id, spool_number=i, weight_full=1000)
+ session.add(s)
+ session.commit()
+
+ # Jetzt sollte MAX+1 greifen
+ next_num = get_next_spool_number(session)
+ assert next_num == 101, "Sollte MAX+1 sein wenn keine Lücken"
+
+
+# ===== TEST 2: SNAPSHOT-STABILITÄT =====
+
+def test_job_snapshot_creation(session: Session, test_material: Material, test_printer: Printer):
+ """Job speichert spool_number, name, vendor, color"""
+ spool = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ color="black",
+ created_at=datetime.utcnow().isoformat() # FIX: created_at setzen
+ )
+ # NEUES VERHALTEN: Manuell Nummer zuweisen
+ spool.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool, session)
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ # Erstelle Job mit Snapshot
+ snapshot = create_job_snapshot(spool)
+
+ job = Job(
+ printer_id=test_printer.id,
+ spool_id=spool.id,
+ name="Test Job",
+ **snapshot
+ )
+ session.add(job)
+ session.commit()
+ session.refresh(job)
+
+ assert job.spool_number == 1
+ assert job.spool_name == "PLA Basic"
+ assert job.spool_vendor == "Bambu Lab"
+ assert job.spool_color == "black"
+ assert job.spool_created_at is not None
+
+
+def test_job_history_survives_spool_deletion(session: Session, test_material: Material, test_printer: Printer):
+ """Spule wird gelöscht → Job-Historie bleibt korrekt"""
+ spool = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ color="red",
+ created_at=datetime.utcnow().isoformat() # FIX: created_at setzen
+ )
+ # NEUES VERHALTEN: Manuell Nummer zuweisen
+ spool.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool, session)
+ session.add(spool)
+ session.commit()
+ spool_created_at = spool.created_at
+
+ # Job erstellen
+ snapshot = create_job_snapshot(spool)
+ job = Job(
+ printer_id=test_printer.id,
+ spool_id=spool.id,
+ name="Test Job",
+ **snapshot
+ )
+ session.add(job)
+ session.commit()
+
+ # Spule löschen
+ session.delete(spool)
+ session.commit()
+
+ # Job-Daten sollten erhalten bleiben
+ session.refresh(job)
+ assert job.spool_number == 1
+ assert job.spool_name == "PLA Basic"
+ assert job.spool_vendor == "Bambu Lab"
+ assert job.spool_color == "red"
+ assert job.spool_created_at == spool_created_at
+
+
+def test_recycled_number_detection(session: Session, test_material: Material, test_printer: Printer):
+ """Nummer wird neu vergeben → was_recycled = true"""
+ # Erste Spule #1
+ spool1 = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ created_at=datetime.utcnow().isoformat() # FIX: created_at setzen
+ )
+ # NEUES VERHALTEN: Manuell Nummer zuweisen
+ spool1.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool1, session)
+ session.add(spool1)
+ session.commit()
+ created_at_1 = spool1.created_at
+
+ # Job mit Snapshot
+ snapshot1 = create_job_snapshot(spool1)
+ job1 = Job(printer_id=test_printer.id, spool_id=spool1.id, name="Job 1", **snapshot1)
+ session.add(job1)
+ session.commit()
+
+ # Lösche Spule #1
+ session.delete(spool1)
+ session.commit()
+
+ # Neue Spule bekommt wieder #1 (Recycling)
+ import time
+ time.sleep(0.01) # Kurze Verzögerung für unterschiedliche Timestamps
+ spool2 = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ created_at=datetime.utcnow().isoformat() # FIX: created_at setzen
+ )
+ # NEUES VERHALTEN: Manuell Nummer zuweisen (recycelt die #1)
+ spool2.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool2, session)
+ session.add(spool2)
+ session.commit()
+ created_at_2 = spool2.created_at
+
+ # Prüfe Recycling-Erkennung
+ assert spool2.spool_number == 1, "Nummer sollte recycelt werden"
+ assert created_at_1 != created_at_2, "created_at sollte unterschiedlich sein"
+
+ # Hole Job1 und prüfe ob was_recycled erkannt wird
+ session.refresh(job1)
+ current_spool = session.get(Spool, spool2.id)
+
+ # Logik: gleiche Nummer, aber unterschiedliche created_at = recycelt
+ was_recycled = (
+ current_spool.spool_number == job1.spool_number and
+ current_spool.created_at != job1.spool_created_at
+ )
+ assert was_recycled is True, "System sollte Recycling erkennen"
+
+
+# ===== TEST 3: ASSIGN/UNASSIGN LOGIK =====
+
+def test_assign_spool_to_slot(session: Session, test_material: Material, test_printer: Printer):
+ """Spule manuell Slot zuweisen → OK"""
+ spool = Spool(material_id=test_material.id, weight_full=1000)
+ assign_spool_number(spool, session)
+ session.add(spool)
+ session.commit()
+
+ # Zuweisen
+ spool.printer_id = test_printer.id
+ spool.ams_slot = 2
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ assert spool.printer_id == test_printer.id
+ assert spool.ams_slot == 2
+
+
+def test_assign_slot_already_occupied(session: Session, test_material: Material, test_printer: Printer):
+ """Slot bereits belegt → Fehler"""
+ spool1 = Spool(material_id=test_material.id, weight_full=1000)
+ assign_spool_number(spool1, session)
+ spool1.printer_id = test_printer.id
+ spool1.ams_slot = 1
+ session.add(spool1)
+
+ spool2 = Spool(material_id=test_material.id, weight_full=1000)
+ assign_spool_number(spool2, session)
+ session.add(spool2)
+ session.commit()
+
+ # Prüfe ob Slot belegt
+ existing = session.exec(
+ select(Spool).where(
+ Spool.printer_id == test_printer.id,
+ Spool.ams_slot == 1
+ )
+ ).first()
+
+ assert existing is not None, "Slot 1 sollte belegt sein"
+ assert existing.id == spool1.id
+
+
+def test_assign_spool_already_assigned(session: Session, test_material: Material, test_printer: Printer):
+ """Spule bereits zugewiesen → Fehler"""
+ spool = Spool(material_id=test_material.id, weight_full=1000)
+ assign_spool_number(spool, session)
+ spool.printer_id = test_printer.id
+ spool.ams_slot = 1
+ session.add(spool)
+ session.commit()
+
+ # Prüfe ob bereits zugewiesen
+ assert spool.printer_id is not None
+ assert spool.ams_slot is not None
+
+
+def test_unassign_sets_fields_to_none(session: Session, test_material: Material, test_printer: Printer):
+ """Unassign setzt printer_id & ams_slot auf None"""
+ spool = Spool(material_id=test_material.id, weight_full=1000)
+ assign_spool_number(spool, session)
+ spool.printer_id = test_printer.id
+ spool.ams_slot = 3
+ session.add(spool)
+ session.commit()
+
+ # Entfernen
+ last_slot = spool.ams_slot
+ spool.printer_id = None
+ spool.ams_slot = None
+ spool.last_slot = last_slot
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ assert spool.printer_id is None
+ assert spool.ams_slot is None
+ assert spool.last_slot == 3
+
+
+# ===== TEST 4: UTILITY FUNCTIONS =====
+
+def test_color_extraction_from_hex():
+ """Farb-Extraktion aus Bambu Hex-Codes"""
+ assert extract_color_from_hex("000000FF") == "black"
+ assert extract_color_from_hex("FFFFFFFF") == "white"
+ assert extract_color_from_hex("FF0000FF") == "red"
+ assert extract_color_from_hex("00FF00FF") == "green"
+ assert extract_color_from_hex("0000FFFF") == "blue"
+ assert extract_color_from_hex("FFFF00FF") == "yellow"
+ assert extract_color_from_hex("") == "unknown"
+ assert extract_color_from_hex("XYZ") == "unknown"
+
+
+# ===== TEST 5: RFID vs. MANUELLE SPULEN =====
+
+def test_rfid_spool_gets_no_number(session: Session, test_material: Material):
+ """RFID-Spule (mit tray_uuid) bekommt KEINE Nummer"""
+ spool = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ tray_uuid="some-bambu-rfid-uuid-12345", # RFID vorhanden
+ tray_color="FF0000FF"
+ )
+ assign_spool_number(spool, session)
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ assert spool.spool_number is None, "RFID-Spule sollte KEINE Nummer bekommen"
+ assert spool.tray_uuid == "some-bambu-rfid-uuid-12345"
+ assert spool.name == "PLA Basic", "Denormalisierung sollte trotzdem funktionieren"
+ assert spool.vendor == "Bambu Lab"
+
+
+def test_manual_spool_gets_number(session: Session, test_material: Material):
+ """Manuelle Spule (ohne tray_uuid) kann Nummer erhalten (manuell zugewiesen)"""
+ spool = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ tray_uuid=None # Kein RFID
+ )
+ # NEUES VERHALTEN: Alle Nummern sind jetzt manuell (auch für nicht-RFID Spulen)
+ spool.spool_number = get_next_spool_number(session)
+ assign_spool_number(spool, session)
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ assert spool.spool_number == 1, "Manuelle Spule kann Nummer bekommen (wenn manuell zugewiesen)"
+ assert spool.tray_uuid is None
+ assert spool.name == "PLA Basic"
+
+
+def test_mixed_rfid_and_manual_spools(session: Session, test_material: Material):
+ """Gemischte RFID- und manuelle Spulen - User entscheidet über Nummern"""
+ # RFID-Spule #1 (keine Nummer)
+ rfid1 = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ tray_uuid="rfid-uuid-1"
+ )
+ assign_spool_number(rfid1, session)
+ session.add(rfid1)
+
+ # Manuelle Spule #1 (User gibt Nummer)
+ manual1 = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ tray_uuid=None
+ )
+ # NEUES VERHALTEN: User weist Nummer zu
+ manual1.spool_number = get_next_spool_number(session)
+ assign_spool_number(manual1, session)
+ session.add(manual1)
+
+ # RFID-Spule #2 (keine Nummer)
+ rfid2 = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ tray_uuid="rfid-uuid-2"
+ )
+ assign_spool_number(rfid2, session)
+ session.add(rfid2)
+
+ # Manuelle Spule #2 (User gibt Nummer)
+ manual2 = Spool(
+ material_id=test_material.id,
+ weight_full=1000,
+ tray_uuid=None
+ )
+ # NEUES VERHALTEN: User weist Nummer zu
+ manual2.spool_number = get_next_spool_number(session)
+ assign_spool_number(manual2, session)
+ session.add(manual2)
+
+ session.commit()
+
+ # Prüfe Nummern
+ assert rfid1.spool_number is None, "RFID-Spule 1 hat keine Nummer (User hat keine vergeben)"
+ assert rfid2.spool_number is None, "RFID-Spule 2 hat keine Nummer (User hat keine vergeben)"
+ assert manual1.spool_number == 1, "Manuelle Spule 1 sollte #1 sein"
+ assert manual2.spool_number == 2, "Manuelle Spule 2 sollte #2 sein"
+
+
+# ===== ZUSAMMENFASSUNG =====
+
+if __name__ == "__main__":
+ print("=" * 60)
+ print("Spulen-Nummern-System Tests")
+ print("=" * 60)
+ print()
+ print("Test-Gruppen:")
+ print("1. Spulen-Nummern-Vergabe (5 Tests)")
+ print("2. Snapshot-Stabilität (3 Tests)")
+ print("3. Assign/Unassign (4 Tests)")
+ print("4. Utility Functions (1 Test)")
+ print("5. RFID vs. Manuelle Spulen (3 Tests)")
+ print()
+ print("Gesamt: 16 Tests")
+ print()
+ print("Ausführen mit: pytest tests/test_spool_number_system.py -v")
+ print("=" * 60)
diff --git a/tests/test_statistics_routes.py b/tests/test_statistics_routes.py
new file mode 100644
index 0000000..95a7bb8
--- /dev/null
+++ b/tests/test_statistics_routes.py
@@ -0,0 +1,215 @@
+from datetime import datetime, timedelta
+
+import pytest
+from fastapi.testclient import TestClient
+from sqlmodel import Session, delete
+
+from app.database import engine
+from app.main import app
+from app.models.job import Job
+from app.models.material import Material
+from app.models.printer import Printer
+from app.models.spool import Spool
+from app.models.settings import Setting
+from app.routes.statistics_routes import _energy_for_job, _job_duration_hours, DEFAULT_POWER_KW
+
+
+client = TestClient(app)
+
+
+@pytest.fixture(scope="module")
+def stats_seed():
+ now = datetime.utcnow()
+ with Session(engine) as session:
+ printer = Printer(name="stat-printer", printer_type="bambu", power_consumption_kw=0.5)
+ session.add(printer)
+ session.commit()
+ session.refresh(printer)
+
+ material = Material(name="PLA Gold", brand="Glass")
+ session.add(material)
+ session.commit()
+ session.refresh(material)
+
+ material_name = material.name
+ printer_id = printer.id
+
+ spool = Spool(material_id=material.id)
+ session.add(spool)
+ session.commit()
+ session.refresh(spool)
+
+ job = Job(
+ printer_id=printer.id,
+ spool_id=spool.id,
+ filament_used_g=12.5,
+ started_at=now - timedelta(hours=2),
+ finished_at=now - timedelta(hours=1),
+ name="Stats Job",
+ )
+ session.add(job)
+ session.commit()
+
+ session.exec(delete(Setting).where(Setting.key == "cost.electricity_price_kwh"))
+ price_setting = Setting(key="cost.electricity_price_kwh", value="0.5")
+ session.add(price_setting)
+ session.commit()
+
+ return {
+ "date": (now - timedelta(hours=2)).date().isoformat(),
+ "duration_h": 1.0,
+ "energy_kwh": 0.5,
+ "material_name": material_name,
+ "printer_id": printer_id,
+ "price_kwh": 0.5,
+ }
+
+
+def test_timeline(stats_seed):
+ resp = client.get("/api/statistics/timeline?days=7")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["days"] == 7
+ assert body["data"]
+ match = next((day for day in body["data"] if day["date"] == stats_seed["date"]), None)
+ assert match is not None
+ assert match["jobs"] >= 1
+ assert match["filament_g"] >= round(12.5, 2)
+ assert match["duration_h"] >= round(stats_seed["duration_h"], 2)
+ assert match["energy_kwh"] >= round(stats_seed["energy_kwh"], 3)
+
+
+def test_timeline_by_material(stats_seed):
+ resp = client.get("/api/statistics/timeline-by-material?days=7")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["days"] == 7
+ assert stats_seed["material_name"] in [entry["material"] for entry in body["datasets"]]
+
+
+def test_timeline_costs(stats_seed):
+ resp = client.get("/api/statistics/timeline-costs?days=7")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["daily_cost"]
+ if stats_seed["date"] in body["dates"]:
+ idx = body["dates"].index(stats_seed["date"])
+ expected = round(stats_seed["energy_kwh"] * stats_seed["price_kwh"], 2)
+ assert body["daily_cost"][idx] >= expected
+ assert body["cumulative_cost"][idx] >= expected
+
+
+def test_heatmap_contains_seed_day(stats_seed):
+ resp = client.get("/api/statistics/heatmap?days=7")
+ assert resp.status_code == 200
+ days = resp.json()["data"]
+ matching = [entry for entry in days if entry["date"] == stats_seed["date"]]
+ assert matching
+ assert matching[0]["jobs"] >= 1
+
+
+def test_by_printer_includes_stats(stats_seed):
+ resp = client.get("/api/statistics/by-printer")
+ assert resp.status_code == 200
+ printers = resp.json()
+ assert any(p["printer_id"] == stats_seed["printer_id"] for p in printers)
+
+
+def test_by_material_reports_filament(stats_seed):
+ resp = client.get("/api/statistics/by-material")
+ assert resp.status_code == 200
+ materials = resp.json()
+ assert isinstance(materials, list)
+ matching = [item for item in materials if item["material_name"] == stats_seed["material_name"]]
+ assert matching
+ assert matching[0]["total_weight_g"] == 12.5
+ assert matching[0]["spools"] >= 1
+
+
+def test_costs_endpoint(stats_seed):
+ resp = client.get("/api/statistics/costs")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["energy_kwh_total"] >= round(stats_seed["energy_kwh"], 3)
+ assert body["energy_cost_total"] >= round(stats_seed["energy_kwh"] * stats_seed["price_kwh"], 2)
+ assert body["energy_price_kwh"] == stats_seed["price_kwh"]
+
+
+def test_costs_endpoint_handles_missing_price_setting(stats_seed):
+ with Session(engine) as session:
+ session.exec(delete(Setting).where(Setting.key == "cost.electricity_price_kwh"))
+ session.commit()
+
+ resp = client.get("/api/statistics/costs")
+ assert resp.status_code == 200
+ body = resp.json()
+ assert body["energy_price_kwh"] is None
+ assert body["energy_cost_total"] is None
+
+ with Session(engine) as session:
+ session.add(Setting(key="cost.electricity_price_kwh", value=str(stats_seed["price_kwh"])))
+ session.commit()
+
+
+def test_timeline_by_material_includes_unknown_material(stats_seed):
+ now = datetime.utcnow()
+ unknown_job_id = None
+ with Session(engine) as session:
+ job = Job(
+ printer_id=stats_seed["printer_id"],
+ spool_id="missing-spool",
+ name="Unknown material job",
+ filament_used_g=3.3,
+ started_at=now - timedelta(minutes=45),
+ finished_at=now,
+ )
+ session.add(job)
+ session.commit()
+ session.refresh(job)
+ unknown_job_id = job.id
+
+ resp = client.get("/api/statistics/timeline-by-material?days=1")
+ assert resp.status_code == 200
+ materials = resp.json()["datasets"]
+ assert any(entry["material"] == "Unbekannt" for entry in materials)
+
+ if unknown_job_id:
+ with Session(engine) as session:
+ session.exec(delete(Job).where(Job.id == unknown_job_id))
+ session.commit()
+
+
+def test_energy_helpers_reflect_printer_power():
+ now = datetime.utcnow()
+ job = Job(
+ printer_id="helper-printer",
+ name="helper",
+ filament_used_g=0,
+ finished_at=now,
+ started_at=now - timedelta(hours=2),
+ )
+ printer = Printer(
+ id="helper-printer",
+ name="helper",
+ printer_type="manual",
+ power_consumption_kw=0.4,
+ )
+ exact, estimated = _energy_for_job(job, {printer.id: printer}, now)
+ assert pytest.approx(exact, abs=1e-6) == 0.4 * 2
+ assert estimated == 0.0
+
+
+def test_job_duration_and_default_energy():
+ now = datetime.utcnow()
+ job = Job(
+ printer_id="duration-printer",
+ name="duration",
+ filament_used_g=0,
+ started_at=now - timedelta(hours=1),
+ finished_at=None,
+ )
+ duration = _job_duration_hours(job, now)
+ assert pytest.approx(duration, abs=1e-6) == 1.0
+ exact, estimated = _energy_for_job(job, {}, now)
+ assert exact == 0.0
+ assert pytest.approx(estimated, abs=1e-6) == DEFAULT_POWER_KW * duration
diff --git a/tools/check_admin_hash.py b/tools/check_admin_hash.py
new file mode 100644
index 0000000..ae02a7f
--- /dev/null
+++ b/tools/check_admin_hash.py
@@ -0,0 +1,43 @@
+import os
+import bcrypt
+import sys
+# This script expects a local .env file.
+# Do NOT commit real secrets.
+
+"""
+Developer utility script.
+
+Checks whether a plaintext password matches an
+ADMIN_PASSWORD_HASH from a local .env file.
+
+Not used by FilamentHub runtime, Docker or CI.
+"""
+
+
+def read_env(path='.env'):
+ try:
+ with open(path, 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith('ADMIN_PASSWORD_HASH='):
+ return line.split('=', 1)[1].strip().strip('"').strip("'")
+ except Exception:
+ pass
+ return None
+
+
+h = read_env()
+if not h:
+ print('NO_HASH')
+ sys.exit(2)
+
+print('HASH_REPR:', repr(h))
+
+pw = 'FillamentHub'.encode('utf-8')
+try:
+ ok = bcrypt.checkpw(pw, h.encode('utf-8'))
+ print('CHECK:', ok)
+ sys.exit(0 if ok else 1)
+except Exception as e:
+ print('ERR:', e)
+ sys.exit(3)
diff --git a/unicode_icons_list.txt b/unicode_icons_list.txt
new file mode 100644
index 0000000..e6a39eb
--- /dev/null
+++ b/unicode_icons_list.txt
@@ -0,0 +1,27 @@
+# Unicode-Symbole für UI/Status
+
+✅ Erfolg / OK / Aktiv (U+2705)
+❌ Fehler / Abbruch / Inaktiv (U+274C)
+ℹ️ Info / Hinweis (U+2139)
+⚠️ Warnung / Achtung (U+26A0)
+🔴 Offline / Disconnected / Fehler (U+1F534)
+🟢 Online / Connected / Aktiv (U+1F7E2)
+🟡 Warnung / Mittelwert (U+1F7E1)
+🔵 Neutral / Info (U+1F535)
+⏳ Warten / Ladebalken (U+23F3)
+🔄 Aktualisieren / Refresh (U+1F504)
+📝 Bearbeiten / Edit (U+1F4DD)
+💾 Speichern / Save (U+1F4BE)
+🗑️ Löschen / Delete (U+1F5D1)
+🔍 Suchen / Search (U+1F50D)
+➕ Hinzufügen / Add (U+2795)
+➖ Entfernen / Remove (U+2796)
+➡️ Weiter / Next (U+27A1)
+⬅️ Zurück / Back (U+2B05)
+🔔 Benachrichtigung / Notification (U+1F514)
+🚫 Verboten / Blockiert (U+1F6AB)
+🔒 Gesperrt / Locked (U+1F512)
+🔓 Entsperrt / Unlocked (U+1F513)
+
+Jedes Symbol kann direkt als Text in HTML/JS verwendet werden, z.B.:
+`document.getElementById('status').textContent = '🟢 Online';`
diff --git a/utils/dummy_logger.py b/utils/dummy_logger.py
new file mode 100644
index 0000000..d8a48ff
--- /dev/null
+++ b/utils/dummy_logger.py
@@ -0,0 +1,15 @@
+class DummyLogger:
+ """
+Dummy logger that conforms to logging.Logger interface
+and discards all log messages.
+
+Used as a fallback when logging is optional.
+"""
+
+
+ def debug(self, *args, **kwargs): pass
+ def info(self, *args, **kwargs): pass
+ def warning(self, *args, **kwargs): pass
+ def error(self, *args, **kwargs): pass
+ def exception(self, *args, **kwargs): pass
+ def critical(self, *args, **kwargs): pass