diff --git a/CHANGELOG.md b/CHANGELOG.md index 53613bdf0..57de59549 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,67 @@ All notable changes to Bambuddy will be documented in this file. +## [0.2.2b1] - 2026-03-03 + +### Improved +- **SpoolBuddy Settings Page Redesign** — Redesigned the SpoolBuddy settings page with a tabbed layout (Device, Display, Scale, Updates). The Device tab shows an About section, NFC reader info (type, connection, status), device info (host, IP, uptime, online status), and device ID. The Display tab has a brightness slider (CSS software filter for HDMI displays) and screen blank timeout selector (Off, 1m, 2m, 5m, 10m, 30m) — the screen blanks after user inactivity (no touch) and wakes on tap. The Scale tab shows live weight with a step-indicator calibration wizard (tare → place known weight → calibrate). The Updates tab shows the daemon version and checks for updates against GitHub releases with optional beta inclusion. Display settings (brightness + blank timeout) are stored per-device in the backend and applied instantly in the frontend layout via outlet context. +- **SpoolBuddy Language & Time Format Support** — The SpoolBuddy kiosk now respects Bambuddy's configured UI language and time format. Added a `language` field to backend app settings so the UI language is persisted server-side (previously only stored in browser localStorage, inaccessible to the kiosk's separate Chromium instance). The SpoolBuddy layout fetches settings on load and syncs `i18n.changeLanguage()`. The top bar clock uses `formatTimeOnly()` with the user's time format setting (system/12h/24h). Added full SpoolBuddy settings translations for all 6 supported languages (English, German, French, Japanese, Italian, Portuguese). +- **SpoolBuddy Kiosk Stability** — Disabled Chromium's swipe-to-navigate gesture (`--overscroll-history-navigation=0`) in the install script to prevent accidental back-navigation on the touchscreen. Added the `video` group to the SpoolBuddy system user for DSI backlight access. +- **SpoolBuddy Touch-Friendly UI** — Enlarged all interactive elements across the SpoolBuddy kiosk UI for comfortable finger use on the 1024×600 RPi touchscreen. Bottom nav icons and labels increased (20→24px icons, 10→12px labels, 48→56px bar height). Top bar printer selector and clock enlarged. Dashboard stats bar compacted, printers card removed (printer selection via top bar is sufficient), section headers and device status text bumped up. AMS page single-slot cards, spool visualizations, and fill bars enlarged. AMS unit cards get larger spool previews (56→64px), bigger material/slot text, and larger humidity/temperature indicators. Inventory spool cards, settings page headers, and calibration inputs all sized up to meet 44px minimum tap targets. The AMS slot configuration modal now renders in a two-column full-screen layout on the kiosk display (filament list on left, K-profile and color picker on right) instead of the standard centered dialog, eliminating scrolling. + +- **Ethernet Connection Indicator** ([#585](https://github.com/maziggy/bambuddy/issues/585)) — Printers connected via ethernet now show a green "Ethernet" badge with a cable icon instead of the WiFi signal strength indicator. Detected via `home_flag` bit 18 from the printer's MQTT data. The printer info modal also shows "Ethernet" instead of WiFi signal details. + +### New Features +- **In-App Bug Reporting** — A floating bug report button in the bottom-right corner lets users submit bug reports directly from the Bambuddy UI. Reports include a description, optional screenshot (upload, paste, or drag & drop with automatic JPEG compression), optional contact email, and automatically collected diagnostic data. On submit, the system temporarily enables debug logging, sends push_all to all connected printers, waits 30 seconds to collect fresh logs, then submits everything to a secure relay on bambuddy.cool which creates a GitHub issue with sanitized logs uploaded as a separate file. All sensitive data (printer names, serial numbers, IPs, credentials, email addresses) is redacted from logs before submission. The expandable data privacy notice details exactly what is and isn't collected. Translated into all 7 supported languages. +- **SpoolBuddy NFC Tag Writing (OpenTag3D)** — SpoolBuddy can now write NFC tags for third-party filament spools using the OpenTag3D format on NTAG213/215/216 stickers. A new "Write" page (`/spoolbuddy/write-tag`) in the kiosk UI provides three workflows: write a tag for an existing inventory spool (no tag linked yet), create a new spool and write in one flow, or replace a damaged tag (unlinks old, writes new). The left panel shows a searchable spool list or a compact creation form (material dropdown, color picker, brand, weight); the right panel shows real-time NFC status with tag detection, a spool summary, and the write button. The backend encodes spool data as a 133-byte OpenTag3D NDEF message (MIME type `application/opentag3d`, fits NTAG213's 144-byte capacity) containing material, color, brand, weight, temperature, and RGBA color data. The write command flows through the existing heartbeat polling mechanism — the frontend queues a write, the daemon picks it up on the next heartbeat, writes page-by-page with read-back verification via the PN5180's NTAG WRITE (0xA2) command, and reports success/failure via WebSocket. On success the tag UID is automatically linked to the spool with `data_origin=opentag3d`. Written tags are readable by any OpenTag3D-compatible reader including SpoolBuddy itself. Translations added for all 6 languages. +- **SpoolBuddy On-Screen Keyboard** — Added a virtual QWERTY keyboard for the SpoolBuddy kiosk UI (and login page) since the Raspberry Pi has no physical keyboard and system-level virtual keyboards (squeekboard, wvkbd) don't auto-show/hide in the labwc/Chromium kiosk environment. Uses `react-simple-keyboard` with a dark theme matching the bambu-dark/bambu-green palette. Auto-shows when any text/password/email input is focused, supports shift, caps lock, backspace, and email-friendly keys (@, .). Inputs with `data-vkb="false"` are excluded (e.g. SpoolBuddySettingsPage's own numpad). A two-phase close prevents ghost-click passthrough to elements underneath the keyboard. +- **SpoolBuddy Inline Spool Cards** — Placing an NFC-tagged spool on the SpoolBuddy reader now shows spool info directly in the dashboard's right panel instead of a separate modal overlay. Known spools display a SpoolIcon with color/brand/material, a large remaining-weight readout with fill bar, and a weight comparison grid, with action buttons for "Assign to AMS", "Sync Weight", and "Close". Unknown tags show the tag UID, scale weight, and offer "Add to Inventory" or "Link to Spool" actions. The card stays visible if the tag is removed (for continued interaction) and won't re-appear for the same tag after dismissal — but re-placing a tag after removal shows it again. The idle spool animation displays when no tag is detected. +- **SpoolBuddy AMS Page: External Slots & Slot Configuration** — The SpoolBuddy AMS page (`/spoolbuddy/ams`) now displays external spool slots (single nozzle: "Ext", dual nozzle: "Ext-L"/"Ext-R") and AMS-HT units in a compact horizontal row below the regular AMS grid, fitting within the 1024×600 kiosk display without scrolling. Clicking any AMS, AMS-HT, or external slot opens the `ConfigureAmsSlotModal` to configure filament type and color — the same modal used on the main Printers page. Dual-nozzle printers show L/R nozzle badges on each AMS unit. Temperature and humidity are displayed with threshold-colored SVG icons (green/gold/red) matching the Bambu Lab style on the main printer cards, using the configured AMS humidity and temperature thresholds from settings. +- **SpoolBuddy Dashboard Redesign** — Redesigned the SpoolBuddy dashboard with a two-column layout: left column shows device connection status (scale and NFC with state-colored icons — green when device is online, gray when offline) and printer status badges below (compact pills with green/gray dots for online/offline, wrapping to fit without scrolling); right column shows the current spool card. Cards use a dashed border style for a cleaner look. The large weight display card was removed in favor of the inline scale reading in the device card. Unknown NFC tags now offer a quick-add modal that creates a basic PLA spool entry linked to the tag — with a hint recommending users add spools via the main Bambuddy UI first for full details. The separate SpoolBuddy inventory page was removed since inventory management belongs in the main Bambuddy frontend; the bottom nav now has three tabs (Dashboard, AMS, Settings). +- **SpoolBuddy Kiosk Auth Bypass via API Key** — When Bambuddy auth is enabled, the SpoolBuddy kiosk (Chromium on RPi) was redirected to the login page because the `ProtectedRoute` requires a user object from `GET /auth/me`, which only accepted JWT tokens. The `/auth/me` endpoint now also accepts API keys (via `Authorization: Bearer bb_xxx` or `X-API-Key` header) and returns a synthetic admin user with all permissions. The frontend's `AuthContext` reads an optional `?token=` URL parameter on first load, stores it in localStorage, and strips it from the URL to prevent leakage via browser history or referrer. The install script now includes the API key in the kiosk URL (`/spoolbuddy?token=${API_KEY}`), so the device authenticates automatically on boot without manual login. +- **Daily Beta Builds** — Added a release script (`docker-publish-daily-beta.sh`) that reads the current `APP_VERSION` from config, builds a multi-arch Docker image, pushes to both GHCR and Docker Hub, and creates/updates a GitHub prerelease with changelog notes. Daily builds overwrite the same beta version tag (e.g., `0.2.2b1`) — users pull the latest by re-pulling the tag or using Watchtower. Beta images are never tagged as `latest`. +- **Inventory Scale Weight Check Column** — Added a "Weight Check" column (hidden by default) to the inventory table that compares each spool's last scale measurement against its calculated gross weight (net remaining + core weight). Spools within a ±50g tolerance show a green checkmark; mismatched spools show a yellow warning with the difference and a sync button that trusts the scale reading and resets weight tracking. The backend stores `last_scale_weight` and `last_weighed_at` on each spool whenever weight is synced via SpoolBuddy, and the column tooltip shows scale weight, calculated weight, and difference. Edge case: when scale weight is below core weight (empty spool or not on scale), the comparison treats it as a match since sync can't correct this. + +### Fixed +- **Archive Card Shows "Source" Badge for Sliced .3mf Files** — Archive cards created from prints showed a "SOURCE" badge instead of "GCODE" when the filename was a plain `.3mf` (without `.gcode` in the name). The `isSlicedFile()` check only matched `.gcode` or `.gcode.3mf` extensions, but `.3mf` files can be either sliced (contains gcode) or raw source models. Now checks the archive's `total_layers` and `print_time_seconds` metadata — if either is present, the file is sliced. Also passes the original human-readable filename when creating archives from the file manager print flow (previously stored the UUID library filename). +- **AMS Slot Shows Wrong Material for "Support for" Profiles** — Configuring an AMS slot with a filament profile like "PLA Support for PETG PETG Basic @Bambu Lab H2D 0.4 nozzle" set the slot material to PLA instead of PETG. The name parser iterated material types in order and returned the first match ("PLA"), ignoring that "PLA Support for PETG" means the filament type is PETG. Both the frontend `parsePresetName()` and backend `_parse_material_from_name()` now detect the "X Support for Y" naming pattern and extract the material after "Support for". The frontend also prefers the corrected parsed material over the stored `filament_type` (which may have been saved with the old parser during import). +- **Firmware Check Shows Wrong Version for H2D Pro** ([#584](https://github.com/maziggy/bambuddy/issues/584)) — H2D Pro printers showed firmware as out of date because the firmware check matched against the H2D firmware track instead of the H2D Pro track. The firmware check's model-to-API-key mapping only had display names (e.g., "H2D", "H2D Pro") but not SSDP device codes (e.g., "O1E", "O2D"). Added all known SSDP model codes to the firmware check mapping so raw device codes resolve to the correct firmware track. +- **Spurious Error Notifications During Normal Printing (0300_0002)** — Some firmware versions send non-zero `print_error` values in MQTT during normal printing (e.g., `0x03000002` → short code `0300_0002`). The `print_error` parser treated any non-zero value as a real error, appending it to `hms_errors` and triggering notifications — even though the printer was printing fine. All known real HMS error codes have their low 16 bits >= `0x4000` (`0x4xxx` = fatal, `0x8xxx` = warning/pause, `0xCxxx` = prompt). Values below `0x4000` are status/phase indicators, not faults. Now skips values where the error portion is below `0x4000` in both the `print_error` and `hms` array parsers. +- **K-Profile Apply Fails With Greenlet Error on Auto-Created Spools** — When a Bambu Lab spool was detected via RFID for the first time (auto-creating a new inventory entry), the K-profile application step logged `WARNING greenlet_spawn has not been called; can't call await_only() here`. The `create_spool_from_tray()` function flushed the new spool to the database but didn't eagerly load the `k_profiles` relationship. When `auto_assign_spool()` then iterated `spool.k_profiles` to find a matching K-profile, SQLAlchemy attempted a lazy load — which requires a synchronous DB call that's illegal inside an async context. The K-profile step was silently skipped (caught by `except Exception`), so spool assignment still worked but without K-profile selection. Now eagerly sets `k_profiles = []` on newly created spools since they can never have K-profiles yet. +- **SpoolBuddy Link Tag Missing tag_type** — Linking an NFC tag to a spool via the SpoolBuddy dashboard's "Link to Spool" action only set `tag_uid` but left `tag_type` and `data_origin` empty, because it called the generic `updateSpool` API instead of the dedicated `linkTagToSpool` endpoint. The printer card's `LinkSpoolModal` already used `linkTagToSpool` correctly. Now uses `linkTagToSpool` with `tag_type: 'generic'` and `data_origin: 'nfc_link'`, which also handles conflict checks and archived tag recycling. +- **SpoolBuddy AMS Page Missing Fill Levels for Non-BL Spools** — AMS slots with non-Bambu Lab spools assigned to inventory didn't show fill level bars on the SpoolBuddy AMS page, even though the main printer card displayed them correctly. The SpoolBuddy AMS page only used the MQTT `remain` field (which is -1/unknown for non-BL spools), while the printer card had a fallback chain: Spoolman → inventory → AMS remain. Now fetches inventory spool assignments and computes fill levels from `(label_weight - weight_used) / label_weight`, falling back to AMS remain when no inventory assignment exists. +- **SpoolBuddy AMS Page Ext-R Slot Falsely Shown as Active When Idle** — On dual-nozzle printers (H2D), the Ext-R slot was incorrectly highlighted as active when the printer was idle. The ext-R tray has `id=255`, and the idle sentinel `tray_now=255` matched it via `trayNow === extTrayId`. The main printer card avoided this by clearing `effectiveTrayNow` to `undefined` when `tray_now=255`. Now guards against `tray_now=255` before any ext slot active check. +- **Printer Card Loses Info When Print Is Paused** ([#562](https://github.com/maziggy/bambuddy/issues/562)) — When a print was paused (via G-code pause command or user action), the printer card showed the print as finished — the progress bar, print name, ETA, layer count, and cover image all disappeared, replaced by the idle "Ready to Print" placeholder. The display conditions only checked for `state === 'RUNNING'` but not `'PAUSE'`, even though other parts of the same page (Skip Objects button, Stop/Resume controls) already handled both states correctly. Now shows print progress info for both `RUNNING` and `PAUSE` states, and the status label correctly reads "Paused" instead of the hardcoded "Printing" fallback. +- **SpoolBuddy "Assign to AMS" Slot Shows Empty Fields in Slicer** — After assigning a spool to an AMS slot via SpoolBuddy's "Assign to AMS" button, the slicer's slot overview showed the correct filament, but opening the slot detail card showed all fields empty/unselected. Two bugs: (1) the `assign_spool` backend called the cloud API with the raw `slicer_filament` value including its version suffix (e.g., `PFUS9ac902733670a9_07`), which returned a 404; the silent fallback sent the `setting_id` as `tray_info_idx` instead of the real `filament_id` (e.g., `PFUS9ac902733670a9` instead of `P4d64437`), and the slicer couldn't resolve the preset; (2) no `SlotPresetMapping` was saved, so Bambuddy's own ConfigureAmsSlotModal couldn't identify the active preset when reopened. Now strips version suffixes before the cloud lookup, resolves the real `filament_id` via the cloud API (with local preset and generic ID fallbacks), includes the brand name in `tray_sub_brands`, and saves the slot preset mapping from the frontend after assignment. +- **Virtual Printer Bind Server Fails With TLS-Enabled Slicers** ([#559](https://github.com/maziggy/bambuddy/issues/559)) — BambuStudio uses TLS on port 3002 for certain printer models (e.g. A1 Mini / N1), but the bind server only spoke plain TCP on both ports 3000 and 3002. The slicer's TLS ClientHello was rejected as an "invalid frame", preventing discovery and connection entirely. Port 3002 now uses TLS (using the VP's existing certificate), while port 3000 remains plain TCP for backwards compatibility. The proxy-mode bind proxy was also updated to use TLS termination on port 3002. +- **Queue Returns 500 When Cancelled Print Exists** ([#558](https://github.com/maziggy/bambuddy/issues/558)) — When a print was cancelled mid-print, the MQTT completion handler stored status `"aborted"` on the queue item, but the response schema only accepts `"pending"`, `"printing"`, `"completed"`, `"failed"`, `"skipped"`, or `"cancelled"`. Listing all queue items hit a Pydantic validation error on the invalid status, returning a 500 error. Filtering by a specific status (e.g. "pending") excluded the bad row and worked fine. Now normalises `"aborted"` to `"cancelled"` before storing. A startup fixup also converts any existing `"aborted"` rows. +- **Tests Send Real Maintenance Notifications** — Tests that call `on_print_complete(status="completed")` created background `asyncio` tasks (maintenance check, smart plug, notifications) that outlived the test's mock context. When the event loop processed these orphaned tasks, `async_session` was no longer patched and they queried the real production database — finding real printers with maintenance due and real notification providers, then sending real notifications. Tests now cancel spawned background tasks before the mock context exits. +- **Virtual Printer Config Changes Ignored Until Toggle Off/On** — Changing a virtual printer's mode (e.g. proxy → archive), model, access code, bind IP, remote interface IP, or target printer via the UI updated the database but the running VP instance was never restarted. `sync_from_db()` skipped any VP whose ID was already in the running instances dict without checking if config had changed. Now compares critical fields between the running instance and DB record and restarts the VP when a difference is detected. +- **Sidebar Navigation Ignores User Permissions** — All sidebar navigation items (Archives, Queue, Stats, Profiles, Maintenance, Projects, Inventory, Files) were visible to every user regardless of their role's permissions. Only the Settings item was permission-gated. Now each nav item is hidden when the user lacks the corresponding read permission (e.g., `archives:read`, `queue:read`, `library:read`). The Printers item remains always visible as the home page. Also added the missing `inventory:read|create|update|delete` permissions to the frontend Permission type (they existed in the backend but were absent from the frontend type definition). +- **Camera Button Clickable Without Permission & ffmpeg Process Leak** ([#550](https://github.com/maziggy/bambuddy/issues/550)) — Two camera issues in multi-user environments (e.g., classrooms with multiple printers). First, the camera button on the printer card was clickable even when the user's role lacked `camera:view` permission. Now disabled with a permission tooltip, matching the existing pattern for `printers:control` on the chamber light button. Second, ffmpeg processes (~240MB each) were never cleaned up after closing a camera stream. The `stop_camera_stream` endpoint called `terminate()` but never `wait()`ed or `kill()`ed, and HTTP disconnect detection in the streaming response only checked between frames — if the generator was blocked reading from ffmpeg stdout, disconnect was never detected (due to TCP send buffer masking the closed connection). Three fixes: (1) the stop endpoint now uses `terminate()` → `wait(2s)` → `kill()` → `wait()`; (2) each stream gets a background disconnect monitor task that polls `request.is_disconnected()` every 2 seconds independently of the frame loop, directly killing the ffmpeg process on disconnect; (3) a periodic cleanup (every 60s) scans `/proc` for any ffmpeg process with a Bambu RTSP URL (`rtsps://bblp:`) that isn't in an active stream and `SIGKILL`s it — catching orphans that survive app restarts or generator abandonment. +- **Windows Install Fails With "Syntax of the Command Is Incorrect"** ([#544](https://github.com/maziggy/bambuddy/issues/544)) — The `start_bambuddy.bat` Python hash verification used a multi-line `for /f "usebackq"` with a backtick-delimited command split across lines. Windows CMD cannot parse line breaks inside backtick-delimited `for /f` commands, causing "The syntax of the command is incorrect" immediately after downloading Python. The entire block was also redundant — it downloaded a separate checksum file from python.org and re-verified the hash, but `verify_sha256` had already checked the archive against the pinned hash on the previous line. Removed the duplicate verification block. Also had a secondary bug: always downloaded the `amd64` checksum even on `arm64` systems. +- **Queue Badge Shows on Incompatible Printers** ([#486](https://github.com/maziggy/bambuddy/issues/486)) — The purple queue counter badge in the printer card header showed on all printers of the same model when a job was scheduled for "any [model]", even if the printer didn't have the matching filament color loaded. The `PrinterQueueWidget` (which shows "Clear Plate & Start") already filtered by filament type and color, but the badge count used the raw unfiltered queue length. Now applies the same filament compatibility filter to the badge count. +- **SpoolBuddy Daemon Can't Find Hardware Drivers** — The daemon's `nfc_reader.py` and `scale_reader.py` import `read_tag` and `scale_diag` as bare modules, but these files live in `spoolbuddy/scripts/` which isn't on Python's module search path. The systemd service sets `WorkingDirectory` to `spoolbuddy/` and runs `python -m daemon.main`, so only the `spoolbuddy/` and `daemon/` directories are on `sys.path`. Added `scripts/` to `sys.path` at daemon startup, resolved relative to the module file so it works regardless of install path. Also moved the `read_tag` import inside `NFCReader.__init__`'s try/except block — it was previously outside, so a missing module crashed the entire daemon instead of gracefully skipping NFC polling. Demoted hardware-not-available log messages from ERROR to INFO since missing modules are expected when hardware isn't connected. +- **SpoolBuddy Scale Tare & Calibration Not Applied** — The SpoolBuddy scale tare and calibrate buttons on the Settings page queued commands but never executed them. Five bugs in the chain: (1) the daemon received the `tare` command via heartbeat but never called `scale.tare()` — a comment said "need cross-task communication" but the ScaleReader was already available in the shared dict; (2) no API endpoint existed for the daemon to report the new tare offset back to the backend database, so tare results were lost; (3) when calibration values changed in heartbeat responses, the daemon updated its config object but never called `scale.update_calibration()`, so the ScaleReader kept using its initial values forever; (4) the heartbeat response that delivered the tare command still contained pre-tare calibration values, which immediately overwrote the new tare offset back to zero; (5) the `set-factor` endpoint computed `calibration_factor` using the DB `tare_offset`, which could be stale or zero if the tare hadn't persisted yet — producing a wildly wrong factor (e.g., 5000g displayed with empty scale). Added a `POST /devices/{device_id}/calibration/set-tare` endpoint and `update_tare()` API client method. The heartbeat loop now executes `scale.tare()` when the tare command is received, persists the result via the new endpoint, propagates calibration changes to the ScaleReader instance, and skips calibration sync on the heartbeat cycle that delivers a tare command. The calibration flow now captures the raw ADC at tare time and sends it alongside the loaded-weight ADC in step 2, so the factor is computed from the actual tare reference rather than the DB value — making calibration self-contained and independent of the tare persistence round-trip. The calibration weight input uses a compact touch-friendly numpad since the RPi kiosk has no physical keyboard. +- **A1 Mini Shows "Unknown" Status After MQTT Payload Decode Failure** ([#549](https://github.com/maziggy/bambuddy/issues/549)) — Some printer firmware versions (observed on A1 Mini 01.07.02.00) occasionally send MQTT payloads containing non-UTF-8 bytes. The `_on_message` handler called `msg.payload.decode()` (strict UTF-8), and the resulting `UnicodeDecodeError` was not caught — only `json.JSONDecodeError` was handled. The entire message was silently dropped, causing printer status to show "unknown", temperatures to read 0°C, and AMS data to disappear. Now catches `UnicodeDecodeError` and falls back to `decode(errors="replace")`, which substitutes invalid bytes with U+FFFD while keeping the JSON structure intact. Logs a warning for diagnostics. +- **H2C Dual Nozzle Variant (O1C2) Not Recognized** ([#489](https://github.com/maziggy/bambuddy/issues/489)) — The H2C dual nozzle variant reports model code `O1C2` via MQTT, but only `O1C` was in the recognized model maps. This caused the camera to use the wrong protocol (chamber image on port 6000 instead of RTSP on port 322) — the printer immediately closed the connection, producing a reconnect loop. Also affected model display names, chamber temperature support detection, linear rail classification, and virtual printer model mapping. Added `O1C2` to all model ID maps across backend and frontend. +- **Support Package Leaks Full Subnet IPs and Misdetects Docker Network Mode** — Three support package fixes. First, the network section included full subnet addresses (e.g., `192.168.192.0/24`); now masks the first two octets (`x.x.192.0/24`). Second, `network_mode_hint` used `len(interfaces) > 2` which always reported "bridge" on single-NIC hosts even with `network_mode: host`, because `get_network_interfaces()` excludes Docker infrastructure interfaces. Now checks for the presence of Docker interfaces (`docker0`, `br-*`, `veth*`) via `socket.if_nameindex()` — these are only visible when the container shares the host network namespace. Third, `developer_mode` was still null for most users because the MQTT `fun` field was only parsed inside the `print` key; some firmware versions send it at the top level of the payload. Now also checks top-level `fun`. Also added a `virtual_printers` section with mode, model, enabled/running status, and pending file count for each configured virtual printer. +- **SpoolBuddy Scale Calibration Lost After Reboot** — The SpoolBuddy daemon generated its device ID from the MAC address of whichever network interface `Path.iterdir()` returned first, but filesystem iteration order is non-deterministic. On different boots, the daemon could pick `eth0` (MAC ending `3100`) or `wlan0` (MAC ending `3102`), producing a different `device_id` each time. Since calibration values (`tare_offset`, `calibration_factor`) are stored per device ID in the backend database, a new ID meant registering as a brand-new uncalibrated device. Fixed by sorting network interfaces alphabetically before selection, ensuring the same interface (and thus the same device ID) is always chosen. +- **SpoolBuddy NFC Reader Fails to Detect Tags** — The PN5180 NFC reader had two polling issues. First, each `activate_type_a()` call that returned `None` (no tag) corrupted the PN5180 transceive state — subsequent calls silently failed even when a tag was physically present, making it impossible to detect tags placed after startup (only tags already on the reader during init were detected). Fixed by performing a full hardware reset (RST pin toggle + RF re-init, ~240ms) before every idle poll, giving a ~1.8 Hz effective poll rate. Second, after a successful SELECT the card stayed in ACTIVE state and ignored subsequent WUPA/REQA, causing false "tag removed" events after ~1 second. Fixed with a light RF off/on cycle (13ms) before each poll when a tag is present, resetting the card to IDLE for re-selection. Also added error-based auto-recovery (full hardware reset after 10 consecutive poll exceptions), periodic status logging every 60 seconds, and accurate heartbeat reporting of NFC/scale health. + +### Improved +- **SpoolBuddy AMS Page Single-Slot Card Layout** — AMS-HT and external spool cards on the SpoolBuddy AMS page now use a responsive grid (2 cards per AMS card width) instead of auto-sized flex items, so they align with the regular AMS card columns above. Regular AMS cards no longer stretch vertically to fill available space on printers with fewer AMS units. +- **SpoolBuddy Scale Value Stabilization** — The SpoolBuddy daemon now suppresses redundant scale weight reports: only sends updates when the weight changes by ≥2g. Previously every 1-second report interval sent a reading regardless of change, and stability state flips (stable ↔ unstable) also triggered reports — when ADC noise kept the spread hovering around the 2g stability threshold, the flag toggled every cycle, forcing a report with a slightly different weight each time. Removed stability flipping as a report trigger (the stable flag is still included in each report for consumers). Also increased the NAU7802 moving average window from 5 to 20 samples (500ms → 2s) to smooth ADC noise. The frontend also applies a 3g display threshold as defense-in-depth. +- **SpoolBuddy TopBar: Online Printer Selection** — The printer selector in the SpoolBuddy top bar now only shows online printers and auto-selects the first online printer. If the currently selected printer goes offline, it automatically switches to the next available online printer. Also replaced the placeholder icon with the SpoolBuddy logo. Renamed the connection status label from "Online" to "Backend" for clarity. +- **SpoolBuddy Assign to AMS Redesign** — The "Assign to AMS" sub-modal (opened from the spool card) is now a full-screen overlay that reuses the `AmsUnitCard` component from the AMS page. Regular AMS units display in a 2-column grid with the same spool visualization, fill bars, and material labels. AMS-HT and external slots (Ext / Ext-L / Ext-R on dual-nozzle printers) appear in a compact horizontal row below. Clicking any slot auto-configures the filament via a single `assignSpool` API call — the backend handles both the DB assignment and MQTT configuration. The printer selector was removed from the modal since the top bar already provides printer selection. Dual-nozzle printers show L/R nozzle badges on each AMS unit. +- **Filament ID Conversion Utility** — Extracted filament_id ↔ setting_id conversion logic into a shared utility (`backend/app/utils/filament_ids.py`). The `assign_spool` endpoint now normalizes `slicer_filament` (which can be stored in either filament_id format like "GFL05" or setting_id format like "GFSL05_07") into the correct `tray_info_idx` and `setting_id` for the MQTT command. Previously `setting_id` was always sent as empty string, which could cause BambuStudio to not resolve the filament preset for the AMS slot. +- **Updates Card Separates Firmware and Software Settings** — The Updates card on the Settings page mixed printer firmware and Bambuddy software update toggles with no visual grouping. Now splits the card into two labeled sections ("Printer Firmware" and "Bambuddy Software") separated by a divider, making it clear which toggles control what. +- **SpoolBuddy Test Coverage** — Added integration tests for all 12 SpoolBuddy API endpoints (21 backend tests covering device registration/re-registration, heartbeat status and pending commands, NFC tag scan/match/removal, scale reading broadcast, spool weight calculation, and scale calibration including tare, set-factor, and zero-delta error handling) and component tests for the three main SpoolBuddy frontend components (20 frontend tests covering WeightDisplay weight formatting and status indicators, SpoolInfoCard spool info rendering and action callbacks, UnknownTagCard tag display, and TagDetectedModal open/close/escape behavior with known and unknown spool views). +- **Cleanup Obsolete Settings** — The startup migration now deletes orphaned settings keys from the database that are no longer used by the application (e.g., `slicer_binary_path` from earlier slicer integration research). +- **Added HUF Currency** ([#579](https://github.com/maziggy/bambuddy/issues/579)) — Added Hungarian Forint (HUF, Ft) to the supported currencies list for filament cost tracking. +- **FTP Upload Progress & Speed** — Reduced FTP upload chunk size from 1MB to 64KB for smoother progress reporting — at typical printer FTP speeds (~50-100KB/s) the progress bar now updates roughly every second instead of appearing stuck for 20+ seconds between jumps. Removed the post-upload `voidresp()` wait for all printer models (previously only skipped for A1); H2D printers delay the FTP 226 acknowledgment by 30+ seconds after data transfer completes, causing a long hang at 100%. The data is already on the SD card once the transfer finishes. Also added transfer speed logging (KB/s) and PASV+TLS handshake timing to help diagnose slow connections. +- **Wider Print & Schedule Modals** — Increased the Print and Schedule Print modal width from 512px to 672px to better accommodate long filament profile names (e.g., "PLA Support for PETG PETG Basic @Bambu Lab H2D 0.4 nozzle"). + ## [0.2.1.1] - 2026-02-28 @@ -13,7 +74,6 @@ All notable changes to Bambuddy will be documented in this file. - **Queue Badge Shows on Incompatible Printers** ([#486](https://github.com/maziggy/bambuddy/issues/486)) — The purple queue counter badge in the printer card header showed on all printers of the same model when a job was scheduled for "any [model]", even if the printer didn't have the matching filament color loaded. The `PrinterQueueWidget` (which shows "Clear Plate & Start") already filtered by filament type and color, but the badge count used the raw unfiltered queue length. Now applies the same filament compatibility filter to the badge count. - **A1 Mini Shows "Unknown" Status After MQTT Payload Decode Failure** ([#549](https://github.com/maziggy/bambuddy/issues/549)) — Some printer firmware versions (observed on A1 Mini 01.07.02.00) occasionally send MQTT payloads containing non-UTF-8 bytes. The `_on_message` handler called `msg.payload.decode()` (strict UTF-8), and the resulting `UnicodeDecodeError` was not caught — only `json.JSONDecodeError` was handled. The entire message was silently dropped, causing printer status to show "unknown", temperatures to read 0°C, and AMS data to disappear. Now catches `UnicodeDecodeError` and falls back to `decode(errors="replace")`, which substitutes invalid bytes with U+FFFD while keeping the JSON structure intact. Logs a warning for diagnostics. - ## [0.2.1] - 2026-02-27 ### Fixed diff --git a/README.md b/README.md index 333eba3de..50fdeec33 100644 --- a/README.md +++ b/README.md @@ -191,6 +191,7 @@ Perfect for remote print farms, traveling makers, or accessing your home printer - Debug logging toggle with live indicator - Live application log viewer with filtering - Support bundle generator with comprehensive diagnostics (privacy-filtered) +- **In-app bug reporting** — Submit bug reports directly from the UI with optional screenshot (upload, paste, or drag & drop), automatic diagnostic log collection (30s debug capture with printer push), and system info. Reports create GitHub issues via a secure relay. Privacy-first: all logs are sanitized and sensitive data (IPs, serials, credentials) is never included. ### 🔒 Optional Authentication - Enable/disable authentication any time diff --git a/backend/app/api/routes/archives.py b/backend/app/api/routes/archives.py index c783fbf41..9ae28b326 100644 --- a/backend/app/api/routes/archives.py +++ b/backend/app/api/routes/archives.py @@ -2,6 +2,7 @@ import json import logging import zipfile +from datetime import date, datetime, time, timezone from decimal import ROUND_HALF_UP, Decimal from pathlib import Path @@ -21,7 +22,7 @@ from backend.app.models.filament import Filament from backend.app.models.spool_usage_history import SpoolUsageHistory from backend.app.models.user import User -from backend.app.schemas.archive import ArchiveResponse, ArchiveStats, ArchiveUpdate, ReprintRequest +from backend.app.schemas.archive import ArchiveResponse, ArchiveSlim, ArchiveStats, ArchiveUpdate, ReprintRequest from backend.app.services.archive import ArchiveService from backend.app.utils.threemf_tools import extract_nozzle_mapping_from_3mf @@ -122,6 +123,8 @@ def archive_to_response( async def list_archives( printer_id: int | None = None, project_id: int | None = None, + date_from: date | None = Query(None), + date_to: date | None = Query(None), limit: int = 50, offset: int = 0, db: AsyncSession = Depends(get_db), @@ -132,6 +135,8 @@ async def list_archives( archives = await service.list_archives( printer_id=printer_id, project_id=project_id, + date_from=date_from, + date_to=date_to, limit=limit, offset=offset, ) @@ -149,6 +154,78 @@ async def list_archives( return result +@router.get("/slim", response_model=list[ArchiveSlim]) +async def list_archives_slim( + date_from: date | None = Query(None), + date_to: date | None = Query(None), + limit: int = Query(default=10000, le=50000), + offset: int = 0, + db: AsyncSession = Depends(get_db), + _: User | None = RequirePermissionIfAuthEnabled(Permission.ARCHIVES_READ), +): + """Lightweight archive listing for stats/dashboard widgets. + + Returns only the fields needed for client-side aggregation, + skipping duplicate detection, file paths, and extra_data. + """ + filters = [] + if date_from: + dt_from = datetime.combine(date_from, time.min, tzinfo=timezone.utc) + filters.append(PrintArchive.created_at >= dt_from) + if date_to: + dt_to = datetime.combine(date_to, time.max, tzinfo=timezone.utc) + filters.append(PrintArchive.created_at <= dt_to) + + query = ( + select( + PrintArchive.printer_id, + PrintArchive.print_name, + PrintArchive.print_time_seconds, + PrintArchive.started_at, + PrintArchive.completed_at, + PrintArchive.filament_used_grams, + PrintArchive.filament_type, + PrintArchive.filament_color, + PrintArchive.status, + PrintArchive.cost, + PrintArchive.quantity, + PrintArchive.created_at, + ) + .where(*filters) + .order_by(PrintArchive.created_at.desc()) + .limit(limit) + .offset(offset) + ) + result = await db.execute(query) + rows = result.all() + + return [ + { + "printer_id": r.printer_id, + "print_name": r.print_name, + "print_time_seconds": r.print_time_seconds, + "actual_time_seconds": ( + int((r.completed_at - r.started_at).total_seconds()) + if r.started_at + and r.completed_at + and r.status == "completed" + and (r.completed_at - r.started_at).total_seconds() > 0 + else None + ), + "filament_used_grams": r.filament_used_grams, + "filament_type": r.filament_type, + "filament_color": r.filament_color, + "status": r.status, + "started_at": r.started_at, + "completed_at": r.completed_at, + "cost": r.cost, + "quantity": r.quantity, + "created_at": r.created_at, + } + for r in rows + ] + + @router.get("/search", response_model=list[ArchiveResponse]) async def search_archives( q: str = Query(..., min_length=2, description="Search query"), @@ -277,7 +354,9 @@ async def rebuild_search_index( @router.get("/analysis/failures") async def analyze_failures( - days: int = 30, + days: int | None = None, + date_from: date | None = Query(None), + date_to: date | None = Query(None), printer_id: int | None = None, project_id: int | None = None, db: AsyncSession = Depends(get_db), @@ -297,6 +376,8 @@ async def analyze_failures( service = FailureAnalysisService(db) return await service.analyze_failures( days=days, + date_from=date_from, + date_to=date_to, printer_id=printer_id, project_id=project_id, ) @@ -440,25 +521,42 @@ async def export_stats( @router.get("/stats", response_model=ArchiveStats) async def get_archive_stats( + date_from: date | None = Query(None, description="Start date (inclusive), YYYY-MM-DD"), + date_to: date | None = Query(None, description="End date (inclusive), YYYY-MM-DD"), db: AsyncSession = Depends(get_db), _: User | None = RequirePermissionIfAuthEnabled(Permission.STATS_READ), ): """Get statistics across all archives.""" + # Build date filter conditions + base_conditions = [] + if date_from: + dt_from = datetime.combine(date_from, time.min, tzinfo=timezone.utc) + base_conditions.append(PrintArchive.created_at >= dt_from) + if date_to: + dt_to = datetime.combine(date_to, time.max, tzinfo=timezone.utc) + base_conditions.append(PrintArchive.created_at <= dt_to) + # Total counts - total_result = await db.execute(select(func.count(PrintArchive.id))) + total_result = await db.execute(select(func.count(PrintArchive.id)).where(*base_conditions)) total_prints = total_result.scalar() or 0 - successful_result = await db.execute(select(func.count(PrintArchive.id)).where(PrintArchive.status == "completed")) + successful_result = await db.execute( + select(func.count(PrintArchive.id)).where(PrintArchive.status == "completed", *base_conditions) + ) successful_prints = successful_result.scalar() or 0 - failed_result = await db.execute(select(func.count(PrintArchive.id)).where(PrintArchive.status == "failed")) + failed_result = await db.execute( + select(func.count(PrintArchive.id)).where(PrintArchive.status == "failed", *base_conditions) + ) failed_prints = failed_result.scalar() or 0 # Totals - use actual print time from timestamps (not slicer estimates) # For archives with both started_at and completed_at, calculate actual duration # Fall back to print_time_seconds only for archives without timestamps archives_for_time = await db.execute( - select(PrintArchive.started_at, PrintArchive.completed_at, PrintArchive.print_time_seconds) + select(PrintArchive.started_at, PrintArchive.completed_at, PrintArchive.print_time_seconds).where( + *base_conditions + ) ) total_seconds = 0 for started_at, completed_at, print_time_seconds in archives_for_time.all(): @@ -473,15 +571,17 @@ async def get_archive_stats( total_time = total_seconds / 3600 # Convert to hours # Sum filament directly - filament_used_grams already contains the total for the print job - filament_result = await db.execute(select(func.coalesce(func.sum(PrintArchive.filament_used_grams), 0))) + filament_result = await db.execute( + select(func.coalesce(func.sum(PrintArchive.filament_used_grams), 0)).where(*base_conditions) + ) total_filament = filament_result.scalar() or 0 - cost_result = await db.execute(select(func.sum(PrintArchive.cost))) + cost_result = await db.execute(select(func.sum(PrintArchive.cost)).where(*base_conditions)) total_cost = cost_result.scalar() or 0 # By filament type (split comma-separated values for multi-material prints) filament_type_result = await db.execute( - select(PrintArchive.filament_type).where(PrintArchive.filament_type.isnot(None)) + select(PrintArchive.filament_type).where(PrintArchive.filament_type.isnot(None), *base_conditions) ) prints_by_filament: dict[str, int] = {} for (filament_types,) in filament_type_result.all(): @@ -493,7 +593,9 @@ async def get_archive_stats( # By printer printer_result = await db.execute( - select(PrintArchive.printer_id, func.count(PrintArchive.id)).group_by(PrintArchive.printer_id) + select(PrintArchive.printer_id, func.count(PrintArchive.id)) + .where(*base_conditions) + .group_by(PrintArchive.printer_id) ) prints_by_printer = {str(k): v for k, v in printer_result.all()} @@ -501,7 +603,7 @@ async def get_archive_stats( # Get all completed archives with both estimated and actual times accuracy_result = await db.execute( select(PrintArchive) - .where(PrintArchive.status == "completed") + .where(PrintArchive.status == "completed", *base_conditions) .where(PrintArchive.print_time_seconds.isnot(None)) .where(PrintArchive.started_at.isnot(None)) .where(PrintArchive.completed_at.isnot(None)) @@ -575,10 +677,10 @@ async def get_archive_stats( total_energy_cost = round(total_energy_kwh * energy_cost_per_kwh, 3) else: # Print mode: sum up per-print energy from archives - energy_kwh_result = await db.execute(select(func.sum(PrintArchive.energy_kwh))) + energy_kwh_result = await db.execute(select(func.sum(PrintArchive.energy_kwh)).where(*base_conditions)) total_energy_kwh = energy_kwh_result.scalar() or 0 - energy_cost_result = await db.execute(select(func.sum(PrintArchive.energy_cost))) + energy_cost_result = await db.execute(select(func.sum(PrintArchive.energy_cost)).where(*base_conditions)) total_energy_cost = energy_cost_result.scalar() or 0 return ArchiveStats( diff --git a/backend/app/api/routes/auth.py b/backend/app/api/routes/auth.py index e97a85141..5ef012478 100644 --- a/backend/app/api/routes/auth.py +++ b/backend/app/api/routes/auth.py @@ -1,6 +1,8 @@ from datetime import timedelta +from typing import Annotated -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, Header, HTTPException, status +from fastapi.security import HTTPAuthorizationCredentials from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload @@ -8,8 +10,11 @@ from backend.app.api.routes.settings import get_external_login_url from backend.app.core.auth import ( ACCESS_TOKEN_EXPIRE_MINUTES, + ALGORITHM, + SECRET_KEY, Permission, RequirePermissionIfAuthEnabled, + _validate_api_key, authenticate_user, authenticate_user_by_email, create_access_token, @@ -17,8 +22,10 @@ get_password_hash, get_user_by_email, get_user_by_username, + security, ) from backend.app.core.database import get_db +from backend.app.core.permissions import ALL_PERMISSIONS from backend.app.models.group import Group from backend.app.models.settings import Settings from backend.app.models.user import User @@ -61,6 +68,21 @@ def _user_to_response(user: User) -> UserResponse: ) +def _api_key_to_user_response(api_key) -> UserResponse: + """Create a synthetic admin UserResponse for a valid API key.""" + return UserResponse( + id=0, + username=f"api-key:{api_key.key_prefix}", + email=None, + role="admin", + is_active=True, + is_admin=True, + groups=[], + permissions=sorted(ALL_PERMISSIONS), + created_at=api_key.created_at.isoformat(), + ) + + router = APIRouter(prefix="/auth", tags=["authentication"]) @@ -308,14 +330,74 @@ async def login(request: LoginRequest, db: AsyncSession = Depends(get_db)): @router.get("/me", response_model=UserResponse) async def get_current_user_info( - current_user: User = Depends(get_current_active_user), + credentials: Annotated[HTTPAuthorizationCredentials | None, Depends(security)] = None, + x_api_key: Annotated[str | None, Header(alias="X-API-Key")] = None, db: AsyncSession = Depends(get_db), ): - """Get current user information.""" - # Reload user with groups for proper permission calculation - result = await db.execute(select(User).where(User.id == current_user.id).options(selectinload(User.groups))) - user = result.scalar_one() - return _user_to_response(user) + """Get current user information. + + Accepts JWT tokens (via Authorization: Bearer header) and API keys + (via X-API-Key header or Authorization: Bearer bb_xxx). + API keys return a synthetic admin user with all permissions. + """ + import jwt + from jwt.exceptions import PyJWTError as JWTError + + # Check for API key via X-API-Key header + if x_api_key: + api_key = await _validate_api_key(db, x_api_key) + if api_key: + return _api_key_to_user_response(api_key) + + # Check for Bearer token (could be JWT or API key) + if credentials is not None: + token = credentials.credentials + # Check if it's an API key (starts with bb_) + if token.startswith("bb_"): + api_key = await _validate_api_key(db, token) + if api_key: + return _api_key_to_user_response(api_key) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid API key", + headers={"WWW-Authenticate": "Bearer"}, + ) + + # Otherwise treat as JWT + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + username: str = payload.get("sub") + if username is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) + except JWTError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) + + user = await get_user_by_username(db, username) + if user is None or not user.is_active: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) + # Reload with groups for proper permission calculation + result = await db.execute(select(User).where(User.id == user.id).options(selectinload(User.groups))) + user = result.scalar_one() + return _user_to_response(user) + + # No credentials provided + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Authentication required", + headers={"WWW-Authenticate": "Bearer"}, + ) @router.post("/logout") diff --git a/backend/app/api/routes/bug_report.py b/backend/app/api/routes/bug_report.py new file mode 100644 index 000000000..5274d3cee --- /dev/null +++ b/backend/app/api/routes/bug_report.py @@ -0,0 +1,95 @@ +"""Bug report endpoint for submitting user bug reports to GitHub.""" + +import asyncio +import logging + +from fastapi import APIRouter +from pydantic import BaseModel + +from backend.app.api.routes.support import ( + _apply_log_level, + _collect_support_info, + _get_debug_setting, + _get_recent_sanitized_logs, + _set_debug_setting, +) +from backend.app.core.database import async_session +from backend.app.services.bug_report import submit_report +from backend.app.services.printer_manager import printer_manager + +router = APIRouter(prefix="/bug-report", tags=["bug-report"]) +logger = logging.getLogger(__name__) + +LOG_COLLECTION_SECONDS = 30 + + +class BugReportRequest(BaseModel): + description: str + email: str | None = None + screenshot_base64: str | None = None + include_support_info: bool = True + + +class BugReportResponse(BaseModel): + success: bool + message: str + issue_url: str | None = None + issue_number: int | None = None + + +async def _collect_debug_logs() -> str: + """Enable debug logging, push all printers, wait, then collect logs.""" + # Check if debug was already enabled + async with async_session() as db: + was_debug, _ = await _get_debug_setting(db) + + # Enable debug logging + if not was_debug: + async with async_session() as db: + await _set_debug_setting(db, True) + _apply_log_level(True) + logger.info("Bug report: temporarily enabled debug logging") + + # Send push_all to all connected printers + for printer_id in list(printer_manager._clients.keys()): + try: + printer_manager.request_status_update(printer_id) + except Exception: + logger.debug("Failed to push_all for printer %s", printer_id) + + # Wait for logs to accumulate + await asyncio.sleep(LOG_COLLECTION_SECONDS) + + # Collect logs + logs = await _get_recent_sanitized_logs() + + # Restore previous log level if it wasn't debug before + if not was_debug: + async with async_session() as db: + await _set_debug_setting(db, False) + _apply_log_level(False) + logger.info("Bug report: restored normal logging") + + return logs + + +@router.post("/submit", response_model=BugReportResponse) +async def submit_bug_report(report: BugReportRequest): + """Submit a bug report. No auth required — anyone should be able to report bugs.""" + support_info = None + if report.include_support_info: + try: + support_info = await _collect_support_info() + logs = await _collect_debug_logs() + if logs: + support_info["recent_logs"] = logs + except Exception: + logger.exception("Failed to collect support info for bug report") + + result = await submit_report( + description=report.description, + reporter_email=report.email, + screenshot_base64=report.screenshot_base64, + support_info=support_info, + ) + return BugReportResponse(**result) diff --git a/backend/app/api/routes/cloud.py b/backend/app/api/routes/cloud.py index 439dafba0..adf6053d3 100644 --- a/backend/app/api/routes/cloud.py +++ b/backend/app/api/routes/cloud.py @@ -38,6 +38,7 @@ BambuCloudError, get_cloud_service, ) +from backend.app.utils.filament_ids import filament_id_to_setting_id logger = logging.getLogger(__name__) @@ -503,32 +504,8 @@ async def _enrich_from_local_presets( return result -def _filament_id_to_setting_id(filament_id: str) -> str: - """ - Convert filament_id to setting_id format for Bambu Cloud API. - - Printers report filament_id (e.g., GFA00, GFG02) but the API expects - setting_id format which has an "S" inserted after "GF" (e.g., GFSA00, GFSG02). - - User presets (starting with "P") and already-correct IDs are returned unchanged. - """ - if not filament_id: - return filament_id - - # User presets start with "P" - leave unchanged - if filament_id.startswith("P"): - return filament_id - - # Official Bambu presets: GFx## -> GFSx## - # Check if it matches the filament_id pattern (GF followed by letter and digits) - if filament_id.startswith("GF") and len(filament_id) >= 4: - # Check if it's already a setting_id (has S after GF) - if filament_id[2] == "S": - return filament_id - # Insert "S" after "GF": GFA00 -> GFSA00 - return f"GFS{filament_id[2:]}" - - return filament_id +# _filament_id_to_setting_id is now imported from backend.app.utils.filament_ids +_filament_id_to_setting_id = filament_id_to_setting_id @router.post("/filament-info") diff --git a/backend/app/api/routes/inventory.py b/backend/app/api/routes/inventory.py index 4c89ed378..b5cc7d815 100644 --- a/backend/app/api/routes/inventory.py +++ b/backend/app/api/routes/inventory.py @@ -30,6 +30,7 @@ SpoolUpdate, ) from backend.app.schemas.spool_usage import SpoolUsageHistoryResponse +from backend.app.utils.filament_ids import normalize_slicer_filament logger = logging.getLogger(__name__) @@ -731,18 +732,15 @@ async def assign_spool( if client: # Build filament setting from spool data tray_type = spool.material - tray_sub_brands = f"{spool.material} {spool.subtype}" if spool.subtype else spool.material + tray_sub_brands = ( + f"{spool.brand} {spool.material} {spool.subtype}".strip() + if spool.brand + else f"{spool.material} {spool.subtype}" + if spool.subtype + else spool.material + ) tray_color = spool.rgba or "FFFFFFFF" - tray_info_idx = spool.slicer_filament or "" - setting_id = "" - # Resolve tray_info_idx for the MQTT command. - # Priority: - # 1. Use the spool's own slicer_filament if set (including - # cloud-synced custom presets like PFUS* / P*). - # 2. Reuse the slot's existing tray_info_idx if it's a specific - # (non-generic) preset for the same material. - # 3. Fall back to a generic Bambu filament ID. _GENERIC_IDS = { "PLA": "GFL99", "PETG": "GFG99", @@ -761,26 +759,108 @@ async def assign_spool( } _GENERIC_ID_VALUES = set(_GENERIC_IDS.values()) - if tray_info_idx: - logger.info("Spool assign: using spool's slicer_filament=%r", tray_info_idx) - elif ( - current_tray_info_idx - and current_tray_info_idx not in _GENERIC_ID_VALUES - and fingerprint_type - and fingerprint_type.upper() == tray_type.upper() - ): - logger.info( - "Spool assign: reusing slot's existing tray_info_idx=%r (same material %r)", - current_tray_info_idx, - tray_type, - ) - tray_info_idx = current_tray_info_idx - elif tray_type: - material = tray_type.upper().strip() - generic = _GENERIC_IDS.get(material) or _GENERIC_IDS.get(material.split("-")[0].split(" ")[0]) or "" - if generic: - logger.info("Spool assign: falling back to generic %r for material %r", generic, tray_type) - tray_info_idx = generic + # Resolve tray_info_idx + setting_id for the MQTT command. + # Three sources in priority order: + # 1. Cloud profile (if cloud connected) — resolve filament_id + # from setting_id via cloud API + # 2. Local profile — use generic filament ID for material + # 3. Hard-coded fallback — generic Bambu filament IDs + tray_info_idx = "" + setting_id = "" + sf = spool.slicer_filament or "" + + if sf: + # Check if it's a cloud preset (GFS*, PFUS*, or GF* official) + base_sf = sf.split("_")[0] if "_" in sf else sf + if base_sf.startswith("GFS") or base_sf.startswith("PFUS"): + # Cloud setting_id — need to resolve real filament_id + # Use base_sf (version suffix stripped) for cloud API + MQTT + setting_id = base_sf + try: + from backend.app.services.bambu_cloud import get_cloud_service + + cloud = get_cloud_service() + if cloud.is_authenticated: + detail = await cloud.get_setting_detail(base_sf) + if detail.get("filament_id"): + tray_info_idx = detail["filament_id"] + logger.info( + "Spool assign: resolved filament_id=%r from cloud for setting_id=%r", + tray_info_idx, + sf, + ) + # Use cloud preset name for tray_sub_brands if available + cloud_name = detail.get("name", "") + if cloud_name: + tray_sub_brands = cloud_name.replace(r"@.*$", "").split("@")[0].strip() + elif detail.get("base_id"): + # Derive from base_id (e.g. "GFSL05" → "GFL05") + bid = detail["base_id"].split("_")[0] + if bid.startswith("GFS") and len(bid) >= 5: + tray_info_idx = f"GF{bid[3:]}" + else: + tray_info_idx = bid + logger.info( + "Spool assign: derived filament_id=%r from base_id=%r", + tray_info_idx, + detail["base_id"], + ) + except Exception as e: + logger.warning("Spool assign: cloud lookup failed for %r: %s", sf, e) + + if not tray_info_idx: + # Cloud lookup failed — use normalize as fallback + tray_info_idx, setting_id = normalize_slicer_filament(sf) + elif base_sf.startswith("GF"): + # Official Bambu filament_id (e.g. "GFL05") + tray_info_idx, setting_id = normalize_slicer_filament(sf) + logger.info("Spool assign: using official filament_id=%r", tray_info_idx) + else: + # Could be a local preset ID or material type — try local DB + try: + local_id = int(sf) + from backend.app.models.local_preset import LocalPreset as LP + + lp_result = await db.execute(select(LP).where(LP.id == local_id, LP.preset_type == "filament")) + lp = lp_result.scalar_one_or_none() + if lp: + mat = (spool.material or lp.filament_type or "").upper().strip() + tray_info_idx = ( + _GENERIC_IDS.get(mat) or _GENERIC_IDS.get(mat.split("-")[0].split(" ")[0]) or "" + ) + # Use local preset name for tray_sub_brands + if lp.name: + tray_sub_brands = lp.name.split("@")[0].strip() + logger.info( + "Spool assign: local preset %d, material=%r, tray_info_idx=%r", + local_id, + mat, + tray_info_idx, + ) + except (ValueError, TypeError): + # Not a numeric ID — treat as material type string + tray_info_idx, setting_id = normalize_slicer_filament(sf) + + if not tray_info_idx: + # Fallback: reuse slot's existing tray_info_idx or generic ID + if ( + current_tray_info_idx + and current_tray_info_idx not in _GENERIC_ID_VALUES + and fingerprint_type + and fingerprint_type.upper() == tray_type.upper() + ): + logger.info( + "Spool assign: reusing slot's existing tray_info_idx=%r (same material %r)", + current_tray_info_idx, + tray_type, + ) + tray_info_idx = current_tray_info_idx + elif tray_type: + material = tray_type.upper().strip() + generic = _GENERIC_IDS.get(material) or _GENERIC_IDS.get(material.split("-")[0].split(" ")[0]) or "" + if generic: + logger.info("Spool assign: falling back to generic %r for material %r", generic, tray_type) + tray_info_idx = generic # Temperature: use spool overrides if set, else material defaults temp_min, temp_max = MATERIAL_TEMPS.get(spool.material.upper(), (200, 240)) diff --git a/backend/app/api/routes/printers.py b/backend/app/api/routes/printers.py index 725e59fbf..412cc3a3a 100644 --- a/backend/app/api/routes/printers.py +++ b/backend/app/api/routes/printers.py @@ -561,6 +561,7 @@ async def get_printer_status( timelapse=state.timelapse, ipcam=state.ipcam, wifi_signal=state.wifi_signal, + wired_network=state.wired_network, nozzles=nozzles, nozzle_rack=nozzle_rack, print_options=print_options, diff --git a/backend/app/api/routes/settings.py b/backend/app/api/routes/settings.py index fa32038d2..51b337abf 100644 --- a/backend/app/api/routes/settings.py +++ b/backend/app/api/routes/settings.py @@ -105,6 +105,7 @@ async def get_settings( "ams_temp_good", "ams_temp_fair", "library_disk_warning_gb", + "low_stock_threshold", ]: settings_dict[setting.key] = float(setting.value) elif setting.key in [ diff --git a/backend/app/api/routes/spoolbuddy.py b/backend/app/api/routes/spoolbuddy.py index b3f6c25ed..617e10d6a 100644 --- a/backend/app/api/routes/spoolbuddy.py +++ b/backend/app/api/routes/spoolbuddy.py @@ -17,13 +17,17 @@ CalibrationResponse, DeviceRegisterRequest, DeviceResponse, + DisplaySettingsRequest, HeartbeatRequest, HeartbeatResponse, ScaleReadingRequest, SetCalibrationFactorRequest, + SetTareRequest, TagRemovedRequest, TagScannedRequest, UpdateSpoolWeightRequest, + WriteTagRequest, + WriteTagResultRequest, ) from backend.app.services.spool_tag_matcher import get_spool_by_tag @@ -53,6 +57,12 @@ def _device_to_response(device: SpoolBuddyDevice) -> DeviceResponse: has_scale=device.has_scale, tare_offset=device.tare_offset, calibration_factor=device.calibration_factor, + nfc_reader_type=device.nfc_reader_type, + nfc_connection=device.nfc_connection, + display_brightness=device.display_brightness, + display_blank_timeout=device.display_blank_timeout, + has_backlight=device.has_backlight, + last_calibrated_at=device.last_calibrated_at, last_seen=device.last_seen, pending_command=device.pending_command, nfc_ok=device.nfc_ok, @@ -84,6 +94,9 @@ async def register_device( device.firmware_version = req.firmware_version device.has_nfc = req.has_nfc device.has_scale = req.has_scale + device.nfc_reader_type = req.nfc_reader_type + device.nfc_connection = req.nfc_connection + device.has_backlight = req.has_backlight device.last_seen = now logger.info("SpoolBuddy device re-registered: %s (%s)", req.device_id, req.hostname) else: @@ -96,6 +109,9 @@ async def register_device( has_scale=req.has_scale, tare_offset=req.tare_offset, calibration_factor=req.calibration_factor, + nfc_reader_type=req.nfc_reader_type, + nfc_connection=req.nfc_connection, + has_backlight=req.has_backlight, last_seen=now, ) db.add(device) @@ -150,10 +166,25 @@ async def device_heartbeat( device.firmware_version = req.firmware_version if req.ip_address: device.ip_address = req.ip_address + if req.nfc_reader_type: + device.nfc_reader_type = req.nfc_reader_type + if req.nfc_connection: + device.nfc_connection = req.nfc_connection # Return and clear pending command pending = device.pending_command - device.pending_command = None + pending_write = None + if pending == "write_tag" and device.pending_write_payload: + # Parse the stored JSON payload to include in response + import json + + try: + pending_write = json.loads(device.pending_write_payload) + except (json.JSONDecodeError, TypeError): + pending_write = None + # Don't clear write_tag command — it gets cleared by write-result + else: + device.pending_command = None await db.commit() @@ -168,8 +199,11 @@ async def device_heartbeat( return HeartbeatResponse( pending_command=pending, + pending_write_payload=pending_write, tare_offset=device.tare_offset, calibration_factor=device.calibration_factor, + display_brightness=device.display_brightness, + display_blank_timeout=device.display_blank_timeout, ) @@ -236,6 +270,121 @@ async def nfc_tag_removed( return {"status": "ok"} +@router.post("/nfc/write-tag") +async def nfc_write_tag( + req: WriteTagRequest, + db: AsyncSession = Depends(get_db), + _: User | None = RequirePermissionIfAuthEnabled(Permission.INVENTORY_UPDATE), +): + """Queue an NFC tag write command for a SpoolBuddy device.""" + import json + + from backend.app.models.spool import Spool + from backend.app.services.opentag3d import encode_opentag3d + + # Find the spool + result = await db.execute(select(Spool).where(Spool.id == req.spool_id)) + spool = result.scalar_one_or_none() + if not spool: + raise HTTPException(status_code=404, detail="Spool not found") + + # Find the device + result = await db.execute(select(SpoolBuddyDevice).where(SpoolBuddyDevice.device_id == req.device_id)) + device = result.scalar_one_or_none() + if not device: + raise HTTPException(status_code=404, detail="Device not registered") + + # Encode OpenTag3D NDEF data + ndef_data = encode_opentag3d(spool) + + # Store write payload and set pending command + device.pending_write_payload = json.dumps( + { + "spool_id": spool.id, + "ndef_data_hex": ndef_data.hex(), + } + ) + device.pending_command = "write_tag" + await db.commit() + + logger.info("Write tag queued for device %s, spool %d (%d bytes)", req.device_id, spool.id, len(ndef_data)) + return {"status": "queued"} + + +@router.post("/nfc/write-result") +async def nfc_write_result( + req: WriteTagResultRequest, + db: AsyncSession = Depends(get_db), + _: User | None = RequirePermissionIfAuthEnabled(Permission.INVENTORY_UPDATE), +): + """Handle NFC tag write result from SpoolBuddy daemon.""" + # Find the device and clear pending state + result = await db.execute(select(SpoolBuddyDevice).where(SpoolBuddyDevice.device_id == req.device_id)) + device = result.scalar_one_or_none() + if not device: + raise HTTPException(status_code=404, detail="Device not registered") + + device.pending_command = None + device.pending_write_payload = None + + if req.success: + # Link the tag to the spool + from backend.app.models.spool import Spool + + result = await db.execute(select(Spool).where(Spool.id == req.spool_id)) + spool = result.scalar_one_or_none() + if spool: + spool.tag_uid = req.tag_uid.upper() + spool.tag_type = "ntag" + spool.data_origin = "opentag3d" + spool.encode_time = datetime.now(timezone.utc) + logger.info("Tag written and linked: spool %d -> tag %s", spool.id, req.tag_uid) + + await db.commit() + await ws_manager.broadcast( + { + "type": "spoolbuddy_tag_written", + "device_id": req.device_id, + "spool_id": req.spool_id, + "tag_uid": req.tag_uid, + } + ) + else: + await db.commit() + await ws_manager.broadcast( + { + "type": "spoolbuddy_tag_write_failed", + "device_id": req.device_id, + "spool_id": req.spool_id, + "message": req.message, + } + ) + logger.warning("Tag write failed for device %s: %s", req.device_id, req.message) + + return {"status": "ok"} + + +@router.post("/devices/{device_id}/cancel-write") +async def cancel_write( + device_id: str, + db: AsyncSession = Depends(get_db), + _: User | None = RequirePermissionIfAuthEnabled(Permission.INVENTORY_UPDATE), +): + """Cancel a pending write-tag command.""" + result = await db.execute(select(SpoolBuddyDevice).where(SpoolBuddyDevice.device_id == device_id)) + device = result.scalar_one_or_none() + if not device: + raise HTTPException(status_code=404, detail="Device not registered") + + if device.pending_command == "write_tag": + device.pending_command = None + device.pending_write_payload = None + await db.commit() + logger.info("Write tag cancelled for device %s", device_id) + + return {"status": "ok"} + + # --- Scale endpoints --- @@ -274,6 +423,8 @@ async def update_spool_weight( # net weight = total on scale minus empty spool core net_filament = max(0, req.weight_grams - spool.core_weight) spool.weight_used = max(0, spool.label_weight - net_filament) + spool.last_scale_weight = req.weight_grams + spool.last_weighed_at = datetime.now(timezone.utc) await db.commit() logger.info( @@ -305,6 +456,30 @@ async def tare_scale( return {"status": "ok", "message": "Tare command queued"} +@router.post("/devices/{device_id}/calibration/set-tare") +async def set_tare_offset( + device_id: str, + req: SetTareRequest, + db: AsyncSession = Depends(get_db), + _: User | None = RequirePermissionIfAuthEnabled(Permission.INVENTORY_UPDATE), +): + """Store tare offset reported by the daemon after executing a tare.""" + result = await db.execute(select(SpoolBuddyDevice).where(SpoolBuddyDevice.device_id == device_id)) + device = result.scalar_one_or_none() + if not device: + raise HTTPException(status_code=404, detail="Device not registered") + + device.tare_offset = req.tare_offset + device.last_calibrated_at = datetime.now(timezone.utc) + await db.commit() + + logger.info("SpoolBuddy %s tare offset set to %d", device_id, req.tare_offset) + return CalibrationResponse( + tare_offset=device.tare_offset, + calibration_factor=device.calibration_factor, + ) + + @router.post("/devices/{device_id}/calibration/set-factor") async def set_calibration_factor( device_id: str, @@ -318,11 +493,15 @@ async def set_calibration_factor( if not device: raise HTTPException(status_code=404, detail="Device not registered") - raw_delta = req.raw_adc - device.tare_offset + tare = req.tare_raw_adc if req.tare_raw_adc is not None else device.tare_offset + raw_delta = req.raw_adc - tare if raw_delta == 0: raise HTTPException(status_code=400, detail="Raw ADC value equals tare offset — place weight on scale") device.calibration_factor = req.known_weight_grams / raw_delta + if req.tare_raw_adc is not None: + device.tare_offset = tare + device.last_calibrated_at = datetime.now(timezone.utc) await db.commit() logger.info( @@ -331,7 +510,7 @@ async def set_calibration_factor( device.calibration_factor, req.known_weight_grams, req.raw_adc, - device.tare_offset, + tare, ) return CalibrationResponse( tare_offset=device.tare_offset, @@ -357,6 +536,106 @@ async def get_calibration( ) +# --- Display settings --- + + +@router.put("/devices/{device_id}/display") +async def update_display_settings( + device_id: str, + req: DisplaySettingsRequest, + db: AsyncSession = Depends(get_db), + _: User | None = RequirePermissionIfAuthEnabled(Permission.INVENTORY_UPDATE), +): + """Update display brightness and screen blank timeout for a device.""" + result = await db.execute(select(SpoolBuddyDevice).where(SpoolBuddyDevice.device_id == device_id)) + device = result.scalar_one_or_none() + if not device: + raise HTTPException(status_code=404, detail="Device not registered") + + device.display_brightness = req.brightness + device.display_blank_timeout = req.blank_timeout + await db.commit() + + logger.info( + "SpoolBuddy %s display updated: brightness=%d%%, blank_timeout=%ds", + device_id, + req.brightness, + req.blank_timeout, + ) + return {"status": "ok", "brightness": req.brightness, "blank_timeout": req.blank_timeout} + + +# --- Update check --- + + +@router.get("/devices/{device_id}/update-check") +async def check_daemon_update( + device_id: str, + include_beta: bool = False, + db: AsyncSession = Depends(get_db), + _: User | None = RequirePermissionIfAuthEnabled(Permission.INVENTORY_READ), +): + """Check if a newer daemon version is available on GitHub.""" + import httpx + + from backend.app.api.routes.updates import is_newer_version, parse_version + from backend.app.core.config import GITHUB_REPO + + result = await db.execute(select(SpoolBuddyDevice).where(SpoolBuddyDevice.device_id == device_id)) + device = result.scalar_one_or_none() + if not device: + raise HTTPException(status_code=404, detail="Device not registered") + + current = device.firmware_version or "0.0.0" + + try: + async with httpx.AsyncClient() as client: + response = await client.get( + f"https://api.github.com/repos/{GITHUB_REPO}/releases?per_page=20", + headers={"Accept": "application/vnd.github.v3+json"}, + timeout=10.0, + ) + response.raise_for_status() + releases = response.json() + + release_data = None + for release in releases: + tag = release.get("tag_name", "") + if include_beta: + release_data = release + break + else: + parsed = parse_version(tag) + if parsed[4] == 0: # is_prerelease == 0 + release_data = release + break + + if not release_data: + return { + "current_version": current, + "latest_version": None, + "update_available": False, + "release_url": None, + } + + latest = release_data.get("tag_name", "").lstrip("v") + return { + "current_version": current, + "latest_version": latest, + "update_available": is_newer_version(latest, current), + "release_url": release_data.get("html_url"), + } + except Exception as e: + logger.warning("Failed to check for daemon updates: %s", e) + return { + "current_version": current, + "latest_version": None, + "update_available": False, + "release_url": None, + "error": str(e), + } + + # --- Background watchdog --- diff --git a/backend/app/api/routes/support.py b/backend/app/api/routes/support.py index 07a323879..cd863ea75 100644 --- a/backend/app/api/routes/support.py +++ b/backend/app/api/routes/support.py @@ -325,6 +325,37 @@ def _sanitize_path(path: str) -> str: return path +def _detect_docker_network_mode() -> str: + """Detect Docker network mode by checking for host-level interfaces. + + In host mode the container shares the host network namespace, so Docker + infrastructure interfaces (docker0, br-*, veth*) are visible. In bridge + mode the container is isolated and only sees its own veth (named eth0). + """ + try: + import socket + + for _idx, name in socket.if_nameindex(): + if name.startswith(("docker", "br-", "veth", "virbr")): + return "host" + except Exception: + pass + return "bridge" + + +def _mask_subnet(subnet: str) -> str: + """Mask the first two octets of a subnet string. e.g. '192.168.1.0/24' -> 'x.x.1.0/24'.""" + try: + parts = subnet.split(".") + if len(parts) >= 4: + parts[0] = "x" + parts[1] = "x" + return ".".join(parts) + except Exception: + pass + return subnet + + def _anonymize_mqtt_broker(broker: str) -> str: """Anonymize MQTT broker address. IPs become [IP], hostnames become *.domain.""" if not broker: @@ -418,11 +449,10 @@ async def _collect_support_info() -> dict: if in_docker: try: mem_limit = _get_container_memory_limit() - interfaces = get_network_interfaces() info["docker"] = { "container_memory_limit_bytes": mem_limit, "container_memory_limit_formatted": _format_bytes(mem_limit) if mem_limit else None, - "network_mode_hint": "host" if len(interfaces) > 2 else "bridge", + "network_mode_hint": _detect_docker_network_mode(), } except Exception: logger.debug("Failed to collect Docker info", exc_info=True) @@ -500,6 +530,34 @@ async def _collect_support_info() -> dict: } ) + # Virtual printers + try: + from backend.app.models.virtual_printer import VirtualPrinter + from backend.app.services.virtual_printer import VIRTUAL_PRINTER_MODELS, virtual_printer_manager + + result = await db.execute(select(VirtualPrinter).order_by(VirtualPrinter.id)) + vps = result.scalars().all() + info["virtual_printers"] = [] + for vp in vps: + instance = virtual_printer_manager.get_instance(vp.id) + status = instance.get_status() if instance else None + model_code = vp.model or "C12" + info["virtual_printers"].append( + { + "index": vp.id, + "enabled": vp.enabled, + "mode": vp.mode, + "model": model_code, + "model_name": VIRTUAL_PRINTER_MODELS.get(model_code, model_code), + "has_target_printer": vp.target_printer_id is not None, + "has_bind_ip": bool(vp.bind_ip), + "running": status.get("running", False) if status else False, + "pending_files": status.get("pending_files", 0) if status else 0, + } + ) + except Exception: + logger.debug("Failed to collect virtual printer info", exc_info=True) + # Non-sensitive settings result = await db.execute(select(Settings)) all_settings = result.scalars().all() @@ -642,12 +700,12 @@ async def _collect_support_info() -> dict: except Exception: logger.debug("Failed to collect log file info", exc_info=True) - # Network interfaces (subnets only — already anonymized) + # Network interfaces (subnets with first two octets masked) try: interfaces = get_network_interfaces() info["network"] = { "interface_count": len(interfaces), - "interfaces": [{"name": iface["name"], "subnet": iface["subnet"]} for iface in interfaces], + "interfaces": [{"name": iface["name"], "subnet": _mask_subnet(iface["subnet"])} for iface in interfaces], } except Exception: logger.debug("Failed to collect network info", exc_info=True) @@ -721,6 +779,45 @@ def _get_log_content(max_bytes: int = 10 * 1024 * 1024, sensitive_strings: dict[ return content.encode("utf-8") +async def _get_recent_sanitized_logs(max_lines: int = 200) -> str: + """Get recent log lines, sanitized for inclusion in bug reports.""" + # Collect sensitive strings from DB for redaction + sensitive_strings: dict[str, str] = {} + async with async_session() as db: + result = await db.execute(select(Printer.name, Printer.serial_number, Printer.ip_address)) + for name, serial, ip_address in result.all(): + if name: + sensitive_strings[name] = "[PRINTER]" + if serial: + sensitive_strings[serial] = "[SERIAL]" + if ip_address: + sensitive_strings[ip_address] = "[IP]" + + result = await db.execute(select(User.username)) + for (username,) in result.all(): + if username: + sensitive_strings[username] = "[USER]" + + result = await db.execute(select(Settings.value).where(Settings.key == "bambu_cloud_email")) + cloud_email = result.scalar_one_or_none() + if cloud_email: + sensitive_strings[cloud_email] = "[EMAIL]" + + log_file = settings.log_dir / "bambuddy.log" + if not log_file.exists(): + return "" + + # Read last portion of log file + try: + content = log_file.read_text(encoding="utf-8", errors="replace") + lines = content.splitlines() + recent = "\n".join(lines[-max_lines:]) + return _sanitize_log_content(recent, sensitive_strings) + except Exception: + logger.debug("Failed to read logs for bug report", exc_info=True) + return "" + + @router.get("/bundle") async def generate_support_bundle( _: User | None = RequirePermissionIfAuthEnabled(Permission.SETTINGS_READ), diff --git a/backend/app/core/config.py b/backend/app/core/config.py index 65e1275eb..bec010eaa 100644 --- a/backend/app/core/config.py +++ b/backend/app/core/config.py @@ -5,8 +5,9 @@ from pydantic_settings import BaseSettings # Application version - single source of truth -APP_VERSION = "0.2.1.1" +APP_VERSION = "0.2.2b1" GITHUB_REPO = "maziggy/bambuddy" +BUG_REPORT_RELAY_URL = os.environ.get("BUG_REPORT_RELAY_URL", "https://bambuddy.cool/api/bug-report") # App directory - where the application is installed (for static files) _app_dir = Path(__file__).resolve().parent.parent.parent.parent diff --git a/backend/app/core/database.py b/backend/app/core/database.py index 8b32a0ee7..08ce35d0a 100644 --- a/backend/app/core/database.py +++ b/backend/app/core/database.py @@ -76,6 +76,7 @@ async def init_db(): ams_history, api_key, archive, + bug_report, color_catalog, external_link, filament, @@ -1230,6 +1231,16 @@ async def run_migrations(conn): except OperationalError: pass # Already applied + # Migration: Add SpoolBuddy scale weight tracking columns to spool table + try: + await conn.execute(text("ALTER TABLE spool ADD COLUMN last_scale_weight INTEGER")) + except OperationalError: + pass # Already applied + try: + await conn.execute(text("ALTER TABLE spool ADD COLUMN last_weighed_at DATETIME")) + except OperationalError: + pass # Already applied + # Migration: Add cost tracking fields to spool table try: await conn.execute(text("ALTER TABLE spool ADD COLUMN cost_per_kg REAL")) @@ -1318,6 +1329,43 @@ async def run_migrations(conn): except OperationalError: pass # Already applied + # Migration: Add NFC reader and display control columns to spoolbuddy_devices + try: + await conn.execute(text("ALTER TABLE spoolbuddy_devices ADD COLUMN nfc_reader_type VARCHAR(20)")) + except OperationalError: + pass # Already applied + try: + await conn.execute(text("ALTER TABLE spoolbuddy_devices ADD COLUMN nfc_connection VARCHAR(20)")) + except OperationalError: + pass # Already applied + try: + await conn.execute(text("ALTER TABLE spoolbuddy_devices ADD COLUMN display_brightness INTEGER DEFAULT 100")) + except OperationalError: + pass # Already applied + try: + await conn.execute(text("ALTER TABLE spoolbuddy_devices ADD COLUMN display_blank_timeout INTEGER DEFAULT 0")) + except OperationalError: + pass # Already applied + try: + await conn.execute(text("ALTER TABLE spoolbuddy_devices ADD COLUMN has_backlight BOOLEAN DEFAULT 0")) + except OperationalError: + pass # Already applied + try: + await conn.execute(text("ALTER TABLE spoolbuddy_devices ADD COLUMN last_calibrated_at DATETIME")) + except OperationalError: + pass # Already applied + + # Migration: Add NFC tag write payload column to spoolbuddy_devices + try: + await conn.execute(text("ALTER TABLE spoolbuddy_devices ADD COLUMN pending_write_payload TEXT")) + except OperationalError: + pass # Already applied + + # Cleanup: Remove obsolete settings keys that are no longer used + obsolete_keys = ["slicer_binary_path"] + for key in obsolete_keys: + await conn.execute(text("DELETE FROM settings WHERE key = :key"), {"key": key}) + async def seed_notification_templates(): """Seed default notification templates if they don't exist.""" diff --git a/backend/app/main.py b/backend/app/main.py index 60d82156c..7c4e851a1 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -16,6 +16,7 @@ archives, auth, background_dispatch as background_dispatch_routes, + bug_report, camera, cloud, discovery, @@ -2208,6 +2209,10 @@ def log_timing(section: str): queue_item = printing_items[0] if printing_items else None if queue_item: queue_status = data.get("status", "completed") + # MQTT sends "aborted" for cancelled prints; normalise to + # "cancelled" so it matches the queue schema Literal. + if queue_status == "aborted": + queue_status = "cancelled" queue_item.status = queue_status queue_item.completed_at = datetime.now(timezone.utc) await db.commit() @@ -3256,6 +3261,22 @@ async def lifespan(app: FastAPI): # Startup await init_db() + # Fix queue items stuck with invalid "aborted" status (should be "cancelled"). + # This can happen when a print was cancelled mid-print on versions before this fix. + try: + async with async_session() as db: + from backend.app.models.print_queue import PrintQueueItem + + result = await db.execute(select(PrintQueueItem).where(PrintQueueItem.status == "aborted")) + aborted_items = result.scalars().all() + if aborted_items: + for item in aborted_items: + item.status = "cancelled" + await db.commit() + logging.info("Fixed %d queue item(s) with invalid 'aborted' status → 'cancelled'", len(aborted_items)) + except Exception as e: + logging.warning("Failed to fix aborted queue items: %s", e) + # Restore debug logging state from previous session await init_debug_logging() @@ -3563,6 +3584,7 @@ async def auth_middleware(request, call_next): # API routes app.include_router(auth.router, prefix=app_settings.api_prefix) +app.include_router(bug_report.router, prefix=app_settings.api_prefix) app.include_router(users.router, prefix=app_settings.api_prefix) app.include_router(groups.router, prefix=app_settings.api_prefix) app.include_router(printers.router, prefix=app_settings.api_prefix) diff --git a/backend/app/models/bug_report.py b/backend/app/models/bug_report.py new file mode 100644 index 000000000..350a831aa --- /dev/null +++ b/backend/app/models/bug_report.py @@ -0,0 +1,20 @@ +from datetime import datetime + +from sqlalchemy import Boolean, DateTime, Integer, String, Text, func +from sqlalchemy.orm import Mapped, mapped_column + +from backend.app.core.database import Base + + +class BugReport(Base): + __tablename__ = "bug_reports" + + id: Mapped[int] = mapped_column(primary_key=True) + description: Mapped[str] = mapped_column(Text) + reporter_email: Mapped[str | None] = mapped_column(String(255), nullable=True) + github_issue_number: Mapped[int | None] = mapped_column(Integer, nullable=True) + github_issue_url: Mapped[str | None] = mapped_column(String(500), nullable=True) + status: Mapped[str] = mapped_column(String(20), default="submitted") + error_message: Mapped[str | None] = mapped_column(Text, nullable=True) + email_sent: Mapped[bool] = mapped_column(Boolean, default=False) + created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.now()) diff --git a/backend/app/models/spool.py b/backend/app/models/spool.py index 9748f45ca..91c79df45 100644 --- a/backend/app/models/spool.py +++ b/backend/app/models/spool.py @@ -24,6 +24,8 @@ class Spool(Base): ) # Reference to spool_catalog entry for core weight weight_used: Mapped[float] = mapped_column(Float, default=0) # Consumed grams weight_locked: Mapped[bool] = mapped_column(Boolean, default=False) # Lock weight from AMS auto-sync + last_scale_weight: Mapped[int | None] = mapped_column(Integer) # Last gross weight from scale (g) + last_weighed_at: Mapped[datetime | None] = mapped_column(DateTime) # When last weighed slicer_filament: Mapped[str | None] = mapped_column(String(50)) # Preset ID (e.g. "GFL99") slicer_filament_name: Mapped[str | None] = mapped_column(String(100)) # Preset name for slicer nozzle_temp_min: Mapped[int | None] = mapped_column() # Override min temp diff --git a/backend/app/models/spoolbuddy_device.py b/backend/app/models/spoolbuddy_device.py index a74c1b105..a71629cfd 100644 --- a/backend/app/models/spoolbuddy_device.py +++ b/backend/app/models/spoolbuddy_device.py @@ -1,6 +1,6 @@ from datetime import datetime -from sqlalchemy import Boolean, DateTime, Float, Integer, String, func +from sqlalchemy import Boolean, DateTime, Float, Integer, String, Text, func from sqlalchemy.orm import Mapped, mapped_column from backend.app.core.database import Base @@ -20,8 +20,15 @@ class SpoolBuddyDevice(Base): has_scale: Mapped[bool] = mapped_column(Boolean, default=True) tare_offset: Mapped[int] = mapped_column(Integer, default=0) calibration_factor: Mapped[float] = mapped_column(Float, default=1.0) + nfc_reader_type: Mapped[str | None] = mapped_column(String(20)) + nfc_connection: Mapped[str | None] = mapped_column(String(20)) + display_brightness: Mapped[int] = mapped_column(Integer, default=100) + display_blank_timeout: Mapped[int] = mapped_column(Integer, default=0) + has_backlight: Mapped[bool] = mapped_column(Boolean, default=False) + last_calibrated_at: Mapped[datetime | None] = mapped_column(DateTime) last_seen: Mapped[datetime | None] = mapped_column(DateTime) pending_command: Mapped[str | None] = mapped_column(String(50)) + pending_write_payload: Mapped[str | None] = mapped_column(Text, nullable=True) nfc_ok: Mapped[bool] = mapped_column(Boolean, default=False) scale_ok: Mapped[bool] = mapped_column(Boolean, default=False) uptime_s: Mapped[int] = mapped_column(Integer, default=0) diff --git a/backend/app/schemas/archive.py b/backend/app/schemas/archive.py index 4ba8da812..6dae2ae67 100644 --- a/backend/app/schemas/archive.py +++ b/backend/app/schemas/archive.py @@ -110,6 +110,27 @@ class Config: from_attributes = True +class ArchiveSlim(BaseModel): + """Lightweight archive response for stats/dashboard widgets.""" + + printer_id: int | None + print_name: str | None + print_time_seconds: int | None + actual_time_seconds: int | None = None + filament_used_grams: float | None + filament_type: str | None + filament_color: str | None + status: str + started_at: datetime | None + completed_at: datetime | None + cost: float | None + quantity: int = 1 + created_at: datetime + + class Config: + from_attributes = True + + class ArchiveStats(BaseModel): total_prints: int successful_prints: int diff --git a/backend/app/schemas/printer.py b/backend/app/schemas/printer.py index e66c73269..fa00278b5 100644 --- a/backend/app/schemas/printer.py +++ b/backend/app/schemas/printer.py @@ -205,6 +205,7 @@ class PrinterStatus(BaseModel): timelapse: bool = False # Timelapse recording active ipcam: bool = False # Live view enabled wifi_signal: int | None = None # WiFi signal strength in dBm + wired_network: bool = False # Ethernet connection detected nozzles: list[NozzleInfoResponse] = [] # Nozzle hardware info (index 0=left/primary, 1=right) nozzle_rack: list[NozzleRackSlot] = [] # H2C 6-nozzle tool-changer rack print_options: PrintOptionsResponse | None = None # AI detection and print options diff --git a/backend/app/schemas/settings.py b/backend/app/schemas/settings.py index b6d3fd97a..12ba45229 100644 --- a/backend/app/schemas/settings.py +++ b/backend/app/schemas/settings.py @@ -38,6 +38,7 @@ class AppSettings(BaseModel): include_beta_updates: bool = Field(default=False, description="Include beta/prerelease versions in update checks") # Language + language: str = Field(default="en", description="UI language (en, de, fr, ja, it, pt-BR)") notification_language: str = Field(default="en", description="Language for push notifications (en, de)") # Bed cooled notification threshold @@ -148,6 +149,14 @@ class AppSettings(BaseModel): default="", description="Bearer token for Prometheus metrics authentication (optional)" ) + # Inventory low stock threshold + low_stock_threshold: float = Field( + default=20.0, + ge=0.1, + le=99.9, + description="Low stock threshold percentage (%) for inventory filtering and display", + ) + class AppSettingsUpdate(BaseModel): """Schema for updating settings (all fields optional).""" @@ -167,6 +176,7 @@ class AppSettingsUpdate(BaseModel): check_updates: bool | None = None check_printer_firmware: bool | None = None include_beta_updates: bool | None = None + language: str | None = None notification_language: str | None = None bed_cooled_threshold: float | None = None ams_humidity_good: int | None = None @@ -208,3 +218,4 @@ class AppSettingsUpdate(BaseModel): preferred_slicer: str | None = None prometheus_enabled: bool | None = None prometheus_token: str | None = None + low_stock_threshold: float | None = Field(default=None, ge=0.1, le=99.9) diff --git a/backend/app/schemas/spool.py b/backend/app/schemas/spool.py index 829d24382..0a2394b8f 100644 --- a/backend/app/schemas/spool.py +++ b/backend/app/schemas/spool.py @@ -24,6 +24,8 @@ class SpoolBase(BaseModel): tag_type: str | None = None cost_per_kg: float | None = Field(default=None, ge=0) weight_locked: bool = False + last_scale_weight: int | None = None + last_weighed_at: datetime | None = None class SpoolCreate(SpoolBase): diff --git a/backend/app/schemas/spoolbuddy.py b/backend/app/schemas/spoolbuddy.py index 51292e11e..6a1aafbb3 100644 --- a/backend/app/schemas/spoolbuddy.py +++ b/backend/app/schemas/spoolbuddy.py @@ -14,6 +14,9 @@ class DeviceRegisterRequest(BaseModel): has_scale: bool = True tare_offset: int = 0 calibration_factor: float = 1.0 + nfc_reader_type: str | None = None + nfc_connection: str | None = None + has_backlight: bool = False class DeviceResponse(BaseModel): @@ -26,6 +29,12 @@ class DeviceResponse(BaseModel): has_scale: bool tare_offset: int calibration_factor: float + nfc_reader_type: str | None = None + nfc_connection: str | None = None + display_brightness: int = 100 + display_blank_timeout: int = 0 + has_backlight: bool = False + last_calibrated_at: datetime | None = None last_seen: datetime | None = None pending_command: str | None = None nfc_ok: bool @@ -45,12 +54,17 @@ class HeartbeatRequest(BaseModel): uptime_s: int = 0 firmware_version: str | None = None ip_address: str | None = None + nfc_reader_type: str | None = None + nfc_connection: str | None = None class HeartbeatResponse(BaseModel): pending_command: str | None = None + pending_write_payload: dict | None = None tare_offset: int calibration_factor: float + display_brightness: int = 100 + display_blank_timeout: int = 0 # --- NFC schemas --- @@ -92,11 +106,37 @@ class TareRequest(BaseModel): pass +class SetTareRequest(BaseModel): + tare_offset: int + + class SetCalibrationFactorRequest(BaseModel): known_weight_grams: float = Field(..., gt=0) raw_adc: int + tare_raw_adc: int | None = None class CalibrationResponse(BaseModel): tare_offset: int calibration_factor: float + + +# --- Display schemas --- + + +class WriteTagRequest(BaseModel): + device_id: str + spool_id: int + + +class WriteTagResultRequest(BaseModel): + device_id: str + spool_id: int + tag_uid: str + success: bool + message: str | None = None + + +class DisplaySettingsRequest(BaseModel): + brightness: int = Field(ge=0, le=100) + blank_timeout: int = Field(ge=0) diff --git a/backend/app/services/archive.py b/backend/app/services/archive.py index 859b45844..8c4430fcb 100644 --- a/backend/app/services/archive.py +++ b/backend/app/services/archive.py @@ -4,7 +4,7 @@ import re import shutil import zipfile -from datetime import datetime, timezone +from datetime import date, datetime, time, timezone from pathlib import Path from defusedxml import ElementTree as ET @@ -996,6 +996,8 @@ async def list_archives( self, printer_id: int | None = None, project_id: int | None = None, + date_from: date | None = None, + date_to: date | None = None, limit: int = 50, offset: int = 0, ) -> list[PrintArchive]: @@ -1014,6 +1016,14 @@ async def list_archives( if project_id: query = query.where(PrintArchive.project_id == project_id) + if date_from: + dt_from = datetime.combine(date_from, time.min, tzinfo=timezone.utc) + query = query.where(PrintArchive.created_at >= dt_from) + + if date_to: + dt_to = datetime.combine(date_to, time.max, tzinfo=timezone.utc) + query = query.where(PrintArchive.created_at <= dt_to) + query = query.limit(limit).offset(offset) result = await self.db.execute(query) return list(result.scalars().all()) diff --git a/backend/app/services/background_dispatch.py b/backend/app/services/background_dispatch.py index 9d758708c..03a5e47f8 100644 --- a/backend/app/services/background_dispatch.py +++ b/backend/app/services/background_dispatch.py @@ -715,6 +715,7 @@ async def _run_print_library_file(self, job: PrintDispatchJob): archive = await archive_service.archive_print( printer_id=job.printer_id, source_file=file_path, + original_filename=lib_file.filename, ) if not archive: raise RuntimeError("Failed to create archive") diff --git a/backend/app/services/bambu_ftp.py b/backend/app/services/bambu_ftp.py index 56a829723..ebf965965 100644 --- a/backend/app/services/bambu_ftp.py +++ b/backend/app/services/bambu_ftp.py @@ -4,6 +4,7 @@ import os import socket import ssl +import time from collections.abc import Awaitable, Callable from ftplib import FTP, FTP_TLS # nosec B402 from io import BytesIO @@ -81,11 +82,10 @@ class BambuFTPClient: # Models that may need SSL mode fallback (try prot_p first, fall back to prot_c) # These models have varying FTP SSL behavior depending on firmware version A1_MODELS = ("A1", "A1 Mini") - # Chunk size for manual upload transfer (1MB) - # Larger chunks reduce overhead and work better with A1 printers - CHUNK_SIZE = 1024 * 1024 - # Per-chunk data socket timeout during upload. - UPLOAD_CHUNK_TIMEOUT = 120 + # Chunk size for manual upload transfer (64KB) + # Smaller chunks provide smoother progress reporting — at typical printer FTP + # speeds (~50-100KB/s) this gives a progress update roughly every second. + CHUNK_SIZE = 64 * 1024 # Cache for working FTP modes per printer IP # Maps IP -> "prot_p" or "prot_c" @@ -368,11 +368,16 @@ def upload_file( # A1 printers have issues with storbinary's voidresp() hanging after transfer with open(local_path, "rb") as f: logger.debug("FTP STOR command starting for %s", remote_path) + t0 = time.monotonic() conn = self._ftp.transfercmd(f"STOR {remote_path}") + logger.info( + "FTP data channel ready in %.1fs (PASV + TLS handshake)", + time.monotonic() - t0, + ) # Set explicit socket options for reliable transfer conn.setblocking(True) - conn.settimeout(self.UPLOAD_CHUNK_TIMEOUT) + conn.settimeout(self.timeout) try: while True: @@ -408,14 +413,11 @@ def upload_file( except OSError: pass - # Skip voidresp() for A1 models — they hang after transfercmd uploads - if self.printer_model not in self.A1_MODELS: - try: - self._ftp.voidresp() - except (OSError, ftplib.Error) as e: - # Data transfer already completed — voidresp() failure is just a noisy - # 226 acknowledgment issue, not an actual upload failure. Log and continue. - logger.warning("FTP upload response for %s was not clean (data already sent): %s", remote_path, e) + # Skip voidresp() for all models — the data transfer is already complete. + # A1 models hang indefinitely on voidresp(). H2D printers (vsFTPd) delay + # the 226 response by 30+ seconds after data is fully sent. Even X1C/P1S + # gain nothing from waiting — the file is on the SD card once sendall() returns. + # Verified via direct curl upload: 226 arrives ~32s after data channel closes. if callback_exception is not None: cleanup_ok = False @@ -432,7 +434,15 @@ def upload_file( f"Upload cancelled but failed to remove partial file {remote_path} from printer" ) from callback_exception - logger.info("FTP upload complete: %s", remote_path) + elapsed = time.monotonic() - t0 + speed_kbs = (file_size / 1024) / elapsed if elapsed > 0 else 0 + logger.info( + "FTP upload complete: %s (%s bytes in %.1fs, %.0f KB/s)", + remote_path, + file_size, + elapsed, + speed_kbs, + ) return True except ftplib.error_perm as e: # Permanent FTP error (4xx/5xx response) @@ -462,7 +472,7 @@ def upload_bytes(self, data: bytes, remote_path: str) -> bool: # Use manual transfer instead of storbinary() for A1 compatibility conn = self._ftp.transfercmd(f"STOR {remote_path}") conn.setblocking(True) - conn.settimeout(self.UPLOAD_CHUNK_TIMEOUT) + conn.settimeout(self.timeout) try: # Send data in chunks diff --git a/backend/app/services/bambu_mqtt.py b/backend/app/services/bambu_mqtt.py index b92590202..ca7329605 100644 --- a/backend/app/services/bambu_mqtt.py +++ b/backend/app/services/bambu_mqtt.py @@ -113,6 +113,7 @@ class PrinterState: timelapse: bool = False # Timelapse recording active ipcam: bool = False # Live view / camera streaming enabled wifi_signal: int | None = None # WiFi signal strength in dBm + wired_network: bool = False # Ethernet connection detected (home_flag bit 18) # Nozzle hardware info (for dual nozzle printers, index 0 = left, 1 = right) nozzles: list = field(default_factory=lambda: [NozzleInfo(), NozzleInfo()]) # AI detection and print options @@ -538,6 +539,16 @@ def _process_message(self, payload: dict): except ValueError: pass # Ignore unparseable wifi_signal strings; field is non-critical + # Parse developer LAN mode from top-level "fun" field + # Some firmware versions send "fun" at the top level, others inside "print" + if "fun" in payload and self.state.developer_mode is None: + try: + fun_val = payload["fun"] + fun_int = fun_val if isinstance(fun_val, int) else int(fun_val, 16) + self.state.developer_mode = (fun_int & 0x20000000) == 0 + except (ValueError, TypeError): + pass + if "print" in payload: print_data = payload["print"] @@ -1844,6 +1855,11 @@ def parse_fan_speed(value: str | int | None) -> int | None: severity = (attr >> 8) & 0xF # Module is in attr byte 3 (bits 24-31) module = (attr >> 24) & 0xFF + # Skip non-error status codes — all real HMS errors + # have code >= 0x4000. Lower values are status/phase + # indicators that some firmware sends during normal printing. + if code < 0x4000: + continue self.state.hms_errors.append( HMSError( code=f"0x{code:x}" if code else "0x0", @@ -1865,32 +1881,38 @@ def parse_fan_speed(value: str | int | None) -> int | None: module = (print_error >> 16) & 0xFFFF # High 16 bits (e.g., 0x0500) error = print_error & 0xFFFF # Low 16 bits (e.g., 0x8061) - # Store in a format that matches the community error database - # attr stores the full 32-bit value for reconstruction - # code stores the short format string for lookup - short_code = f"{module:04X}_{error:04X}" + # Values below 0x4000 are status/phase indicators, not real errors. + # All known HMS errors use 0x4xxx (fatal), 0x8xxx (warning), 0xCxxx (prompt). + # Some firmware sends low values like 0x0002 during normal printing. + if error < 0x4000: + pass # Skip — not a real error + else: + # Store in a format that matches the community error database + # attr stores the full 32-bit value for reconstruction + # code stores the short format string for lookup + short_code = f"{module:04X}_{error:04X}" - logger.debug( - f"[{self.serial_number}] print_error: {print_error} (0x{print_error:08x}) -> short_code={short_code}" - ) + logger.debug( + f"[{self.serial_number}] print_error: {print_error} (0x{print_error:08x}) -> short_code={short_code}" + ) + + # Only add if not already in HMS errors (avoid duplicates) + existing_short_codes = set() + for e in self.state.hms_errors: + # Extract short code from existing errors + e_module = (e.attr >> 16) & 0xFFFF + e_error = int(e.code.replace("0x", ""), 16) if e.code else 0 + existing_short_codes.add(f"{e_module:04X}_{e_error:04X}") - # Only add if not already in HMS errors (avoid duplicates) - existing_short_codes = set() - for e in self.state.hms_errors: - # Extract short code from existing errors - e_module = (e.attr >> 16) & 0xFFFF - e_error = int(e.code.replace("0x", ""), 16) if e.code else 0 - existing_short_codes.add(f"{e_module:04X}_{e_error:04X}") - - if short_code not in existing_short_codes: - self.state.hms_errors.append( - HMSError( - code=f"0x{error:x}", - attr=print_error, # Store full value for display - module=module >> 8, # High byte of module (e.g., 0x05) - severity=3, # Warning level for print_error + if short_code not in existing_short_codes: + self.state.hms_errors.append( + HMSError( + code=f"0x{error:x}", + attr=print_error, # Store full value for display + module=module >> 8, # High byte of module (e.g., 0x05) + severity=3, # Warning level for print_error + ) ) - ) # Parse SD card status if "sdcard" in data: @@ -1909,6 +1931,8 @@ def parse_fan_speed(value: str | int | None) -> int | None: f"[{self.serial_number}] store_to_sdcard changed: {self.state.store_to_sdcard} -> {store_to_sdcard}" ) self.state.store_to_sdcard = store_to_sdcard + # Bit 18 (0x00040000) indicates wired/ethernet connection + self.state.wired_network = bool((home_flag >> 18) & 1) # Parse timelapse status (recording active during print) if "timelapse" in data: diff --git a/backend/app/services/bug_report.py b/backend/app/services/bug_report.py new file mode 100644 index 000000000..e385cca07 --- /dev/null +++ b/backend/app/services/bug_report.py @@ -0,0 +1,142 @@ +"""Bug report service — posts to the bambuddy.cool relay which holds the GitHub PAT.""" + +import logging +import time + +import httpx + +from backend.app.core.config import BUG_REPORT_RELAY_URL +from backend.app.core.database import async_session +from backend.app.models.bug_report import BugReport + +logger = logging.getLogger(__name__) + +# Rate limiting: max 5 reports per hour +_rate_limit_window = 3600 +_rate_limit_max = 5 +_rate_limit_timestamps: list[float] = [] + + +def _check_rate_limit() -> bool: + """Check if rate limit allows a new report. Returns True if allowed.""" + now = time.time() + _rate_limit_timestamps[:] = [t for t in _rate_limit_timestamps if now - t < _rate_limit_window] + if len(_rate_limit_timestamps) >= _rate_limit_max: + return False + _rate_limit_timestamps.append(now) + return True + + +async def submit_report( + description: str, + reporter_email: str | None, + screenshot_base64: str | None, + support_info: dict | None, +) -> dict: + """Submit a bug report via the bambuddy.cool relay.""" + if not _check_rate_limit(): + return { + "success": False, + "message": "Rate limit exceeded. Please try again later.", + "issue_url": None, + "issue_number": None, + } + + if not BUG_REPORT_RELAY_URL: + return { + "success": False, + "message": "Bug reporting is not configured. BUG_REPORT_RELAY_URL is not set.", + "issue_url": None, + "issue_number": None, + } + + # Build relay payload — email is sent to relay for maintainer notification + issue body + payload: dict = {"description": description} + if reporter_email: + payload["reporter_email"] = reporter_email + if screenshot_base64: + payload["screenshot_base64"] = screenshot_base64 + if support_info: + payload["support_info"] = support_info + + try: + async with httpx.AsyncClient(timeout=60.0) as client: + resp = await client.post(BUG_REPORT_RELAY_URL, json=payload) + if resp.status_code != 200: + error_msg = f"Relay returned HTTP {resp.status_code}" + logger.error("%s at %s", error_msg, BUG_REPORT_RELAY_URL) + async with async_session() as db: + report = BugReport( + description=description, + reporter_email=reporter_email, + status="failed", + error_message=error_msg, + ) + db.add(report) + await db.commit() + return { + "success": False, + "message": "Bug report relay is not available. Please try again later.", + "issue_url": None, + "issue_number": None, + } + relay_data = resp.json() + except Exception: + logger.exception("Failed to reach bug report relay at %s", BUG_REPORT_RELAY_URL) + async with async_session() as db: + report = BugReport( + description=description, + reporter_email=reporter_email, + status="failed", + error_message="Failed to reach bug report relay", + ) + db.add(report) + await db.commit() + + return { + "success": False, + "message": "Failed to submit bug report. Please try again later.", + "issue_url": None, + "issue_number": None, + } + + if not relay_data.get("success"): + async with async_session() as db: + report = BugReport( + description=description, + reporter_email=reporter_email, + status="failed", + error_message=relay_data.get("message", "Relay returned failure"), + ) + db.add(report) + await db.commit() + + return { + "success": False, + "message": relay_data.get("message", "Failed to create bug report."), + "issue_url": None, + "issue_number": None, + } + + issue_number = relay_data["issue_number"] + issue_url = relay_data["issue_url"] + + # Save to DB + async with async_session() as db: + report = BugReport( + description=description, + reporter_email=reporter_email, + github_issue_number=issue_number, + github_issue_url=issue_url, + status="submitted", + email_sent=True, + ) + db.add(report) + await db.commit() + + return { + "success": True, + "message": "Bug report submitted successfully!", + "issue_url": issue_url, + "issue_number": issue_number, + } diff --git a/backend/app/services/failure_analysis.py b/backend/app/services/failure_analysis.py index 348a98464..6a78bcbaf 100644 --- a/backend/app/services/failure_analysis.py +++ b/backend/app/services/failure_analysis.py @@ -1,5 +1,5 @@ from collections import defaultdict -from datetime import datetime, timedelta, timezone +from datetime import date, datetime, time, timedelta, timezone from sqlalchemy import and_, func, select from sqlalchemy.ext.asyncio import AsyncSession @@ -16,28 +16,47 @@ def __init__(self, db: AsyncSession): async def analyze_failures( self, - days: int = 30, + days: int | None = None, + date_from: date | None = None, + date_to: date | None = None, printer_id: int | None = None, project_id: int | None = None, ) -> dict: """Analyze failure patterns across archives. Args: - days: Number of days to analyze + days: Number of days to analyze (fallback when no date range) + date_from: Start date filter (inclusive) + date_to: End date filter (inclusive) printer_id: Optional filter by printer project_id: Optional filter by project Returns: Dictionary with failure analysis results """ - cutoff_date = datetime.now(timezone.utc) - timedelta(days=days) - - # Build base query - base_filter = [PrintArchive.created_at >= cutoff_date] + # Build base query — separate date vs non-date filters for trend reuse + base_filter = [] + non_date_filter = [] + if date_from or date_to: + if date_from: + dt_from = datetime.combine(date_from, time.min, tzinfo=timezone.utc) + base_filter.append(PrintArchive.created_at >= dt_from) + if date_to: + dt_to = datetime.combine(date_to, time.max, tzinfo=timezone.utc) + base_filter.append(PrintArchive.created_at <= dt_to) + # Compute effective span for trend + range_start = dt_from if date_from else datetime.now(timezone.utc) - timedelta(days=365) + range_end = dt_to if date_to else datetime.now(timezone.utc) + effective_days = max((range_end - range_start).days, 1) + else: + effective_days = days if days is not None else 30 + cutoff_date = datetime.now(timezone.utc) - timedelta(days=effective_days) + base_filter.append(PrintArchive.created_at >= cutoff_date) if printer_id: - base_filter.append(PrintArchive.printer_id == printer_id) + non_date_filter.append(PrintArchive.printer_id == printer_id) if project_id: - base_filter.append(PrintArchive.project_id == project_id) + non_date_filter.append(PrintArchive.project_id == project_id) + base_filter.extend(non_date_filter) # Total counts total_result = await self.db.execute(select(func.count(PrintArchive.id)).where(and_(*base_filter))) @@ -141,15 +160,16 @@ async def analyze_failures( # Failure rate trend (by week) trend_data = [] - for i in range(min(days // 7, 12)): # Up to 12 weeks + num_weeks = max(effective_days // 7, 1) + for i in range(num_weeks): week_end = datetime.now(timezone.utc) - timedelta(weeks=i) week_start = week_end - timedelta(weeks=1) - week_filter = base_filter.copy() - week_filter[0] = and_( + week_filter = [ PrintArchive.created_at >= week_start, PrintArchive.created_at < week_end, - ) + *non_date_filter, + ] week_total = await self.db.execute(select(func.count(PrintArchive.id)).where(and_(*week_filter))) week_failed = await self.db.execute( @@ -174,7 +194,7 @@ async def analyze_failures( trend_data.reverse() # Oldest first return { - "period_days": days, + "period_days": effective_days, "total_prints": total_prints, "failed_prints": failed_prints, "failure_rate": round(failure_rate, 1), diff --git a/backend/app/services/firmware_check.py b/backend/app/services/firmware_check.py index 412c295d2..4be0b1b53 100644 --- a/backend/app/services/firmware_check.py +++ b/backend/app/services/firmware_check.py @@ -49,6 +49,22 @@ "H2D Pro": "h2d-pro", "H2D-Pro": "h2d-pro", "H2DPRO": "h2d-pro", + # SSDP model codes (DevModel header) — in case raw codes are stored + "O1D": "h2d", + "O1E": "h2d-pro", + "O2D": "h2d-pro", + "O1C": "h2c", + "O1C2": "h2c", + "O1S": "h2s", + "BL-P001": "x1", + "BL-P002": "x1", + "BL-P003": "x1e", + "C11": "p1", + "C12": "p1", + "C13": "p2s", + "N2S": "a1", + "N1": "a1-mini", + "N7": "p2s", } # Reverse mapping: API key to model codes diff --git a/backend/app/services/opentag3d.py b/backend/app/services/opentag3d.py new file mode 100644 index 000000000..a726b2442 --- /dev/null +++ b/backend/app/services/opentag3d.py @@ -0,0 +1,103 @@ +"""OpenTag3D NDEF encoder for NTAG tags. + +Encodes spool data as an OpenTag3D NDEF message ready to write to NTAG +starting at page 4 (after the manufacturer pages). + +NDEF structure: + [CC: E1 10 12 00] - Capability Container (4 bytes, page 4) + [TLV: 03 len] - NDEF Message TLV (2 bytes) + [NDEF record header] - D2 15 payload_len (3 bytes: MB|ME|SR, TNF=MIME, type_len=21) + [Type: "application/opentag3d"] - 21 bytes + [Payload: OpenTag3D fields] - 102 bytes + [Terminator: FE] - 1 byte +""" + +import struct + +from backend.app.models.spool import Spool + +OPENTAG3D_MIME_TYPE = b"application/opentag3d" +PAYLOAD_SIZE = 102 +TAG_VERSION = 1000 # v1.000 + + +def _build_payload(spool: Spool) -> bytes: + """Build 102-byte OpenTag3D core payload from spool fields.""" + buf = bytearray(PAYLOAD_SIZE) + + # 0x00: Tag Version (2 bytes, big-endian) + struct.pack_into(">H", buf, 0x00, TAG_VERSION) + + # 0x02: Base Material (5 bytes, UTF-8, space-padded) + material = (spool.material or "")[:5].ljust(5) + buf[0x02:0x07] = material.encode("utf-8")[:5] + + # 0x07: Material Modifiers (5 bytes, UTF-8, space-padded) + modifiers = (spool.subtype or "")[:5].ljust(5) + buf[0x07:0x0C] = modifiers.encode("utf-8")[:5] + + # 0x0C: Reserved (15 bytes, zero-fill) — already zero + + # 0x1B: Manufacturer (16 bytes, UTF-8, space-padded) + brand = (spool.brand or "")[:16].ljust(16) + buf[0x1B:0x2B] = brand.encode("utf-8")[:16] + + # 0x2B: Color Name (32 bytes, UTF-8, space-padded) + color_name = (spool.color_name or "")[:32].ljust(32) + buf[0x2B:0x4B] = color_name.encode("utf-8")[:32] + + # 0x4B: Color 1 RGBA (4 bytes) + rgba_hex = spool.rgba or "00000000" + try: + rgba_bytes = bytes.fromhex(rgba_hex[:8].ljust(8, "0")) + except ValueError: + rgba_bytes = b"\x00\x00\x00\x00" + buf[0x4B:0x4F] = rgba_bytes[:4] + + # 0x4F: Colors 2-4 (12 bytes, zero-fill) — already zero + + # 0x5C: Target Diameter (2 bytes, big-endian) — 1750 = 1.75mm + struct.pack_into(">H", buf, 0x5C, 1750) + + # 0x5E: Target Weight (2 bytes, big-endian) + struct.pack_into(">H", buf, 0x5E, spool.label_weight or 0) + + # 0x60: Print Temp (1 byte) — nozzle_temp_min / 5 + buf[0x60] = (spool.nozzle_temp_min or 0) // 5 + + # 0x61: Bed Temp (1 byte) — not tracked + # 0x62: Density (2 bytes) — not tracked + # 0x64: Transmission Distance (2 bytes) — not tracked + # All zero — already zero + + return bytes(buf) + + +def encode_opentag3d(spool: Spool) -> bytes: + """Encode spool data as OpenTag3D NDEF message (CC + TLV + record + terminator). + + Returns raw bytes ready to write to NTAG starting at page 4. + """ + payload = _build_payload(spool) + mime_type = OPENTAG3D_MIME_TYPE + + # NDEF record: MB|ME|SR (0xD0) | TNF=MIME (0x02) => 0xD2 + # Type length = 21 + # Payload length = 102 (fits in SR single byte) + record_header = bytes([0xD2, len(mime_type), len(payload)]) + ndef_record = record_header + mime_type + payload + + # TLV: type=0x03 (NDEF Message), length + ndef_len = len(ndef_record) + if ndef_len < 0xFF: + tlv = bytes([0x03, ndef_len]) + else: + tlv = bytes([0x03, 0xFF, (ndef_len >> 8) & 0xFF, ndef_len & 0xFF]) + + # Capability Container (page 4) + cc = bytes([0xE1, 0x10, 0x12, 0x00]) + + # Terminator TLV + terminator = bytes([0xFE]) + + return cc + tlv + ndef_record + terminator diff --git a/backend/app/services/orca_profiles.py b/backend/app/services/orca_profiles.py index 31fa56ea5..726c11fd1 100644 --- a/backend/app/services/orca_profiles.py +++ b/backend/app/services/orca_profiles.py @@ -230,10 +230,23 @@ def extract_core_fields(data: dict) -> dict: def _parse_material_from_name(name: str) -> str | None: - """Extract filament material type from preset name, e.g. 'Overture PLA Matte' -> 'PLA'.""" + """Extract filament material type from preset name, e.g. 'Overture PLA Matte' -> 'PLA'. + + Handles 'X Support for Y' patterns where the filament type is Y, not X. + e.g. 'PLA Support for PETG PETG Basic @Bambu Lab H2D' -> 'PETG'. + """ import re upper = name.upper() + + # Handle "X Support for Y" pattern: the filament type is Y, not X. + support_match = re.search(r"\bSUPPORT\s+FOR\s+", upper) + if support_match: + after_support = upper[support_match.end() :] + for mat in MATERIAL_TYPES: + if re.search(rf"\b{mat}\b", after_support): + return mat + for mat in MATERIAL_TYPES: if re.search(rf"\b{mat}\b", upper): return mat diff --git a/backend/app/services/printer_manager.py b/backend/app/services/printer_manager.py index 760c7145f..17b49070a 100644 --- a/backend/app/services/printer_manager.py +++ b/backend/app/services/printer_manager.py @@ -664,6 +664,7 @@ def printer_state_to_dict(state: PrinterState, printer_id: int | None = None, mo "ams_extruder_map": ams_extruder_map, # WiFi signal strength "wifi_signal": state.wifi_signal, + "wired_network": state.wired_network, # Calibration stage tracking "stg_cur": state.stg_cur, "stg_cur_name": get_derived_status_name(state, model), diff --git a/backend/app/services/spool_tag_matcher.py b/backend/app/services/spool_tag_matcher.py index 812f1a4d2..009acc4ff 100644 --- a/backend/app/services/spool_tag_matcher.py +++ b/backend/app/services/spool_tag_matcher.py @@ -145,6 +145,10 @@ async def create_spool_from_tray(db: AsyncSession, tray_data: dict) -> Spool: db.add(spool) await db.flush() + # Eagerly set k_profiles so callers (auto_assign_spool) don't trigger + # a lazy load in async context (greenlet_spawn error). + spool.k_profiles = [] + logger.info( "Auto-created spool %d from AMS tray data: %s %s %s (tag=%s uuid=%s)", spool.id, diff --git a/backend/app/services/virtual_printer/bind_server.py b/backend/app/services/virtual_printer/bind_server.py index 22a0bcab1..9f9faaa08 100644 --- a/backend/app/services/virtual_printer/bind_server.py +++ b/backend/app/services/virtual_printer/bind_server.py @@ -2,9 +2,12 @@ Bambu slicers (BambuStudio, OrcaSlicer) connect to a printer on port 3000 or 3002 to perform the "bind with access code" handshake before using -MQTT/FTP. The port varies by slicer version, so we listen on both. +MQTT/FTP. -Protocol: +Port 3000: plain TCP (legacy / some printer models). +Port 3002: TLS (newer firmware, e.g. A1 Mini 01.07.x). + +Protocol (same on both ports, only transport differs): - Framing: 0xA5A5 + uint16_le(total_msg_size) + JSON payload + 0xA7A7 - Slicer sends: {"login":{"command":"detect","sequence_id":"20000"}} - Printer replies: {"login":{"bind":"free","command":"detect","connect":"lan", @@ -16,11 +19,15 @@ import asyncio import json import logging +import ssl import struct +from pathlib import Path logger = logging.getLogger(__name__) -BIND_PORTS = [3000, 3002] +BIND_PORT_PLAIN = 3000 +BIND_PORT_TLS = 3002 +BIND_PORTS = [BIND_PORT_PLAIN, BIND_PORT_TLS] FRAME_HEADER = b"\xa5\xa5" FRAME_TRAILER = b"\xa7\xa7" HEADER_SIZE = 4 # 2 bytes magic + 2 bytes length @@ -33,8 +40,8 @@ class BindServer: In server mode, Bambuddy IS the printer — it responds with its own identity so the slicer can discover and bind to it. - Different BambuStudio versions connect on different ports (3000 or 3002), - so we listen on both to ensure compatibility. + Port 3000 is plain TCP, port 3002 is TLS. BambuStudio chooses which + port to use based on the printer model discovered via SSDP. """ def __init__( @@ -44,39 +51,66 @@ def __init__( name: str, version: str = "01.00.00.00", bind_address: str = "0.0.0.0", # nosec B104 + cert_path: Path | None = None, + key_path: Path | None = None, ): self.serial = serial self.model = model self.name = name self.version = version self.bind_address = bind_address + self.cert_path = cert_path + self.key_path = key_path self._servers: list[asyncio.Server] = [] self._running = False + def _create_tls_context(self) -> ssl.SSLContext | None: + """Create SSL context for the TLS bind port (3002).""" + if not self.cert_path or not self.key_path: + return None + ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + ctx.load_cert_chain(str(self.cert_path), str(self.key_path)) + ctx.minimum_version = ssl.TLSVersion.TLSv1_2 + ctx.verify_mode = ssl.CERT_NONE + return ctx + async def start(self) -> None: - """Start the bind server on ports 3000 and 3002.""" + """Start the bind server on ports 3000 (plain) and 3002 (TLS).""" if self._running: return self._running = True + + tls_ctx = self._create_tls_context() + if not tls_ctx: + logger.warning("Bind server: no TLS cert provided, port %s will be plain TCP", BIND_PORT_TLS) + logger.info( - "Starting bind server on ports %s (serial=%s, model=%s)", + "Starting bind server on ports %s (serial=%s, model=%s, tls=%s)", BIND_PORTS, self.serial, self.model, + tls_ctx is not None, ) try: for port in BIND_PORTS: + use_tls = port == BIND_PORT_TLS and tls_ctx is not None try: server = await asyncio.start_server( self._handle_client, self.bind_address, port, + ssl=tls_ctx if use_tls else None, ) self._servers.append(server) - logger.info("Bind server listening on %s:%s", self.bind_address, port) + logger.info( + "Bind server listening on %s:%s (%s)", + self.bind_address, + port, + "TLS" if use_tls else "plain", + ) except OSError as e: if e.errno == 98: logger.warning("Bind server port %s already in use, skipping", port) diff --git a/backend/app/services/virtual_printer/manager.py b/backend/app/services/virtual_printer/manager.py index a8920f65b..ac8e90701 100644 --- a/backend/app/services/virtual_printer/manager.py +++ b/backend/app/services/virtual_printer/manager.py @@ -410,6 +410,8 @@ async def run_with_logging(coro, svc_name): model=self.model or DEFAULT_VIRTUAL_PRINTER_MODEL, name=self.name, bind_address=bind_addr, + cert_path=cert_path, + key_path=key_path, ) self._tasks.append( asyncio.create_task( @@ -626,6 +628,30 @@ async def sync_from_db(self) -> None: if printer: proxy_ips[pvp.id] = (printer.ip_address, printer.serial_number) + # Detect config changes on running instances and restart if needed + for vp in enabled_vps: + instance = self._instances.get(vp.id) + if not instance: + continue + + changed = ( + instance.mode != vp.mode + or instance.model != (vp.model or DEFAULT_VIRTUAL_PRINTER_MODEL) + or instance.access_code != (vp.access_code or "") + or instance.bind_ip != (vp.bind_ip or "") + or instance.remote_interface_ip != (vp.remote_interface_ip or "") + or instance.target_printer_id != vp.target_printer_id + ) + + if changed: + logger.info( + "VP %s config changed (mode: %s→%s), restarting", + instance.name, + instance.mode, + vp.mode, + ) + await self.remove_instance(vp.id) + # Start instances for all enabled VPs (skip already running) for vp in enabled_vps: if vp.id in self._instances: diff --git a/backend/app/services/virtual_printer/tcp_proxy.py b/backend/app/services/virtual_printer/tcp_proxy.py index 3ecd653bc..b640a4c76 100644 --- a/backend/app/services/virtual_printer/tcp_proxy.py +++ b/backend/app/services/virtual_printer/tcp_proxy.py @@ -346,7 +346,7 @@ async def _forward( class TCPProxy: """Raw TCP proxy that forwards data without TLS termination. - Used for protocols where the printer doesn't use TLS (e.g., port 3002 + Used for protocols where the printer doesn't use TLS (e.g., port 3000 binding/authentication protocol). """ @@ -1123,18 +1123,30 @@ async def start(self) -> None: bind_address=self.bind_address, ) - # Bind/auth proxy (ports 3000 + 3002) - raw TCP, no TLS - # Different BambuStudio versions use different ports + # Bind/auth proxy — port 3000 plain TCP, port 3002 TLS for bind_port in self.PRINTER_BIND_PORTS: - proxy = TCPProxy( - name="Bind", - listen_port=bind_port, - target_host=self.target_host, - target_port=bind_port, - on_connect=lambda cid: self._log_activity("Bind", f"connected: {cid}"), - on_disconnect=lambda cid: self._log_activity("Bind", f"disconnected: {cid}"), - bind_address=self.bind_address, - ) + if bind_port == 3002: + proxy = TLSProxy( + name="Bind-TLS", + listen_port=bind_port, + target_host=self.target_host, + target_port=bind_port, + server_cert_path=self.cert_path, + server_key_path=self.key_path, + on_connect=lambda cid: self._log_activity("Bind", f"connected: {cid}"), + on_disconnect=lambda cid: self._log_activity("Bind", f"disconnected: {cid}"), + bind_address=self.bind_address, + ) + else: + proxy = TCPProxy( + name="Bind", + listen_port=bind_port, + target_host=self.target_host, + target_port=bind_port, + on_connect=lambda cid: self._log_activity("Bind", f"connected: {cid}"), + on_disconnect=lambda cid: self._log_activity("Bind", f"disconnected: {cid}"), + bind_address=self.bind_address, + ) self._bind_proxies.append(proxy) # Start as background tasks diff --git a/backend/app/utils/filament_ids.py b/backend/app/utils/filament_ids.py new file mode 100644 index 000000000..d221eba59 --- /dev/null +++ b/backend/app/utils/filament_ids.py @@ -0,0 +1,78 @@ +"""Utility functions for converting between filament_id and setting_id formats. + +Bambu printers use two ID formats for filament presets: + - **filament_id** (aka tray_info_idx): e.g. "GFL05", "GFG02", "GFA00" + Reported by printer firmware (RFID tags, AMS status). + - **setting_id**: e.g. "GFSL05", "GFSG02", "GFSA00" + Used by BambuStudio / Bambu Cloud API to resolve presets. + +The only difference for official Bambu filaments is an "S" inserted after "GF". +User presets (starting with "P") use the same ID in both contexts. +""" + + +def filament_id_to_setting_id(filament_id: str) -> str: + """Convert filament_id → setting_id (e.g. "GFL05" → "GFSL05"). + + - Already a setting_id ("GFS…") → returned unchanged. + - User presets ("P…") → returned unchanged. + - Empty / unknown → returned unchanged. + """ + if not filament_id: + return filament_id + + # User presets start with "P" - leave unchanged + if filament_id.startswith("P"): + return filament_id + + # Official Bambu presets: GFx## -> GFSx## + if filament_id.startswith("GF") and len(filament_id) >= 4: + # Already a setting_id (has S after GF) + if filament_id[2] == "S": + return filament_id + return f"GFS{filament_id[2:]}" + + return filament_id + + +def setting_id_to_filament_id(setting_id: str) -> str: + """Convert setting_id → filament_id (e.g. "GFSL05" → "GFL05"). + + - Already a filament_id ("GF" without "S") → returned unchanged. + - User presets ("P…") → returned unchanged. + - Empty / unknown → returned unchanged. + """ + if not setting_id: + return setting_id + + # User presets start with "P" - leave unchanged + if setting_id.startswith("P"): + return setting_id + + # Setting_id format: GFSx## -> GFx## (remove the "S") + if setting_id.startswith("GFS") and len(setting_id) >= 5: + return f"GF{setting_id[3:]}" + + return setting_id + + +def normalize_slicer_filament(slicer_filament: str | None) -> tuple[str, str]: + """Normalize a slicer_filament value into (tray_info_idx, setting_id). + + The slicer_filament field on a spool can be stored in either format: + - filament_id: "GFL05" (from RFID tag scan) + - setting_id: "GFSL05" or "GFSL05_07" (from cloud preset picker) + + Returns (tray_info_idx, setting_id) with version suffixes stripped. + """ + raw = slicer_filament or "" + if not raw: + return ("", "") + + # Strip version suffix (e.g. "GFSL05_07" → "GFSL05") + base = raw.split("_")[0] if "_" in raw else raw + + tray_info_idx = setting_id_to_filament_id(base) + sid = filament_id_to_setting_id(base) + + return (tray_info_idx, sid) diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index fd31c9ba7..249ed1135 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -50,8 +50,12 @@ def event_loop(): """Create an instance of the default event loop for each test session.""" loop = asyncio.get_event_loop_policy().new_event_loop() yield loop - # Drain pending callbacks so aiosqlite threads can finish before loop closes - loop.run_until_complete(asyncio.sleep(0.1)) + # Dispose the module-level engine so aiosqlite worker threads finish + # before the event loop closes, preventing "Event loop is closed" errors. + from backend.app.core.database import engine + + loop.run_until_complete(engine.dispose()) + loop.run_until_complete(asyncio.sleep(0.05)) loop.close() diff --git a/backend/tests/integration/test_archives_api.py b/backend/tests/integration/test_archives_api.py index 59c8a30d4..f3afa1ae6 100644 --- a/backend/tests/integration/test_archives_api.py +++ b/backend/tests/integration/test_archives_api.py @@ -227,6 +227,156 @@ async def test_get_archive_stats(self, async_client: AsyncClient, archive_factor assert "successful_prints" in result +class TestArchivesSlimAPI: + """Integration tests for /api/v1/archives/slim endpoint.""" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_slim_empty(self, async_client: AsyncClient): + """Verify empty list when no archives exist.""" + response = await async_client.get("/api/v1/archives/slim") + + assert response.status_code == 200 + assert response.json() == [] + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_slim_returns_only_expected_fields( + self, async_client: AsyncClient, archive_factory, printer_factory, db_session + ): + """Verify response contains only slim fields, not full archive data.""" + printer = await printer_factory() + await archive_factory( + printer.id, + print_name="Slim Test", + status="completed", + filament_type="PLA", + filament_color="#FF0000", + filament_used_grams=50.0, + print_time_seconds=3600, + cost=1.50, + quantity=2, + ) + + response = await async_client.get("/api/v1/archives/slim") + + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + item = data[0] + + # Expected fields present + assert item["printer_id"] == printer.id + assert item["print_name"] == "Slim Test" + assert item["status"] == "completed" + assert item["filament_type"] == "PLA" + assert item["filament_color"] == "#FF0000" + assert item["filament_used_grams"] == 50.0 + assert item["print_time_seconds"] == 3600 + assert item["cost"] == 1.50 + assert item["quantity"] == 2 + assert "created_at" in item + + # Full archive fields must NOT be present + assert "id" not in item + assert "filename" not in item + assert "file_path" not in item + assert "file_size" not in item + assert "extra_data" not in item + assert "notes" not in item + assert "tags" not in item + assert "photos" not in item + assert "thumbnail_path" not in item + assert "content_hash" not in item + assert "duplicates" not in item + assert "duplicate_count" not in item + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_slim_computes_actual_time( + self, async_client: AsyncClient, archive_factory, printer_factory, db_session + ): + """Verify actual_time_seconds is computed from started_at/completed_at.""" + from datetime import datetime, timezone + + printer = await printer_factory() + started = datetime(2024, 1, 1, 10, 0, 0, tzinfo=timezone.utc) + completed = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) # 2 hours = 7200s + await archive_factory( + printer.id, + status="completed", + started_at=started, + completed_at=completed, + ) + + response = await async_client.get("/api/v1/archives/slim") + + assert response.status_code == 200 + item = response.json()[0] + assert item["actual_time_seconds"] == 7200 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_slim_actual_time_null_for_failed( + self, async_client: AsyncClient, archive_factory, printer_factory, db_session + ): + """Verify actual_time_seconds is null for non-completed prints.""" + from datetime import datetime, timezone + + printer = await printer_factory() + await archive_factory( + printer.id, + status="failed", + started_at=datetime(2024, 1, 1, 10, 0, 0, tzinfo=timezone.utc), + completed_at=datetime(2024, 1, 1, 11, 0, 0, tzinfo=timezone.utc), + ) + + response = await async_client.get("/api/v1/archives/slim") + + assert response.status_code == 200 + item = response.json()[0] + assert item["actual_time_seconds"] is None + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_slim_date_filtering(self, async_client: AsyncClient, archive_factory, printer_factory, db_session): + """Verify date_from and date_to filters work.""" + from datetime import datetime, timezone + + printer = await printer_factory() + await archive_factory( + printer.id, + print_name="Old Print", + created_at=datetime(2024, 1, 1, tzinfo=timezone.utc), + ) + await archive_factory( + printer.id, + print_name="New Print", + created_at=datetime(2024, 6, 15, tzinfo=timezone.utc), + ) + + # Filter to only June 2024 + response = await async_client.get("/api/v1/archives/slim?date_from=2024-06-01&date_to=2024-06-30") + + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + assert data[0]["print_name"] == "New Print" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_slim_pagination(self, async_client: AsyncClient, archive_factory, printer_factory, db_session): + """Verify limit and offset work.""" + printer = await printer_factory() + for i in range(5): + await archive_factory(printer.id, print_name=f"Print {i}") + + response = await async_client.get("/api/v1/archives/slim?limit=2&offset=0") + + assert response.status_code == 200 + assert len(response.json()) == 2 + + class TestArchiveDataIntegrity: """Tests for archive data integrity.""" diff --git a/backend/tests/integration/test_auth_api.py b/backend/tests/integration/test_auth_api.py index 97d717484..db47f2956 100644 --- a/backend/tests/integration/test_auth_api.py +++ b/backend/tests/integration/test_auth_api.py @@ -180,6 +180,68 @@ async def test_me_with_valid_token(self, async_client: AsyncClient): assert result["role"] == "admin" assert result["is_active"] is True + @pytest.mark.asyncio + @pytest.mark.integration + async def test_me_with_api_key_bearer(self, async_client: AsyncClient, db_session): + """Verify /me returns synthetic admin user when using API key via Bearer token.""" + from backend.app.core.auth import generate_api_key + from backend.app.models.api_key import APIKey + + # Create an API key directly in the database + full_key, key_hash, key_prefix = generate_api_key() + api_key = APIKey(name="test-kiosk", key_hash=key_hash, key_prefix=key_prefix, enabled=True) + db_session.add(api_key) + await db_session.commit() + + # Call /me with the API key as Bearer token + response = await async_client.get( + "/api/v1/auth/me", + headers={"Authorization": f"Bearer {full_key}"}, + ) + + assert response.status_code == 200 + result = response.json() + assert result["id"] == 0 + assert result["username"].startswith("api-key:") + assert result["role"] == "admin" + assert result["is_admin"] is True + assert result["is_active"] is True + assert len(result["permissions"]) > 0 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_me_with_api_key_header(self, async_client: AsyncClient, db_session): + """Verify /me returns synthetic admin user when using X-API-Key header.""" + from backend.app.core.auth import generate_api_key + from backend.app.models.api_key import APIKey + + full_key, key_hash, key_prefix = generate_api_key() + api_key = APIKey(name="test-kiosk-header", key_hash=key_hash, key_prefix=key_prefix, enabled=True) + db_session.add(api_key) + await db_session.commit() + + response = await async_client.get( + "/api/v1/auth/me", + headers={"X-API-Key": full_key}, + ) + + assert response.status_code == 200 + result = response.json() + assert result["id"] == 0 + assert result["username"].startswith("api-key:") + assert result["is_admin"] is True + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_me_with_invalid_api_key(self, async_client: AsyncClient): + """Verify /me rejects invalid API key.""" + response = await async_client.get( + "/api/v1/auth/me", + headers={"Authorization": "Bearer bb_invalid_key_value"}, + ) + + assert response.status_code == 401 + class TestUsersAPI: """Integration tests for /api/v1/users/ endpoints.""" diff --git a/backend/tests/integration/test_print_lifecycle.py b/backend/tests/integration/test_print_lifecycle.py index bc166aa32..e0ce19ee1 100644 --- a/backend/tests/integration/test_print_lifecycle.py +++ b/backend/tests/integration/test_print_lifecycle.py @@ -65,15 +65,26 @@ class TestPrintCompleteLogic: @pytest.mark.asyncio async def test_print_complete_no_import_errors(self, capture_logs): """Verify on_print_complete doesn't have import shadowing issues.""" + # Snapshot tasks before the call so we can cancel orphans afterwards. + # on_print_complete fires background tasks (maintenance check, notifications, + # smart-plug) via asyncio.create_task. If those tasks outlive the mock + # context they use the *real* async_session and can send real notifications. + tasks_before = set(asyncio.all_tasks()) + with ( patch("backend.app.main.async_session") as mock_session_maker, patch("backend.app.main.notification_service") as mock_notif, patch("backend.app.main.smart_plug_manager") as mock_plug, patch("backend.app.main.ws_manager") as mock_ws, + patch("backend.app.main.mqtt_relay") as mock_relay, + patch("backend.app.main.printer_manager") as mock_pm, ): mock_notif.on_print_complete = AsyncMock() mock_plug.on_print_complete = AsyncMock() mock_ws.send_print_complete = AsyncMock() + mock_ws.broadcast = AsyncMock() + mock_relay.on_print_complete = AsyncMock() + mock_pm.get_printer.return_value = None # Mock the database session mock_session = AsyncMock() @@ -94,6 +105,16 @@ async def test_print_complete_no_import_errors(self, capture_logs): }, ) + # Cancel background tasks spawned by on_print_complete before + # leaving the mock context — prevents them from running with + # the real async_session and sending real notifications. + for task in asyncio.all_tasks() - tasks_before: + task.cancel() + try: + await task + except (asyncio.CancelledError, Exception): + pass + # Verify no import shadowing errors - this would have caught the ArchiveService bug errors = [r for r in capture_logs.get_errors() if "cannot access local variable" in str(r.message)] assert not errors, f"Import shadowing error: {capture_logs.format_errors()}" @@ -215,7 +236,7 @@ def on_complete(data): { "print": { "gcode_state": "RUNNING", - "hms": [{"attr": 0x07000002, "code": 0x1234}], # Filament module error + "hms": [{"attr": 0x07000002, "code": 0x8001}], # Filament module error (code must be >= 0x4000) } } ) diff --git a/backend/tests/integration/test_print_queue_api.py b/backend/tests/integration/test_print_queue_api.py index a8737a0cc..42d500999 100644 --- a/backend/tests/integration/test_print_queue_api.py +++ b/backend/tests/integration/test_print_queue_api.py @@ -1176,3 +1176,250 @@ async def test_clear_target_location(self, async_client: AsyncClient, queue_item assert response.status_code == 200 result = response.json() assert result["target_location"] is None + + +class TestAbortedStatusNormalisation: + """Tests for issue #558: 'aborted' queue status causes 500 error.""" + + @pytest.fixture + async def printer_factory(self, db_session): + """Factory to create test printers.""" + _counter = [0] + + async def _create_printer(**kwargs): + from backend.app.models.printer import Printer + + _counter[0] += 1 + counter = _counter[0] + + defaults = { + "name": f"Abort Test Printer {counter}", + "ip_address": f"192.168.1.{60 + counter}", + "serial_number": f"TESTABORT{counter:04d}", + "access_code": "12345678", + "model": "P1S", + } + defaults.update(kwargs) + + printer = Printer(**defaults) + db_session.add(printer) + await db_session.commit() + await db_session.refresh(printer) + return printer + + return _create_printer + + @pytest.fixture + async def archive_factory(self, db_session): + """Factory to create test archives.""" + _counter = [0] + + async def _create_archive(**kwargs): + from backend.app.models.archive import PrintArchive + + _counter[0] += 1 + counter = _counter[0] + + defaults = { + "filename": f"abort_test_{counter}.3mf", + "print_name": f"Abort Test Print {counter}", + "file_path": f"/tmp/abort_test_{counter}.3mf", + "file_size": 1024, + "content_hash": f"aborthash{counter:06d}", + "status": "completed", + } + defaults.update(kwargs) + + archive = PrintArchive(**defaults) + db_session.add(archive) + await db_session.commit() + await db_session.refresh(archive) + return archive + + return _create_archive + + @pytest.fixture + async def queue_item_factory(self, db_session, printer_factory, archive_factory): + """Factory to create test queue items.""" + _counter = [0] + + async def _create_queue_item(**kwargs): + from backend.app.models.print_queue import PrintQueueItem + + _counter[0] += 1 + counter = _counter[0] + + if "printer_id" not in kwargs: + printer = await printer_factory() + kwargs["printer_id"] = printer.id + if "archive_id" not in kwargs: + archive = await archive_factory() + kwargs["archive_id"] = archive.id + + defaults = { + "status": "pending", + "position": counter, + } + defaults.update(kwargs) + + item = PrintQueueItem(**defaults) + db_session.add(item) + await db_session.commit() + await db_session.refresh(item) + return item + + return _create_queue_item + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_on_print_complete_normalises_aborted_to_cancelled(self, queue_item_factory, db_session): + """Verify the completion handler maps 'aborted' → 'cancelled' for queue items.""" + import asyncio + from unittest.mock import AsyncMock, MagicMock, patch + + item = await queue_item_factory(status="printing") + + # Build a mock session whose execute returns our item + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [item] + + mock_session = AsyncMock() + mock_session.__aenter__ = AsyncMock(return_value=mock_session) + mock_session.__aexit__ = AsyncMock(return_value=False) + mock_session.execute = AsyncMock(return_value=mock_result) + mock_session.commit = AsyncMock() + + tasks_before = set(asyncio.all_tasks()) + + with ( + patch("backend.app.main.async_session", return_value=mock_session), + patch("backend.app.main.ws_manager") as mock_ws, + patch("backend.app.main.mqtt_relay") as mock_relay, + patch("backend.app.main.notification_service") as mock_notif, + patch("backend.app.main.smart_plug_manager") as mock_plug, + patch("backend.app.main.printer_manager") as mock_pm, + ): + mock_ws.send_print_complete = AsyncMock() + mock_ws.broadcast = AsyncMock() + mock_relay.on_print_complete = AsyncMock() + mock_relay.on_queue_job_completed = AsyncMock() + mock_notif.on_print_complete = AsyncMock() + mock_plug.on_print_complete = AsyncMock() + mock_pm.get_printer.return_value = None + + from backend.app.main import on_print_complete + + await on_print_complete( + item.printer_id, + { + "status": "aborted", + "filename": "test.gcode", + "subtask_name": "Test", + "timelapse_was_active": False, + }, + ) + + # Cancel background tasks before leaving mock context + for task in asyncio.all_tasks() - tasks_before: + task.cancel() + try: + await task + except (asyncio.CancelledError, Exception): + pass + + # The item status should be normalised to 'cancelled', not 'aborted' + assert item.status == "cancelled" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_startup_fixup_converts_aborted_to_cancelled(self, queue_item_factory, db_session): + """Verify the startup fixup converts existing 'aborted' rows to 'cancelled'.""" + from sqlalchemy import select + + from backend.app.models.print_queue import PrintQueueItem + + # Create items with various statuses including 'aborted' + item_aborted = await queue_item_factory(status="pending") + item_pending = await queue_item_factory(status="pending") + + # Manually set the invalid status + item_aborted.status = "aborted" + db_session.add(item_aborted) + await db_session.commit() + + # Run the fixup query (same logic as lifespan) + result = await db_session.execute(select(PrintQueueItem).where(PrintQueueItem.status == "aborted")) + aborted_items = result.scalars().all() + for i in aborted_items: + i.status = "cancelled" + await db_session.commit() + + # Verify: no more 'aborted' items + result = await db_session.execute(select(PrintQueueItem).where(PrintQueueItem.status == "aborted")) + assert len(result.scalars().all()) == 0 + + # The previously aborted item should now be 'cancelled' + await db_session.refresh(item_aborted) + assert item_aborted.status == "cancelled" + + # The pending item should be unchanged + await db_session.refresh(item_pending) + assert item_pending.status == "pending" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_completed_status_passes_through_unchanged(self, queue_item_factory, db_session): + """Verify normal statuses like 'completed' are not affected by normalisation.""" + import asyncio + from unittest.mock import AsyncMock, MagicMock, patch + + item = await queue_item_factory(status="printing") + + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [item] + + mock_session = AsyncMock() + mock_session.__aenter__ = AsyncMock(return_value=mock_session) + mock_session.__aexit__ = AsyncMock(return_value=False) + mock_session.execute = AsyncMock(return_value=mock_result) + mock_session.commit = AsyncMock() + + tasks_before = set(asyncio.all_tasks()) + + with ( + patch("backend.app.main.async_session", return_value=mock_session), + patch("backend.app.main.ws_manager") as mock_ws, + patch("backend.app.main.mqtt_relay") as mock_relay, + patch("backend.app.main.notification_service") as mock_notif, + patch("backend.app.main.smart_plug_manager") as mock_plug, + patch("backend.app.main.printer_manager") as mock_pm, + ): + mock_ws.send_print_complete = AsyncMock() + mock_ws.broadcast = AsyncMock() + mock_relay.on_print_complete = AsyncMock() + mock_relay.on_queue_job_completed = AsyncMock() + mock_notif.on_print_complete = AsyncMock() + mock_plug.on_print_complete = AsyncMock() + mock_pm.get_printer.return_value = None + + from backend.app.main import on_print_complete + + await on_print_complete( + item.printer_id, + { + "status": "completed", + "filename": "test.gcode", + "subtask_name": "Test", + "timelapse_was_active": False, + }, + ) + + # Cancel background tasks before leaving mock context + for task in asyncio.all_tasks() - tasks_before: + task.cancel() + try: + await task + except (asyncio.CancelledError, Exception): + pass + + assert item.status == "completed" diff --git a/backend/tests/integration/test_settings_api.py b/backend/tests/integration/test_settings_api.py index c2fec9fcb..598d98081 100644 --- a/backend/tests/integration/test_settings_api.py +++ b/backend/tests/integration/test_settings_api.py @@ -166,6 +166,27 @@ async def test_update_ams_thresholds(self, async_client: AsyncClient): assert result["ams_temp_good"] == 25.0 assert result["ams_temp_fair"] == 32.0 + @pytest.mark.asyncio + @pytest.mark.integration + async def test_update_low_stock_threshold(self, async_client: AsyncClient): + """Verify low stock threshold setting can be updated.""" + # Get default value + response = await async_client.get("/api/v1/settings/") + assert response.status_code == 200 + assert response.json()["low_stock_threshold"] == 20.0 + + # Update to custom value + response = await async_client.put("/api/v1/settings/", json={"low_stock_threshold": 15.5}) + + assert response.status_code == 200 + result = response.json() + assert result["low_stock_threshold"] == 15.5 + + # Verify persistence + response = await async_client.get("/api/v1/settings/") + assert response.status_code == 200 + assert response.json()["low_stock_threshold"] == 15.5 + @pytest.mark.asyncio @pytest.mark.integration async def test_update_notification_language(self, async_client: AsyncClient): diff --git a/backend/tests/integration/test_spoolbuddy.py b/backend/tests/integration/test_spoolbuddy.py new file mode 100644 index 000000000..768f91fd0 --- /dev/null +++ b/backend/tests/integration/test_spoolbuddy.py @@ -0,0 +1,743 @@ +"""Integration tests for SpoolBuddy API endpoints.""" + +from datetime import datetime, timedelta, timezone +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from backend.app.models.spool import Spool +from backend.app.models.spoolbuddy_device import SpoolBuddyDevice + +API = "/api/v1/spoolbuddy" + + +@pytest.fixture +def device_factory(db_session: AsyncSession): + """Factory to create SpoolBuddyDevice records.""" + _counter = [0] + + async def _create(**kwargs): + _counter[0] += 1 + n = _counter[0] + defaults = { + "device_id": f"sb-{n:04d}", + "hostname": f"spoolbuddy-{n}", + "ip_address": f"10.0.0.{n}", + "firmware_version": "1.0.0", + "has_nfc": True, + "has_scale": True, + "tare_offset": 0, + "calibration_factor": 1.0, + "last_seen": datetime.now(timezone.utc), + } + defaults.update(kwargs) + device = SpoolBuddyDevice(**defaults) + db_session.add(device) + await db_session.commit() + await db_session.refresh(device) + return device + + return _create + + +@pytest.fixture +def spool_factory(db_session: AsyncSession): + """Factory to create Spool records.""" + _counter = [0] + + async def _create(**kwargs): + _counter[0] += 1 + defaults = { + "material": "PLA", + "subtype": "Basic", + "brand": "Polymaker", + "color_name": "Red", + "rgba": "FF0000FF", + "label_weight": 1000, + "core_weight": 250, + "weight_used": 0, + } + defaults.update(kwargs) + spool = Spool(**defaults) + db_session.add(spool) + await db_session.commit() + await db_session.refresh(spool) + return spool + + return _create + + +# ============================================================================ +# Device endpoints +# ============================================================================ + + +class TestDeviceEndpoints: + @pytest.mark.asyncio + @pytest.mark.integration + async def test_register_new_device(self, async_client: AsyncClient): + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/devices/register", + json={ + "device_id": "sb-new", + "hostname": "spoolbuddy-new", + "ip_address": "10.0.0.99", + "firmware_version": "1.2.0", + }, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["device_id"] == "sb-new" + assert data["hostname"] == "spoolbuddy-new" + assert data["online"] is True + mock_ws.broadcast.assert_called_once() + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_online" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_re_register_existing_device(self, async_client: AsyncClient, device_factory): + device = await device_factory( + device_id="sb-exist", + tare_offset=12345, + calibration_factor=0.0042, + ) + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/devices/register", + json={ + "device_id": "sb-exist", + "hostname": "updated-host", + "ip_address": "10.0.0.200", + "firmware_version": "2.0.0", + }, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["id"] == device.id + assert data["hostname"] == "updated-host" + assert data["ip_address"] == "10.0.0.200" + assert data["firmware_version"] == "2.0.0" + # Calibration preserved on re-register + assert data["tare_offset"] == 12345 + assert data["calibration_factor"] == pytest.approx(0.0042) + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_list_devices_empty(self, async_client: AsyncClient): + resp = await async_client.get(f"{API}/devices") + assert resp.status_code == 200 + assert resp.json() == [] + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_list_devices(self, async_client: AsyncClient, device_factory): + await device_factory(device_id="sb-a", hostname="alpha") + await device_factory(device_id="sb-b", hostname="beta") + + resp = await async_client.get(f"{API}/devices") + assert resp.status_code == 200 + devices = resp.json() + assert len(devices) == 2 + hostnames = {d["hostname"] for d in devices} + assert hostnames == {"alpha", "beta"} + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_heartbeat_updates_status(self, async_client: AsyncClient, device_factory): + device = await device_factory(device_id="sb-hb") + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/devices/sb-hb/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 600}, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["tare_offset"] == device.tare_offset + assert data["calibration_factor"] == pytest.approx(device.calibration_factor) + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_heartbeat_returns_pending_command(self, async_client: AsyncClient, device_factory): + await device_factory(device_id="sb-cmd", pending_command="tare") + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/devices/sb-cmd/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 10}, + ) + + assert resp.status_code == 200 + assert resp.json()["pending_command"] == "tare" + + # Second heartbeat should have no pending command (cleared) + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp2 = await async_client.post( + f"{API}/devices/sb-cmd/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 20}, + ) + + assert resp2.json()["pending_command"] is None + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_heartbeat_unknown_device_404(self, async_client: AsyncClient): + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/devices/nonexistent/heartbeat", + json={"nfc_ok": False, "scale_ok": False, "uptime_s": 0}, + ) + + assert resp.status_code == 404 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_heartbeat_broadcasts_online_when_was_offline(self, async_client: AsyncClient, device_factory): + # Create device with last_seen far in the past (offline) + await device_factory( + device_id="sb-offline", + last_seen=datetime.now(timezone.utc) - timedelta(seconds=120), + ) + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/devices/sb-offline/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 5}, + ) + + assert resp.status_code == 200 + # Should broadcast online since device was offline + mock_ws.broadcast.assert_called_once() + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_online" + assert msg["device_id"] == "sb-offline" + + +# ============================================================================ +# NFC endpoints +# ============================================================================ + + +class TestNfcEndpoints: + @pytest.mark.asyncio + @pytest.mark.integration + async def test_tag_scanned_matched(self, async_client: AsyncClient, spool_factory): + spool = await spool_factory(tag_uid="AABB1122", material="PLA") + mock_spool = MagicMock() + mock_spool.id = spool.id + mock_spool.material = spool.material + mock_spool.subtype = spool.subtype + mock_spool.color_name = spool.color_name + mock_spool.rgba = spool.rgba + mock_spool.brand = spool.brand + mock_spool.label_weight = spool.label_weight + mock_spool.core_weight = spool.core_weight + mock_spool.weight_used = spool.weight_used + + with ( + patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws, + patch("backend.app.api.routes.spoolbuddy.get_spool_by_tag", new_callable=AsyncMock) as mock_lookup, + ): + mock_ws.broadcast = AsyncMock() + mock_lookup.return_value = mock_spool + + resp = await async_client.post( + f"{API}/nfc/tag-scanned", + json={"device_id": "sb-1", "tag_uid": "AABB1122"}, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["matched"] is True + assert data["spool_id"] == spool.id + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_tag_matched" + assert msg["spool"]["id"] == spool.id + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_tag_scanned_unmatched(self, async_client: AsyncClient): + with ( + patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws, + patch("backend.app.api.routes.spoolbuddy.get_spool_by_tag", new_callable=AsyncMock) as mock_lookup, + ): + mock_ws.broadcast = AsyncMock() + mock_lookup.return_value = None + + resp = await async_client.post( + f"{API}/nfc/tag-scanned", + json={"device_id": "sb-1", "tag_uid": "DEADBEEF"}, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["matched"] is False + assert data["spool_id"] is None + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_unknown_tag" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_tag_removed(self, async_client: AsyncClient): + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/nfc/tag-removed", + json={"device_id": "sb-1", "tag_uid": "AABB1122"}, + ) + + assert resp.status_code == 200 + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_tag_removed" + assert msg["device_id"] == "sb-1" + assert msg["tag_uid"] == "AABB1122" + + +# ============================================================================ +# NFC write-tag endpoints +# ============================================================================ + + +class TestWriteTagEndpoints: + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_tag_queues_command(self, async_client: AsyncClient, device_factory, spool_factory): + device = await device_factory(device_id="sb-wt") + spool = await spool_factory(material="PLA", brand="Polymaker", color_name="Red", rgba="FF0000FF") + + resp = await async_client.post( + f"{API}/nfc/write-tag", + json={"device_id": device.device_id, "spool_id": spool.id}, + ) + + assert resp.status_code == 200 + assert resp.json()["status"] == "queued" + + # Verify heartbeat returns write_tag command with payload + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + hb = await async_client.post( + f"{API}/devices/{device.device_id}/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 10}, + ) + + hb_data = hb.json() + assert hb_data["pending_command"] == "write_tag" + assert hb_data["pending_write_payload"] is not None + assert hb_data["pending_write_payload"]["spool_id"] == spool.id + assert "ndef_data_hex" in hb_data["pending_write_payload"] + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_tag_heartbeat_not_cleared(self, async_client: AsyncClient, device_factory, spool_factory): + """write_tag command persists across heartbeats until write-result clears it.""" + device = await device_factory(device_id="sb-wt-persist") + spool = await spool_factory(material="PETG") + + await async_client.post( + f"{API}/nfc/write-tag", + json={"device_id": device.device_id, "spool_id": spool.id}, + ) + + # First heartbeat — command present + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + hb1 = await async_client.post( + f"{API}/devices/{device.device_id}/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 10}, + ) + assert hb1.json()["pending_command"] == "write_tag" + + # Second heartbeat — should still be present (not cleared like tare) + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + hb2 = await async_client.post( + f"{API}/devices/{device.device_id}/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 20}, + ) + assert hb2.json()["pending_command"] == "write_tag" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_tag_missing_spool_404(self, async_client: AsyncClient, device_factory): + device = await device_factory(device_id="sb-wt-nospool") + + resp = await async_client.post( + f"{API}/nfc/write-tag", + json={"device_id": device.device_id, "spool_id": 99999}, + ) + assert resp.status_code == 404 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_tag_missing_device_404(self, async_client: AsyncClient, spool_factory): + spool = await spool_factory() + + resp = await async_client.post( + f"{API}/nfc/write-tag", + json={"device_id": "nonexistent", "spool_id": spool.id}, + ) + assert resp.status_code == 404 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_result_success_links_tag(self, async_client: AsyncClient, device_factory, spool_factory): + device = await device_factory(device_id="sb-wr", pending_command="write_tag") + spool = await spool_factory(material="PLA", tag_uid=None) + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/nfc/write-result", + json={ + "device_id": device.device_id, + "spool_id": spool.id, + "tag_uid": "04AABB11223344", + "success": True, + }, + ) + + assert resp.status_code == 200 + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_tag_written" + assert msg["spool_id"] == spool.id + assert msg["tag_uid"] == "04AABB11223344" + + # Verify spool got tag linked + spool_resp = await async_client.get(f"/api/v1/inventory/spools/{spool.id}") + spool_data = spool_resp.json() + assert spool_data["tag_uid"] == "04AABB11223344" + assert spool_data["tag_type"] == "ntag" + assert spool_data["data_origin"] == "opentag3d" + assert spool_data["encode_time"] is not None + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_result_failure_broadcasts_error( + self, async_client: AsyncClient, device_factory, spool_factory + ): + device = await device_factory(device_id="sb-wr-fail", pending_command="write_tag") + spool = await spool_factory(material="PLA", tag_uid=None) + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/nfc/write-result", + json={ + "device_id": device.device_id, + "spool_id": spool.id, + "tag_uid": "04AABB", + "success": False, + "message": "Write or verification failed", + }, + ) + + assert resp.status_code == 200 + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_tag_write_failed" + assert msg["message"] == "Write or verification failed" + + # Verify spool NOT linked + spool_resp = await async_client.get(f"/api/v1/inventory/spools/{spool.id}") + assert spool_resp.json()["tag_uid"] is None + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_result_clears_pending_command(self, async_client: AsyncClient, device_factory, spool_factory): + device = await device_factory( + device_id="sb-wr-clear", + pending_command="write_tag", + pending_write_payload='{"spool_id": 1, "ndef_data_hex": "E110120003"}', + ) + spool = await spool_factory() + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + await async_client.post( + f"{API}/nfc/write-result", + json={ + "device_id": device.device_id, + "spool_id": spool.id, + "tag_uid": "AABB", + "success": True, + }, + ) + + # Heartbeat should have no pending command + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + hb = await async_client.post( + f"{API}/devices/{device.device_id}/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 30}, + ) + assert hb.json()["pending_command"] is None + assert hb.json()["pending_write_payload"] is None + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_cancel_write(self, async_client: AsyncClient, device_factory, spool_factory): + device = await device_factory(device_id="sb-cancel") + spool = await spool_factory() + + # Queue a write + await async_client.post( + f"{API}/nfc/write-tag", + json={"device_id": device.device_id, "spool_id": spool.id}, + ) + + # Cancel it + resp = await async_client.post(f"{API}/devices/{device.device_id}/cancel-write", json={}) + assert resp.status_code == 200 + + # Heartbeat should have no pending command + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + hb = await async_client.post( + f"{API}/devices/{device.device_id}/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 10}, + ) + assert hb.json()["pending_command"] is None + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_cancel_write_unknown_device_404(self, async_client: AsyncClient): + resp = await async_client.post(f"{API}/devices/ghost/cancel-write", json={}) + assert resp.status_code == 404 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_write_tag_ndef_data_is_valid(self, async_client: AsyncClient, device_factory, spool_factory): + """Verify the NDEF data in the heartbeat is a valid OpenTag3D message.""" + device = await device_factory(device_id="sb-wt-ndef") + spool = await spool_factory( + material="PLA", + brand="Polymaker", + color_name="White", + rgba="FFFFFFFF", + label_weight=1000, + ) + + await async_client.post( + f"{API}/nfc/write-tag", + json={"device_id": device.device_id, "spool_id": spool.id}, + ) + + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + hb = await async_client.post( + f"{API}/devices/{device.device_id}/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 10}, + ) + + payload = hb.json()["pending_write_payload"] + ndef_bytes = bytes.fromhex(payload["ndef_data_hex"]) + + # CC bytes + assert ndef_bytes[:4] == bytes([0xE1, 0x10, 0x12, 0x00]) + # TLV type + assert ndef_bytes[4] == 0x03 + # NDEF record: TNF=MIME, type=application/opentag3d + assert ndef_bytes[6] == 0xD2 + assert ndef_bytes[9:30] == b"application/opentag3d" + # Terminator + assert ndef_bytes[-1] == 0xFE + # Total size fits NTAG213 + assert len(ndef_bytes) <= 144 + + +# ============================================================================ +# Scale endpoints +# ============================================================================ + + +class TestScaleEndpoints: + @pytest.mark.asyncio + @pytest.mark.integration + async def test_scale_reading_broadcast(self, async_client: AsyncClient): + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + resp = await async_client.post( + f"{API}/scale/reading", + json={ + "device_id": "sb-1", + "weight_grams": 823.5, + "stable": True, + "raw_adc": 456789, + }, + ) + + assert resp.status_code == 200 + msg = mock_ws.broadcast.call_args[0][0] + assert msg["type"] == "spoolbuddy_weight" + assert msg["device_id"] == "sb-1" + assert msg["weight_grams"] == 823.5 + assert msg["stable"] is True + assert msg["raw_adc"] == 456789 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_update_spool_weight_calculates_correctly(self, async_client: AsyncClient, spool_factory): + # label=1000g, core=250g, scale reads 750g + # net_filament = max(0, 750 - 250) = 500 + # weight_used = max(0, 1000 - 500) = 500 + spool = await spool_factory(label_weight=1000, core_weight=250, weight_used=0) + + resp = await async_client.post( + f"{API}/scale/update-spool-weight", + json={"spool_id": spool.id, "weight_grams": 750}, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["weight_used"] == 500 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_update_spool_weight_full_spool(self, async_client: AsyncClient, spool_factory): + # label=1000g, core=250g, scale reads 1250g (full spool) + # net_filament = max(0, 1250 - 250) = 1000 + # weight_used = max(0, 1000 - 1000) = 0 + spool = await spool_factory(label_weight=1000, core_weight=250, weight_used=200) + + resp = await async_client.post( + f"{API}/scale/update-spool-weight", + json={"spool_id": spool.id, "weight_grams": 1250}, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["weight_used"] == 0 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_update_spool_weight_stores_scale_reading(self, async_client: AsyncClient, spool_factory): + """Verify last_scale_weight and last_weighed_at are stored after weight sync.""" + spool = await spool_factory(label_weight=1000, core_weight=250, weight_used=0) + + resp = await async_client.post( + f"{API}/scale/update-spool-weight", + json={"spool_id": spool.id, "weight_grams": 750}, + ) + assert resp.status_code == 200 + + # Fetch the spool via inventory API to verify stored fields + spool_resp = await async_client.get(f"/api/v1/inventory/spools/{spool.id}") + assert spool_resp.status_code == 200 + spool_data = spool_resp.json() + assert spool_data["last_scale_weight"] == 750 + assert spool_data["last_weighed_at"] is not None + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_update_spool_weight_missing_spool_404(self, async_client: AsyncClient): + resp = await async_client.post( + f"{API}/scale/update-spool-weight", + json={"spool_id": 99999, "weight_grams": 500}, + ) + assert resp.status_code == 404 + + +# ============================================================================ +# Calibration endpoints +# ============================================================================ + + +class TestCalibrationEndpoints: + @pytest.mark.asyncio + @pytest.mark.integration + async def test_tare_queues_command(self, async_client: AsyncClient, device_factory): + await device_factory(device_id="sb-tare") + + resp = await async_client.post(f"{API}/devices/sb-tare/calibration/tare", json={}) + assert resp.status_code == 200 + assert resp.json()["status"] == "ok" + + # Verify pending_command via heartbeat + with patch("backend.app.api.routes.spoolbuddy.ws_manager") as mock_ws: + mock_ws.broadcast = AsyncMock() + hb = await async_client.post( + f"{API}/devices/sb-tare/heartbeat", + json={"nfc_ok": True, "scale_ok": True, "uptime_s": 1}, + ) + assert hb.json()["pending_command"] == "tare" + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_tare_unknown_device_404(self, async_client: AsyncClient): + resp = await async_client.post(f"{API}/devices/ghost/calibration/tare", json={}) + assert resp.status_code == 404 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_set_tare_offset(self, async_client: AsyncClient, device_factory): + await device_factory(device_id="sb-st", calibration_factor=0.005) + + resp = await async_client.post( + f"{API}/devices/sb-st/calibration/set-tare", + json={"tare_offset": 54321}, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["tare_offset"] == 54321 + assert data["calibration_factor"] == pytest.approx(0.005) + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_set_calibration_factor(self, async_client: AsyncClient, device_factory): + # known_weight=200g, raw_adc=50000, tare=10000 → factor=200/(50000-10000)=0.005 + await device_factory(device_id="sb-cf", tare_offset=10000) + + resp = await async_client.post( + f"{API}/devices/sb-cf/calibration/set-factor", + json={"known_weight_grams": 200, "raw_adc": 50000}, + ) + + assert resp.status_code == 200 + data = resp.json() + assert data["calibration_factor"] == pytest.approx(0.005) + assert data["tare_offset"] == 10000 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_set_calibration_factor_zero_delta_400(self, async_client: AsyncClient, device_factory): + # raw_adc == tare_offset → delta is 0 → 400 error + await device_factory(device_id="sb-zero", tare_offset=5000) + + resp = await async_client.post( + f"{API}/devices/sb-zero/calibration/set-factor", + json={"known_weight_grams": 100, "raw_adc": 5000}, + ) + + assert resp.status_code == 400 + + @pytest.mark.asyncio + @pytest.mark.integration + async def test_get_calibration(self, async_client: AsyncClient, device_factory): + await device_factory( + device_id="sb-gcal", + tare_offset=11111, + calibration_factor=0.0042, + ) + + resp = await async_client.get(f"{API}/devices/sb-gcal/calibration") + + assert resp.status_code == 200 + data = resp.json() + assert data["tare_offset"] == 11111 + assert data["calibration_factor"] == pytest.approx(0.0042) diff --git a/backend/tests/unit/services/test_bambu_ftp.py b/backend/tests/unit/services/test_bambu_ftp.py index 71aca7f89..a84b6b205 100644 --- a/backend/tests/unit/services/test_bambu_ftp.py +++ b/backend/tests/unit/services/test_bambu_ftp.py @@ -28,7 +28,7 @@ ) # Brief delay to allow pyftpdlib to flush uploaded files to disk. -# Needed because upload_file() skips voidresp() for A1 compatibility, +# Needed because upload_file() skips voidresp() for all models, # so the server may still be processing the data channel close event. _UPLOAD_FLUSH_DELAY = 0.3 @@ -306,8 +306,8 @@ def test_upload_success(self, ftp_client_factory, ftp_server, tmp_path): result = client.upload_file(local, "/cache/upload.3mf") assert result is True client.disconnect() - # Verify via fresh connection (upload_file skips voidresp() - # so the original session can't be reused for download) + # Verify via fresh connection (upload_file skips voidresp() for all + # models, so the original session can't be reused for download) time.sleep(_UPLOAD_FLUSH_DELAY) client2 = ftp_client_factory() client2.connect() @@ -403,11 +403,11 @@ def test_upload_bytes_failure(self, ftp_client_factory, ftp_server): def test_upload_large_chunked(self, ftp_client_factory, ftp_server, tmp_path): """Large file upload in chunks completes without error. - Uses 2.5MB to trigger multiple chunks with 1MB CHUNK_SIZE. - Content verification skipped because upload_file() doesn't call - voidresp() (for A1 compatibility), so the server may still be - flushing when we check. The upload result=True confirms the - client sent all chunks without error. + Uses 2.5MB to trigger multiple chunks with 64KB CHUNK_SIZE. + Content verification skipped because upload_file() skips + voidresp() for all models, so the server may still be flushing + when we check. The upload result=True confirms the client sent + all chunks without error. """ content = b"C" * (1024 * 1024 * 2 + 512 * 1024) local = tmp_path / "large.bin" @@ -422,8 +422,8 @@ def on_progress(uploaded, total): client.connect() result = client.upload_file(local, "/cache/large.bin", on_progress) assert result is True - # Verify multiple chunks were sent - assert len(progress_calls) >= 3 # 2.5MB / 1MB = at least 3 chunks + # Verify many chunks were sent (2.5MB / 64KB = 40 chunks) + assert len(progress_calls) >= 38 assert progress_calls[-1][0] == len(content) client.disconnect() @@ -871,46 +871,27 @@ def test_failure_injection_count_decrements(self, ftp_client_factory, ftp_server assert result2 == b"data after retry" client.disconnect() - def test_upload_succeeds_despite_voidresp_error(self, ftp_client_factory, ftp_server, tmp_path): - """Upload returns True even when voidresp() gets a non-clean response. + def test_upload_skips_voidresp(self, ftp_client_factory, ftp_server, tmp_path): + """Upload returns True without calling voidresp() for any model. - Regression: Previously, a voidresp() error after successful data transfer - returned False, which caused with_ftp_retry to re-upload the entire file - in a loop. + voidresp() is skipped for all models: A1 printers hang on it, + H2D printers delay the 226 response by 30+ seconds, and X1C/P1S + gain nothing from waiting. The file is on the SD card once + sendall() returns. """ content = b"voidresp test data" local = tmp_path / "voidresp_test.3mf" local.write_bytes(content) - client = ftp_client_factory(printer_model="X1C") - client.connect() - result = client.upload_file(local, "/cache/voidresp_test.3mf") - assert result is True - client.disconnect() - # Verify the file is actually on the server - time.sleep(_UPLOAD_FLUSH_DELAY) - client2 = ftp_client_factory() - client2.connect() - downloaded = client2.download_file("/cache/voidresp_test.3mf") - assert downloaded == content - client2.disconnect() - - def test_upload_a1_skips_voidresp(self, ftp_client_factory, ftp_server, tmp_path): - """A1 models skip voidresp() entirely and still return True. - - Regression: A1 printers hang on voidresp() after transfercmd uploads. - """ - content = b"A1 upload test" - local = tmp_path / "a1_test.3mf" - local.write_bytes(content) - client = ftp_client_factory(printer_model="A1") - client.connect() - result = client.upload_file(local, "/cache/a1_test.3mf") - assert result is True - client.disconnect() - # Verify the file is actually on the server - time.sleep(_UPLOAD_FLUSH_DELAY) - client2 = ftp_client_factory() - client2.connect() - downloaded = client2.download_file("/cache/a1_test.3mf") - assert downloaded == content - client2.disconnect() + for model in ("X1C", "A1", "H2D", None): + client = ftp_client_factory(printer_model=model) + client.connect() + result = client.upload_file(local, "/cache/voidresp_test.3mf") + assert result is True, f"Upload failed for model={model}" + client.disconnect() + # Verify the file is actually on the server + time.sleep(_UPLOAD_FLUSH_DELAY) + client2 = ftp_client_factory() + client2.connect() + downloaded = client2.download_file("/cache/voidresp_test.3mf") + assert downloaded == content, f"Content mismatch for model={model}" + client2.disconnect() diff --git a/backend/tests/unit/services/test_virtual_printer.py b/backend/tests/unit/services/test_virtual_printer.py index 950bef09e..a6d0b3db4 100644 --- a/backend/tests/unit/services/test_virtual_printer.py +++ b/backend/tests/unit/services/test_virtual_printer.py @@ -310,6 +310,192 @@ async def test_manager_stop_all(self, manager, tmp_path): await manager.stop_all() assert len(manager._instances) == 0 + # ======================================================================== + # Tests for sync_from_db config change detection + # ======================================================================== + + def _make_db_vp(self, **overrides): + """Create a mock VirtualPrinter DB object.""" + defaults = { + "id": 1, + "name": "TestVP", + "enabled": True, + "mode": "immediate", + "model": "C11", + "access_code": "12345678", + "serial_suffix": "391800001", + "bind_ip": "", + "remote_interface_ip": "", + "target_printer_id": None, + "position": 0, + } + defaults.update(overrides) + vp = MagicMock() + for k, v in defaults.items(): + setattr(vp, k, v) + return vp + + def _setup_sync_mocks(self, manager, enabled_vps, tmp_path): + """Wire up session_factory mock for sync_from_db.""" + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = enabled_vps + + mock_db = AsyncMock() + mock_db.execute = AsyncMock(return_value=mock_result) + mock_db.__aenter__ = AsyncMock(return_value=mock_db) + mock_db.__aexit__ = AsyncMock(return_value=False) + + manager._session_factory = MagicMock(return_value=mock_db) + manager._base_dir = tmp_path + + @pytest.mark.asyncio + async def test_sync_from_db_restarts_on_mode_change(self, manager, tmp_path): + """Verify sync_from_db restarts VP when mode changes.""" + from backend.app.services.virtual_printer.manager import VirtualPrinterInstance + + inst = VirtualPrinterInstance( + vp_id=1, + name="TestVP", + mode="immediate", + model="C11", + access_code="12345678", + serial_suffix="391800001", + base_dir=tmp_path, + ) + inst.stop_server = AsyncMock() + manager._instances[1] = inst + + # DB says mode changed to "archive" + db_vp = self._make_db_vp(mode="archive") + self._setup_sync_mocks(manager, [db_vp], tmp_path) + + with patch.object(manager, "remove_instance", new_callable=AsyncMock) as mock_remove: + # Patch VirtualPrinterInstance to prevent actual start + with patch("backend.app.services.virtual_printer.manager.VirtualPrinterInstance") as MockInst: + mock_new = MagicMock() + mock_new.start_server = AsyncMock() + MockInst.return_value = mock_new + + await manager.sync_from_db() + + mock_remove.assert_called_once_with(1) + + @pytest.mark.asyncio + async def test_sync_from_db_restarts_on_access_code_change(self, manager, tmp_path): + """Verify sync_from_db restarts VP when access_code changes.""" + from backend.app.services.virtual_printer.manager import VirtualPrinterInstance + + inst = VirtualPrinterInstance( + vp_id=1, + name="TestVP", + mode="immediate", + model="C11", + access_code="12345678", + serial_suffix="391800001", + base_dir=tmp_path, + ) + inst.stop_server = AsyncMock() + manager._instances[1] = inst + + db_vp = self._make_db_vp(access_code="newcode99") + self._setup_sync_mocks(manager, [db_vp], tmp_path) + + with patch.object(manager, "remove_instance", new_callable=AsyncMock) as mock_remove: + with patch("backend.app.services.virtual_printer.manager.VirtualPrinterInstance") as MockInst: + mock_new = MagicMock() + mock_new.start_server = AsyncMock() + MockInst.return_value = mock_new + + await manager.sync_from_db() + + mock_remove.assert_called_once_with(1) + + @pytest.mark.asyncio + async def test_sync_from_db_skips_unchanged_instance(self, manager, tmp_path): + """Verify sync_from_db does NOT restart when config is identical.""" + from backend.app.services.virtual_printer.manager import VirtualPrinterInstance + + inst = VirtualPrinterInstance( + vp_id=1, + name="TestVP", + mode="immediate", + model="C11", + access_code="12345678", + serial_suffix="391800001", + base_dir=tmp_path, + ) + manager._instances[1] = inst + + # DB matches running config exactly + db_vp = self._make_db_vp() + self._setup_sync_mocks(manager, [db_vp], tmp_path) + + with patch.object(manager, "remove_instance", new_callable=AsyncMock) as mock_remove: + await manager.sync_from_db() + + mock_remove.assert_not_called() + + @pytest.mark.asyncio + async def test_sync_from_db_restarts_on_bind_ip_change(self, manager, tmp_path): + """Verify sync_from_db restarts VP when bind_ip changes.""" + from backend.app.services.virtual_printer.manager import VirtualPrinterInstance + + inst = VirtualPrinterInstance( + vp_id=1, + name="TestVP", + mode="immediate", + model="C11", + access_code="12345678", + serial_suffix="391800001", + bind_ip="192.168.1.10", + base_dir=tmp_path, + ) + inst.stop_server = AsyncMock() + manager._instances[1] = inst + + db_vp = self._make_db_vp(bind_ip="192.168.1.20") + self._setup_sync_mocks(manager, [db_vp], tmp_path) + + with patch.object(manager, "remove_instance", new_callable=AsyncMock) as mock_remove: + with patch("backend.app.services.virtual_printer.manager.VirtualPrinterInstance") as MockInst: + mock_new = MagicMock() + mock_new.start_server = AsyncMock() + MockInst.return_value = mock_new + + await manager.sync_from_db() + + mock_remove.assert_called_once_with(1) + + @pytest.mark.asyncio + async def test_sync_from_db_restarts_on_model_change(self, manager, tmp_path): + """Verify sync_from_db restarts VP when model changes.""" + from backend.app.services.virtual_printer.manager import VirtualPrinterInstance + + inst = VirtualPrinterInstance( + vp_id=1, + name="TestVP", + mode="immediate", + model="C11", + access_code="12345678", + serial_suffix="391800001", + base_dir=tmp_path, + ) + inst.stop_server = AsyncMock() + manager._instances[1] = inst + + db_vp = self._make_db_vp(model="C12") + self._setup_sync_mocks(manager, [db_vp], tmp_path) + + with patch.object(manager, "remove_instance", new_callable=AsyncMock) as mock_remove: + with patch("backend.app.services.virtual_printer.manager.VirtualPrinterInstance") as MockInst: + mock_new = MagicMock() + mock_new.start_server = AsyncMock() + MockInst.return_value = mock_new + + await manager.sync_from_db() + + mock_remove.assert_called_once_with(1) + class TestFTPSession: """Tests for FTP session handling.""" @@ -1219,4 +1405,6 @@ async def test_start_server_creates_bind_server(self, tmp_path): model="3DPrinter-X1-Carbon", name="Bambuddy", bind_address="192.168.1.50", + cert_path=Path("/tmp/cert.pem"), # nosec B108 + key_path=Path("/tmp/key.pem"), # nosec B108 ) diff --git a/backend/tests/unit/test_bug_report.py b/backend/tests/unit/test_bug_report.py new file mode 100644 index 000000000..124c7054d --- /dev/null +++ b/backend/tests/unit/test_bug_report.py @@ -0,0 +1,336 @@ +"""Unit tests for bug report service and route.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + + +class TestBugReportService: + """Tests for bug_report.submit_report().""" + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_submit_success(self): + """Successful relay call saves report and returns issue details.""" + from backend.app.services.bug_report import submit_report + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "success": True, + "message": "Created", + "issue_url": "https://github.com/maziggy/bambuddy/issues/99", + "issue_number": 99, + } + + mock_db = AsyncMock() + mock_db.add = MagicMock() + mock_db.commit = AsyncMock() + + with ( + patch("backend.app.services.bug_report.httpx.AsyncClient") as mock_client_cls, + patch("backend.app.services.bug_report.async_session") as mock_session, + patch("backend.app.services.bug_report._rate_limit_timestamps", []), + patch("backend.app.services.bug_report.BUG_REPORT_RELAY_URL", "https://example.com/api/bug-report"), + ): + mock_client = AsyncMock() + mock_client.post = AsyncMock(return_value=mock_response) + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + mock_client_cls.return_value = mock_client + + mock_session.return_value.__aenter__ = AsyncMock(return_value=mock_db) + mock_session.return_value.__aexit__ = AsyncMock(return_value=False) + + result = await submit_report( + description="Test bug", + reporter_email="user@test.com", + screenshot_base64=None, + support_info=None, + ) + + assert result["success"] is True + assert result["issue_number"] == 99 + assert result["issue_url"] == "https://github.com/maziggy/bambuddy/issues/99" + mock_db.add.assert_called_once() + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_submit_rate_limited(self): + """Returns failure when rate limit exceeded.""" + import time + + from backend.app.services.bug_report import submit_report + + timestamps = [time.time()] * 5 # Already at limit + + with patch("backend.app.services.bug_report._rate_limit_timestamps", timestamps): + result = await submit_report( + description="Test", + reporter_email=None, + screenshot_base64=None, + support_info=None, + ) + + assert result["success"] is False + assert "Rate limit" in result["message"] + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_submit_no_relay_url(self): + """Returns failure when relay URL is not configured.""" + from backend.app.services.bug_report import submit_report + + with ( + patch("backend.app.services.bug_report._rate_limit_timestamps", []), + patch("backend.app.services.bug_report.BUG_REPORT_RELAY_URL", ""), + ): + result = await submit_report( + description="Test", + reporter_email=None, + screenshot_base64=None, + support_info=None, + ) + + assert result["success"] is False + assert "not configured" in result["message"] + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_submit_relay_http_error(self): + """Non-200 relay response saves failed report.""" + from backend.app.services.bug_report import submit_report + + mock_response = MagicMock() + mock_response.status_code = 500 + + mock_db = AsyncMock() + mock_db.add = MagicMock() + mock_db.commit = AsyncMock() + + with ( + patch("backend.app.services.bug_report.httpx.AsyncClient") as mock_client_cls, + patch("backend.app.services.bug_report.async_session") as mock_session, + patch("backend.app.services.bug_report._rate_limit_timestamps", []), + patch("backend.app.services.bug_report.BUG_REPORT_RELAY_URL", "https://example.com/api/bug-report"), + ): + mock_client = AsyncMock() + mock_client.post = AsyncMock(return_value=mock_response) + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + mock_client_cls.return_value = mock_client + + mock_session.return_value.__aenter__ = AsyncMock(return_value=mock_db) + mock_session.return_value.__aexit__ = AsyncMock(return_value=False) + + result = await submit_report( + description="Test", + reporter_email=None, + screenshot_base64=None, + support_info=None, + ) + + assert result["success"] is False + assert "not available" in result["message"] + mock_db.add.assert_called_once() + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_submit_relay_connection_error(self): + """Connection failure saves failed report.""" + from backend.app.services.bug_report import submit_report + + mock_db = AsyncMock() + mock_db.add = MagicMock() + mock_db.commit = AsyncMock() + + with ( + patch("backend.app.services.bug_report.httpx.AsyncClient") as mock_client_cls, + patch("backend.app.services.bug_report.async_session") as mock_session, + patch("backend.app.services.bug_report._rate_limit_timestamps", []), + patch("backend.app.services.bug_report.BUG_REPORT_RELAY_URL", "https://example.com/api/bug-report"), + ): + mock_client = AsyncMock() + mock_client.post = AsyncMock(side_effect=ConnectionError("Connection refused")) + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + mock_client_cls.return_value = mock_client + + mock_session.return_value.__aenter__ = AsyncMock(return_value=mock_db) + mock_session.return_value.__aexit__ = AsyncMock(return_value=False) + + result = await submit_report( + description="Test", + reporter_email=None, + screenshot_base64=None, + support_info=None, + ) + + assert result["success"] is False + assert "Failed to submit" in result["message"] + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_submit_relay_failure_response(self): + """Relay returns success=false in JSON body.""" + from backend.app.services.bug_report import submit_report + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "success": False, + "message": "GitHub API error", + } + + mock_db = AsyncMock() + mock_db.add = MagicMock() + mock_db.commit = AsyncMock() + + with ( + patch("backend.app.services.bug_report.httpx.AsyncClient") as mock_client_cls, + patch("backend.app.services.bug_report.async_session") as mock_session, + patch("backend.app.services.bug_report._rate_limit_timestamps", []), + patch("backend.app.services.bug_report.BUG_REPORT_RELAY_URL", "https://example.com/api/bug-report"), + ): + mock_client = AsyncMock() + mock_client.post = AsyncMock(return_value=mock_response) + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + mock_client_cls.return_value = mock_client + + mock_session.return_value.__aenter__ = AsyncMock(return_value=mock_db) + mock_session.return_value.__aexit__ = AsyncMock(return_value=False) + + result = await submit_report( + description="Test", + reporter_email=None, + screenshot_base64=None, + support_info=None, + ) + + assert result["success"] is False + assert "GitHub API error" in result["message"] + + +class TestCollectDebugLogs: + """Tests for _collect_debug_logs().""" + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_enables_debug_when_not_already_enabled(self): + """Debug logging is enabled, then restored after collection.""" + from backend.app.api.routes.bug_report import _collect_debug_logs + + apply_calls = [] + + mock_db = AsyncMock() + + with ( + patch("backend.app.api.routes.bug_report.async_session") as mock_session, + patch("backend.app.api.routes.bug_report._get_debug_setting", return_value=(False, None)), + patch("backend.app.api.routes.bug_report._set_debug_setting", new_callable=AsyncMock) as mock_set, + patch( + "backend.app.api.routes.bug_report._apply_log_level", + side_effect=lambda v: apply_calls.append(v), + ), + patch("backend.app.api.routes.bug_report.printer_manager") as mock_pm, + patch("backend.app.api.routes.bug_report._get_recent_sanitized_logs", return_value="DEBUG log line"), + patch("backend.app.api.routes.bug_report.asyncio.sleep", new_callable=AsyncMock), + patch("backend.app.api.routes.bug_report.LOG_COLLECTION_SECONDS", 0), + ): + mock_pm._clients = {} + mock_session.return_value.__aenter__ = AsyncMock(return_value=mock_db) + mock_session.return_value.__aexit__ = AsyncMock(return_value=False) + + result = await _collect_debug_logs() + + assert result == "DEBUG log line" + assert apply_calls == [True, False] # enabled then restored + assert mock_set.call_count == 2 + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_skips_enable_when_already_debug(self): + """Debug logging not toggled when already enabled.""" + from backend.app.api.routes.bug_report import _collect_debug_logs + + with ( + patch("backend.app.api.routes.bug_report.async_session") as mock_session, + patch("backend.app.api.routes.bug_report._get_debug_setting", return_value=(True, None)), + patch("backend.app.api.routes.bug_report._set_debug_setting", new_callable=AsyncMock) as mock_set, + patch("backend.app.api.routes.bug_report._apply_log_level") as mock_apply, + patch("backend.app.api.routes.bug_report.printer_manager") as mock_pm, + patch("backend.app.api.routes.bug_report._get_recent_sanitized_logs", return_value="logs"), + patch("backend.app.api.routes.bug_report.asyncio.sleep", new_callable=AsyncMock), + patch("backend.app.api.routes.bug_report.LOG_COLLECTION_SECONDS", 0), + ): + mock_pm._clients = {} + mock_db = AsyncMock() + mock_session.return_value.__aenter__ = AsyncMock(return_value=mock_db) + mock_session.return_value.__aexit__ = AsyncMock(return_value=False) + + result = await _collect_debug_logs() + + assert result == "logs" + mock_apply.assert_not_called() + mock_set.assert_not_called() + + @pytest.mark.asyncio + @pytest.mark.unit + async def test_pushes_all_connected_printers(self): + """Sends status update request to all connected printers.""" + from backend.app.api.routes.bug_report import _collect_debug_logs + + with ( + patch("backend.app.api.routes.bug_report.async_session") as mock_session, + patch("backend.app.api.routes.bug_report._get_debug_setting", return_value=(True, None)), + patch("backend.app.api.routes.bug_report._set_debug_setting", new_callable=AsyncMock), + patch("backend.app.api.routes.bug_report._apply_log_level"), + patch("backend.app.api.routes.bug_report.printer_manager") as mock_pm, + patch("backend.app.api.routes.bug_report._get_recent_sanitized_logs", return_value=""), + patch("backend.app.api.routes.bug_report.asyncio.sleep", new_callable=AsyncMock), + patch("backend.app.api.routes.bug_report.LOG_COLLECTION_SECONDS", 0), + ): + mock_pm._clients = {"printer1": MagicMock(), "printer2": MagicMock()} + mock_db = AsyncMock() + mock_session.return_value.__aenter__ = AsyncMock(return_value=mock_db) + mock_session.return_value.__aexit__ = AsyncMock(return_value=False) + + await _collect_debug_logs() + + assert mock_pm.request_status_update.call_count == 2 + + +class TestRateLimit: + """Tests for rate limiting in bug report service.""" + + def test_check_rate_limit_allows_first(self): + """First request within window is allowed.""" + from backend.app.services.bug_report import _check_rate_limit + + with patch("backend.app.services.bug_report._rate_limit_timestamps", []): + assert _check_rate_limit() is True + + def test_check_rate_limit_blocks_at_max(self): + """Requests at max limit are blocked.""" + import time + + from backend.app.services.bug_report import _check_rate_limit + + now = time.time() + timestamps = [now] * 5 + + with patch("backend.app.services.bug_report._rate_limit_timestamps", timestamps): + assert _check_rate_limit() is False + + def test_check_rate_limit_clears_old(self): + """Old timestamps outside window are cleared.""" + import time + + from backend.app.services.bug_report import _check_rate_limit + + old_time = time.time() - 7200 # 2 hours ago + timestamps = [old_time] * 5 + + with patch("backend.app.services.bug_report._rate_limit_timestamps", timestamps): + assert _check_rate_limit() is True diff --git a/backend/tests/unit/test_opentag3d.py b/backend/tests/unit/test_opentag3d.py new file mode 100644 index 000000000..d9f2a0bbb --- /dev/null +++ b/backend/tests/unit/test_opentag3d.py @@ -0,0 +1,142 @@ +"""Unit tests for OpenTag3D NDEF encoder.""" + +import struct +from unittest.mock import MagicMock + +from backend.app.services.opentag3d import ( + OPENTAG3D_MIME_TYPE, + PAYLOAD_SIZE, + _build_payload, + encode_opentag3d, +) + + +def _make_spool(**kwargs): + """Create a mock Spool with default values.""" + defaults = { + "material": "PLA", + "subtype": "Matte", + "brand": "Polymaker", + "color_name": "Jade White", + "rgba": "00AE42FF", + "label_weight": 1000, + "nozzle_temp_min": 220, + } + defaults.update(kwargs) + spool = MagicMock() + for k, v in defaults.items(): + setattr(spool, k, v) + return spool + + +class TestBuildPayload: + def test_payload_is_102_bytes(self): + spool = _make_spool() + payload = _build_payload(spool) + assert len(payload) == PAYLOAD_SIZE + + def test_tag_version(self): + payload = _build_payload(_make_spool()) + version = struct.unpack_from(">H", payload, 0x00)[0] + assert version == 1000 + + def test_material_field(self): + payload = _build_payload(_make_spool(material="PETG")) + material = payload[0x02:0x07].decode("utf-8") + assert material == "PETG " + + def test_material_truncated(self): + payload = _build_payload(_make_spool(material="SUPERLONG")) + material = payload[0x02:0x07].decode("utf-8") + assert material == "SUPER" + + def test_modifiers_field(self): + payload = _build_payload(_make_spool(subtype="Silk")) + mods = payload[0x07:0x0C].decode("utf-8") + assert mods == "Silk " + + def test_modifiers_none(self): + payload = _build_payload(_make_spool(subtype=None)) + mods = payload[0x07:0x0C].decode("utf-8") + assert mods == " " + + def test_reserved_is_zero(self): + payload = _build_payload(_make_spool()) + assert payload[0x0C:0x1B] == b"\x00" * 15 + + def test_brand_field(self): + payload = _build_payload(_make_spool(brand="Polymaker")) + brand = payload[0x1B:0x2B].decode("utf-8") + assert brand == "Polymaker " + + def test_color_name_field(self): + payload = _build_payload(_make_spool(color_name="Jade White")) + cn = payload[0x2B:0x4B].decode("utf-8") + assert cn.startswith("Jade White") + assert len(cn) == 32 + + def test_rgba_field(self): + payload = _build_payload(_make_spool(rgba="FF0000FF")) + assert payload[0x4B:0x4F] == bytes([0xFF, 0x00, 0x00, 0xFF]) + + def test_rgba_none(self): + payload = _build_payload(_make_spool(rgba=None)) + assert payload[0x4B:0x4F] == b"\x00\x00\x00\x00" + + def test_target_diameter(self): + payload = _build_payload(_make_spool()) + diameter = struct.unpack_from(">H", payload, 0x5C)[0] + assert diameter == 1750 + + def test_target_weight(self): + payload = _build_payload(_make_spool(label_weight=750)) + weight = struct.unpack_from(">H", payload, 0x5E)[0] + assert weight == 750 + + def test_print_temp(self): + payload = _build_payload(_make_spool(nozzle_temp_min=220)) + assert payload[0x60] == 44 # 220 / 5 + + def test_print_temp_none(self): + payload = _build_payload(_make_spool(nozzle_temp_min=None)) + assert payload[0x60] == 0 + + +class TestEncodeOpentag3d: + def test_starts_with_cc(self): + data = encode_opentag3d(_make_spool()) + assert data[:4] == bytes([0xE1, 0x10, 0x12, 0x00]) + + def test_tlv_header(self): + data = encode_opentag3d(_make_spool()) + # TLV type = 0x03 + assert data[4] == 0x03 + # TLV length = 3 (record header) + 21 (mime type) + 102 (payload) = 126 + assert data[5] == 126 + + def test_ndef_record_header(self): + data = encode_opentag3d(_make_spool()) + # Record starts after CC(4) + TLV(2) = offset 6 + assert data[6] == 0xD2 # MB|ME|SR + TNF=MIME + assert data[7] == len(OPENTAG3D_MIME_TYPE) # type length = 21 + assert data[8] == PAYLOAD_SIZE # payload length = 102 + + def test_mime_type(self): + data = encode_opentag3d(_make_spool()) + mime = data[9:30] + assert mime == b"application/opentag3d" + + def test_ends_with_terminator(self): + data = encode_opentag3d(_make_spool()) + assert data[-1] == 0xFE + + def test_total_size(self): + data = encode_opentag3d(_make_spool()) + # CC(4) + TLV(2) + header(3) + type(21) + payload(102) + terminator(1) = 133 + assert len(data) == 133 + + def test_fits_ntag213(self): + """NTAG213 has 36 writable pages (144 bytes). Our data must fit.""" + data = encode_opentag3d(_make_spool()) + ntag213_capacity = 36 * 4 # 144 bytes + assert len(data) <= ntag213_capacity diff --git a/backend/tests/unit/test_orca_profiles.py b/backend/tests/unit/test_orca_profiles.py index 12760bda5..6902fa825 100644 --- a/backend/tests/unit/test_orca_profiles.py +++ b/backend/tests/unit/test_orca_profiles.py @@ -194,6 +194,14 @@ def test_pa_in_name(self): assert _parse_material_from_name("Fiberlogy PA12+CF15") is None assert _parse_material_from_name("Fiberlogy PA @BBL X1C") == "PA" + def test_support_for_pattern(self): + from backend.app.services.orca_profiles import _parse_material_from_name + + # "PLA Support for PETG" — filament type is PETG, not PLA + assert _parse_material_from_name("PLA Support for PETG PETG Basic @Bambu Lab H2D 0.4 nozzle") == "PETG" + assert _parse_material_from_name("PLA Support for ABS @BBL X1C") == "ABS" + assert _parse_material_from_name("PVA Support for PLA @BBL X1C") == "PLA" + class TestParseVendorFromName: """Tests for _parse_vendor_from_name().""" diff --git a/backend/tests/unit/test_support_helpers.py b/backend/tests/unit/test_support_helpers.py index aa9019fb7..26f9d2fd3 100644 --- a/backend/tests/unit/test_support_helpers.py +++ b/backend/tests/unit/test_support_helpers.py @@ -373,6 +373,7 @@ async def test_docker_section_present_when_in_docker(self): with ( patch("backend.app.api.routes.support.is_running_in_docker", return_value=True), patch("backend.app.api.routes.support._get_container_memory_limit", return_value=1073741824), + patch("backend.app.api.routes.support._detect_docker_network_mode", return_value="bridge"), patch("backend.app.api.routes.support.async_session") as mock_session_ctx, patch("backend.app.api.routes.support.printer_manager") as mock_pm, patch( @@ -529,10 +530,11 @@ async def test_network_section(self): assert info["network"]["interface_count"] == 2 assert info["network"]["interfaces"][0]["name"] == "eth0" - assert info["network"]["interfaces"][0]["subnet"] == "192.168.1.0/24" - # Verify IP addresses are NOT included + assert info["network"]["interfaces"][0]["subnet"] == "x.x.1.0/24" + # Verify IP addresses are NOT included (first two octets masked) for iface in info["network"]["interfaces"]: assert "ip" not in iface + assert iface["subnet"].startswith("x.x.") @pytest.mark.asyncio @pytest.mark.unit diff --git a/docker-publish-daily-beta.sh b/docker-publish-daily-beta.sh new file mode 100755 index 000000000..4e5af64ee --- /dev/null +++ b/docker-publish-daily-beta.sh @@ -0,0 +1,392 @@ +#!/bin/bash +# Daily beta build: build Docker image, push to registries, create/update GitHub prerelease +# +# Usage: +# ./docker-publish-daily-beta.sh [--parallel] [--ghcr-only] [--dockerhub-only] [--skip-release] +# +# Examples: +# ./docker-publish-daily-beta.sh # Full daily beta workflow +# ./docker-publish-daily-beta.sh --parallel # Build both archs simultaneously +# ./docker-publish-daily-beta.sh --ghcr-only # Only push to GHCR +# ./docker-publish-daily-beta.sh --dockerhub-only # Only push to Docker Hub +# ./docker-publish-daily-beta.sh --skip-release # Build+push without GitHub release +# +# Reads APP_VERSION from backend/app/core/config.py (must be a beta version like 0.2.2b1). +# Builds and pushes a multi-arch Docker image tagged with that version, overwriting any +# previous image with the same tag. Optionally creates/updates a GitHub prerelease. +# +# Beta versions are never tagged as 'latest'. Users update by pulling the same tag +# (e.g., docker pull ghcr.io/maziggy/bambuddy:0.2.2b1) or using Watchtower. +# +# Prerequisites: +# 1. Log in to ghcr.io: +# echo $GITHUB_TOKEN | docker login ghcr.io -u YOUR_USERNAME --password-stdin +# +# 2. Log in to Docker Hub: +# docker login -u YOUR_USERNAME +# +# 3. GitHub CLI (gh) authenticated for creating releases +# +# Supported architectures: +# - linux/amd64 (x86_64, most servers/desktops) +# - linux/arm64 (Raspberry Pi 4/5, Apple Silicon via emulation) + +set -e + +# Configuration +GHCR_REGISTRY="ghcr.io" +DOCKERHUB_REGISTRY="docker.io" +IMAGE_NAME="maziggy/bambuddy" +GHCR_IMAGE="${GHCR_REGISTRY}/${IMAGE_NAME}" +DOCKERHUB_IMAGE="${DOCKERHUB_REGISTRY}/${IMAGE_NAME}" +PLATFORMS="linux/amd64,linux/arm64" +BUILDER_NAME="bambuddy-builder" +CONFIG_FILE="backend/app/core/config.py" +CHANGELOG_FILE="CHANGELOG.md" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# Parse arguments +PARALLEL=false +PUSH_GHCR=true +PUSH_DOCKERHUB=true +SKIP_RELEASE=false +for arg in "$@"; do + case $arg in + --parallel) + PARALLEL=true + ;; + --ghcr-only) + PUSH_DOCKERHUB=false + ;; + --dockerhub-only) + PUSH_GHCR=false + ;; + --skip-release) + SKIP_RELEASE=true + ;; + --help|-h) + echo "Usage: $0 [--parallel] [--ghcr-only] [--dockerhub-only] [--skip-release]" + echo "" + echo "Build and publish a daily beta Docker image using the APP_VERSION from config.py." + echo "" + echo "Options:" + echo " --parallel Build both architectures simultaneously" + echo " --ghcr-only Only push to GitHub Container Registry" + echo " --dockerhub-only Only push to Docker Hub" + echo " --skip-release Build+push without creating/updating GitHub release" + echo " --help, -h Show this help" + exit 0 + ;; + *) + echo -e "${RED}Unknown argument: $arg${NC}" + echo "Run $0 --help for usage" + exit 1 + ;; + esac +done + +# ============================================================ +# Step 1: Read and validate APP_VERSION +# ============================================================ +echo -e "${BLUE}[1/4] Validating APP_VERSION...${NC}" + +VERSION=$(grep -oP 'APP_VERSION = "\K[^"]+' "$CONFIG_FILE") + +if [ -z "$VERSION" ]; then + echo -e "${RED}Error: Could not read APP_VERSION from ${CONFIG_FILE}${NC}" + exit 1 +fi + +if ! [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+b[0-9]+$ ]]; then + echo -e "${RED}Error: APP_VERSION '${VERSION}' is not a beta version (expected X.Y.Zb)${NC}" + exit 1 +fi + +echo -e "${GREEN} APP_VERSION: ${VERSION}${NC}" + +# ============================================================ +# Step 2: Build & push Docker images +# ============================================================ +echo "" + +# Get CPU count +CPU_COUNT=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 4) + +echo -e "${GREEN}================================================${NC}" +echo -e "${GREEN} Daily beta build${NC}" +echo -e "${GREEN} Version: ${VERSION}${NC}" +echo -e "${GREEN} Platforms: ${PLATFORMS}${NC}" +echo -e "${GREEN} CPU cores: ${CPU_COUNT}${NC}" +if [ "$PARALLEL" = true ]; then + echo -e "${GREEN} Mode: PARALLEL (both archs simultaneously)${NC}" +else + echo -e "${GREEN} Mode: Sequential (amd64 → arm64)${NC}" +fi +echo -e "${GREEN} Registries:${NC}" +if [ "$PUSH_GHCR" = true ]; then + echo -e "${GREEN} - ${GHCR_IMAGE}${NC}" +fi +if [ "$PUSH_DOCKERHUB" = true ]; then + echo -e "${GREEN} - ${DOCKERHUB_IMAGE}${NC}" +fi +echo -e "${GREEN}================================================${NC}" +echo "" + +# Check registry logins +if [ "$PUSH_GHCR" = true ]; then + if ! grep -q "ghcr.io" ~/.docker/config.json 2>/dev/null; then + echo -e "${YELLOW}Warning: You may not be logged in to ghcr.io${NC}" + echo "Run: echo \$GITHUB_TOKEN | docker login ghcr.io -u YOUR_USERNAME --password-stdin" + echo "" + fi +fi + +if [ "$PUSH_DOCKERHUB" = true ]; then + if ! grep -q "index.docker.io\|docker.io" ~/.docker/config.json 2>/dev/null; then + echo -e "${RED}Error: You are not logged in to Docker Hub${NC}" + echo "Run: docker login -u YOUR_USERNAME" + echo "" + exit 1 + fi +fi + +# Setup buildx builder if not exists +echo -e "${BLUE}[2/4] Setting up Docker Buildx and building...${NC}" +if ! docker buildx inspect "$BUILDER_NAME" >/dev/null 2>&1; then + echo "Creating new buildx builder: $BUILDER_NAME (optimized for ${CPU_COUNT} cores)" + docker buildx create \ + --name "$BUILDER_NAME" \ + --driver docker-container \ + --driver-opt network=host \ + --driver-opt "env.BUILDKIT_STEP_LOG_MAX_SIZE=10000000" \ + --buildkitd-flags "--allow-insecure-entitlement network.host --oci-worker-gc=false" \ + --config /dev/stdin <&1 | sed 's/^/[amd64] /' + echo -e "${GREEN}[amd64] Complete!${NC}" + ) & + PID_AMD64=$! + + # Build arm64 in background + ( + echo -e "${BLUE}[arm64] Starting build...${NC}" + docker buildx build \ + --platform linux/arm64 \ + ${ARCH_TAGS_ARM64} \ + ${BUILD_ARGS} \ + --push \ + . 2>&1 | sed 's/^/[arm64] /' + echo -e "${GREEN}[arm64] Complete!${NC}" + ) & + PID_ARM64=$! + + # Wait for both builds + echo "Waiting for parallel builds to complete..." + wait $PID_AMD64 + wait $PID_ARM64 + + # Create multi-arch manifests per registry (no cross-registry blob copies) + echo -e "${BLUE}Creating multi-arch manifests...${NC}" + + if [ "$PUSH_GHCR" = true ]; then + echo -e "${BLUE} Creating GHCR manifest...${NC}" + docker buildx imagetools create \ + -t "${GHCR_IMAGE}:${VERSION}" \ + "${GHCR_IMAGE}:${VERSION}-amd64" \ + "${GHCR_IMAGE}:${VERSION}-arm64" + fi + if [ "$PUSH_DOCKERHUB" = true ]; then + echo -e "${BLUE} Creating Docker Hub manifest...${NC}" + docker buildx imagetools create \ + -t "${DOCKERHUB_IMAGE}:${VERSION}" \ + "${DOCKERHUB_IMAGE}:${VERSION}-amd64" \ + "${DOCKERHUB_IMAGE}:${VERSION}-arm64" + fi +else + # Sequential build (default): Build both platforms in one command + echo -e "${YELLOW}Building sequentially with ${CPU_COUNT} cores (no cache)...${NC}" + DOCKER_BUILDKIT=1 docker buildx build \ + --platform "$PLATFORMS" \ + ${BUILD_ARGS} \ + $TAGS \ + --push \ + . +fi + +# ============================================================ +# Step 3: Create/update GitHub release +# ============================================================ +if [ "$SKIP_RELEASE" = true ]; then + echo -e "${YELLOW}[3/4] Skipping GitHub release (--skip-release)${NC}" +else + echo -e "${BLUE}[3/4] Creating/updating GitHub release...${NC}" + + # Extract release notes from CHANGELOG: content between ## [] and the next ## [ heading + CHANGELOG_NOTES=$(sed -n "/^## \[${VERSION}\]/,/^## \[/{/^## \[/!p}" "$CHANGELOG_FILE" | sed '/^$/d; 1{/^$/d}') + + if [ -z "$CHANGELOG_NOTES" ]; then + echo -e "${YELLOW} Warning: No changelog notes found for ${VERSION}${NC}" + CHANGELOG_NOTES="No changelog notes available for this release." + fi + + # Build pull commands for the release body + PULL_COMMANDS="" + if [ "$PUSH_GHCR" = true ]; then + PULL_COMMANDS="docker pull ghcr.io/maziggy/bambuddy:${VERSION}" + fi + if [ "$PUSH_DOCKERHUB" = true ]; then + if [ -n "$PULL_COMMANDS" ]; then + PULL_COMMANDS="${PULL_COMMANDS} +# or +docker pull maziggy/bambuddy:${VERSION}" + else + PULL_COMMANDS="docker pull maziggy/bambuddy:${VERSION}" + fi + fi + + # Create the release body + TODAY=$(date +%Y-%m-%d) + RELEASE_BODY=$(cat < [!NOTE] +> This is a **daily beta build** (${TODAY}). It contains the latest fixes and improvements but may have undiscovered issues. +> +> **Docker users:** Update by pulling the new image: +> \`\`\` +> ${PULL_COMMANDS} +> \`\`\` +> +> **Tip:** Use [Watchtower](https://containrrr.dev/watchtower/) to automatically update when new daily builds are pushed. + +--- + +${CHANGELOG_NOTES} +EOF + ) + + # Delete existing release so the new one gets today's date + # (gh release edit only updates title/notes, not the creation timestamp) + if gh release view "v${VERSION}" >/dev/null 2>&1; then + echo " Deleting old release v${VERSION} (will recreate with today's date)..." + gh release delete "v${VERSION}" --yes --cleanup-tag + fi + + # Create/move tag to current HEAD and push + echo " Tagging current HEAD as v${VERSION}..." + git tag -f "v${VERSION}" + git push origin "v${VERSION}" --force + + echo " Creating release v${VERSION}..." + gh release create "v${VERSION}" \ + --title "Daily Beta Build v${VERSION} (${TODAY})" \ + --prerelease \ + --notes "$RELEASE_BODY" + echo -e "${GREEN} Created GitHub release: v${VERSION}${NC}" +fi + +# ============================================================ +# Step 4: Verify +# ============================================================ +echo -e "${BLUE}[4/4] Verifying...${NC}" + +if [ "$PUSH_GHCR" = true ]; then + echo -e "${BLUE}GHCR manifest:${NC}" + docker buildx imagetools inspect "${GHCR_IMAGE}:${VERSION}" +fi +if [ "$PUSH_DOCKERHUB" = true ]; then + echo -e "${BLUE}Docker Hub manifest:${NC}" + docker buildx imagetools inspect "${DOCKERHUB_IMAGE}:${VERSION}" +fi + +if [ "$SKIP_RELEASE" != true ]; then + echo "" + echo -e "${BLUE}GitHub release:${NC}" + gh release view "v${VERSION}" +fi + +# ============================================================ +# Summary +# ============================================================ +echo "" +echo -e "${GREEN}================================================${NC}" +echo -e "${GREEN} Daily beta build complete!${NC}" +echo -e "${GREEN} Version: ${VERSION}${NC}" +echo -e "${GREEN}================================================${NC}" +if [ "$PUSH_GHCR" = true ]; then + echo " GHCR: ${GHCR_IMAGE}:${VERSION}" +fi +if [ "$PUSH_DOCKERHUB" = true ]; then + echo " Docker Hub: ${DOCKERHUB_IMAGE}:${VERSION}" +fi +if [ "$SKIP_RELEASE" != true ]; then + echo " Release: https://github.com/${IMAGE_NAME}/releases/tag/v${VERSION}" +fi +echo "" +echo -e "${BLUE}Supported platforms:${NC}" +echo " - linux/amd64 (Intel/AMD servers, desktops)" +echo " - linux/arm64 (Raspberry Pi 4/5, Apple Silicon)" +echo "" +echo -e "${GREEN}Users can now run:${NC}" +if [ "$PUSH_GHCR" = true ]; then + echo " docker pull ${GHCR_IMAGE}:${VERSION}" +fi +if [ "$PUSH_DOCKERHUB" = true ]; then + echo " docker pull ${DOCKERHUB_IMAGE}:${VERSION}" + echo " docker pull ${IMAGE_NAME}:${VERSION} # shorthand" +fi diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 065134983..8ac91b438 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -32,6 +32,7 @@ "react-dom": "^19.2.0", "react-i18next": "^16.3.5", "react-router-dom": "^7.12.0", + "react-simple-keyboard": "^3.8.164", "recharts": "^3.5.1", "three": "^0.181.2" }, @@ -6856,6 +6857,15 @@ "react-dom": ">=18" } }, + "node_modules/react-simple-keyboard": { + "version": "3.8.164", + "resolved": "https://registry.npmjs.org/react-simple-keyboard/-/react-simple-keyboard-3.8.164.tgz", + "integrity": "sha512-VwmLyclUizzkpRy/2DeLZRtzjR3K6MLWDVV98492DC5a0ZUHt9JK0R27ZXbcn51OA80U84XTZsUZlU8iYXkgxQ==", + "peerDependencies": { + "react": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, "node_modules/readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", diff --git a/frontend/package.json b/frontend/package.json index ae68b308c..81b9ec0f5 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -38,6 +38,7 @@ "react-dom": "^19.2.0", "react-i18next": "^16.3.5", "react-router-dom": "^7.12.0", + "react-simple-keyboard": "^3.8.164", "recharts": "^3.5.1", "three": "^0.181.2" }, diff --git a/frontend/public/img/spoolbuddy_logo_dark.png b/frontend/public/img/spoolbuddy_logo_dark.png new file mode 100644 index 000000000..48e106c2b Binary files /dev/null and b/frontend/public/img/spoolbuddy_logo_dark.png differ diff --git a/frontend/public/img/spoolbuddy_logo_dark_small.png b/frontend/public/img/spoolbuddy_logo_dark_small.png new file mode 100644 index 000000000..d76fd1417 Binary files /dev/null and b/frontend/public/img/spoolbuddy_logo_dark_small.png differ diff --git a/frontend/public/spoolbuddy_logo_dark.png b/frontend/public/spoolbuddy_logo_dark.png new file mode 100644 index 000000000..48e106c2b Binary files /dev/null and b/frontend/public/spoolbuddy_logo_dark.png differ diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 0318517d8..4a6db4f97 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -26,9 +26,9 @@ import { AuthProvider, useAuth } from './contexts/AuthContext'; import { SpoolBuddyLayout } from './components/spoolbuddy/SpoolBuddyLayout'; import { SpoolBuddyDashboard } from './pages/spoolbuddy/SpoolBuddyDashboard'; import { SpoolBuddyAmsPage } from './pages/spoolbuddy/SpoolBuddyAmsPage'; -import { SpoolBuddyInventoryPage } from './pages/spoolbuddy/SpoolBuddyInventoryPage'; import { SpoolBuddySettingsPage } from './pages/spoolbuddy/SpoolBuddySettingsPage'; - +import { SpoolBuddyCalibrationPage } from './pages/spoolbuddy/SpoolBuddyCalibrationPage'; +import { SpoolBuddyWriteTagPage } from './pages/spoolbuddy/SpoolBuddyWriteTagPage'; const queryClient = new QueryClient({ defaultOptions: { queries: { @@ -123,8 +123,9 @@ function App() { }> } /> } /> - } /> + } /> } /> + } /> {/* Main app with WebSocket for real-time updates */} diff --git a/frontend/src/__tests__/components/BugReportBubble.test.tsx b/frontend/src/__tests__/components/BugReportBubble.test.tsx new file mode 100644 index 000000000..8f390ad06 --- /dev/null +++ b/frontend/src/__tests__/components/BugReportBubble.test.tsx @@ -0,0 +1,177 @@ +/** + * Tests for the BugReportBubble component. + */ + +import { describe, it, expect } from 'vitest'; +import { render, screen, waitFor } from '../utils'; +import userEvent from '@testing-library/user-event'; +import { http, HttpResponse } from 'msw'; +import { server } from '../mocks/server'; +import { BugReportBubble } from '../../components/BugReportBubble'; + +function getDescriptionTextarea() { + return document.querySelector('textarea') as HTMLTextAreaElement; +} + +function getSubmitButton() { + const buttons = screen.getAllByRole('button'); + return buttons.find( + (b) => + b.className.includes('bg-red-500') && + !b.className.includes('rounded-full') && + b.textContent !== '' + ); +} + +describe('BugReportBubble', () => { + it('renders the floating bug button', () => { + render(); + + const button = screen.getByRole('button'); + expect(button).toBeInTheDocument(); + }); + + it('opens panel when bubble is clicked', async () => { + const user = userEvent.setup(); + + render(); + await user.click(screen.getByRole('button')); + + expect(getDescriptionTextarea()).toBeInTheDocument(); + }); + + it('closes panel when X button is clicked', async () => { + const user = userEvent.setup(); + + render(); + + // Open + await user.click(screen.getByRole('button')); + expect(getDescriptionTextarea()).toBeInTheDocument(); + + // Close via the X button + const buttons = screen.getAllByRole('button'); + const closeButton = buttons.find((b) => b.querySelector('.lucide-x')); + if (closeButton) await user.click(closeButton); + + await waitFor(() => { + expect(document.querySelector('textarea')).not.toBeInTheDocument(); + }); + }); + + it('disables submit when description is empty', async () => { + const user = userEvent.setup(); + + render(); + await user.click(screen.getByRole('button')); + + expect(getSubmitButton()).toBeDisabled(); + }); + + it('enables submit when description is provided', async () => { + const user = userEvent.setup(); + + render(); + await user.click(screen.getByRole('button')); + + await user.type(getDescriptionTextarea(), 'Something is broken'); + + expect(getSubmitButton()).not.toBeDisabled(); + }); + + it('shows collecting state with countdown after submit', async () => { + const user = userEvent.setup(); + + // Delay the API response so we can see collecting state + server.use( + http.post('*/bug-report/submit', async () => { + await new Promise((resolve) => setTimeout(resolve, 60000)); + return HttpResponse.json({ success: true, message: 'ok', issue_url: null, issue_number: null }); + }) + ); + + render(); + await user.click(screen.getByRole('button')); + + await user.type(getDescriptionTextarea(), 'Test bug report'); + + const submitBtn = getSubmitButton(); + if (submitBtn) await user.click(submitBtn); + + // Should show collecting state + await waitFor(() => { + const collectingText = screen.queryByText(/collecting|Collecting|収集|Sammeln|Collecte|Raccolta|Coletando|收集/i); + expect(collectingText).toBeInTheDocument(); + }); + }); + + it('shows success state after successful submission', async () => { + const user = userEvent.setup(); + + server.use( + http.post('*/bug-report/submit', () => { + return HttpResponse.json({ + success: true, + message: 'Bug report submitted successfully!', + issue_url: 'https://github.com/maziggy/bambuddy/issues/42', + issue_number: 42, + }); + }) + ); + + render(); + await user.click(screen.getByRole('button')); + + await user.type(getDescriptionTextarea(), 'Test bug'); + + const submitBtn = getSubmitButton(); + if (submitBtn) await user.click(submitBtn); + + await waitFor( + () => { + expect(screen.getByText(/#42/)).toBeInTheDocument(); + }, + { timeout: 35000 } + ); + }); + + it('shows error state after failed submission', async () => { + const user = userEvent.setup(); + + server.use( + http.post('*/bug-report/submit', () => { + return HttpResponse.json({ + success: false, + message: 'Relay not available', + issue_url: null, + issue_number: null, + }); + }) + ); + + render(); + await user.click(screen.getByRole('button')); + + await user.type(getDescriptionTextarea(), 'Test bug'); + + const submitBtn = getSubmitButton(); + if (submitBtn) await user.click(submitBtn); + + await waitFor( + () => { + expect(screen.getByText(/Relay not available/)).toBeInTheDocument(); + }, + { timeout: 35000 } + ); + }); + + it('has expandable data collection notice', async () => { + const user = userEvent.setup(); + + render(); + await user.click(screen.getByRole('button')); + + const details = document.querySelector('details'); + expect(details).toBeInTheDocument(); + }); +}); diff --git a/frontend/src/__tests__/components/FileUploadModal.test.tsx b/frontend/src/__tests__/components/FileUploadModal.test.tsx new file mode 100644 index 000000000..b66ba536f --- /dev/null +++ b/frontend/src/__tests__/components/FileUploadModal.test.tsx @@ -0,0 +1,654 @@ +/** + * Tests for the FileUploadModal component. + * Tests file upload, drag-and-drop, ZIP/3MF/STL detection, and autoUpload mode. + */ + +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { screen, fireEvent, waitFor } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; +import { render } from '../utils'; +import { FileUploadModal } from '../../components/FileUploadModal'; +import { http, HttpResponse } from 'msw'; +import { server } from '../mocks/server'; + +describe('FileUploadModal', () => { + const defaultProps = { + folderId: null as number | null, + onClose: vi.fn(), + onUploadComplete: vi.fn(), + }; + + beforeEach(() => { + vi.clearAllMocks(); + + server.use( + http.post('/api/v1/library/files', () => { + return HttpResponse.json({ + id: 1, + filename: 'test.gcode.3mf', + file_type: '3mf', + file_size: 1048576, + thumbnail_path: null, + duplicate_of: null, + metadata: null, + }); + }), + http.post('/api/v1/library/extract-zip', () => { + return HttpResponse.json({ + extracted: 3, + errors: [], + }); + }) + ); + }); + + describe('rendering', () => { + it('renders the modal with title', () => { + render(); + expect(screen.getByText('Upload Files')).toBeInTheDocument(); + }); + + it('renders drag and drop zone', () => { + render(); + expect(screen.getByText(/Drag & drop/)).toBeInTheDocument(); + }); + + it('renders click to browse text', () => { + render(); + expect(screen.getByText(/click to browse/i)).toBeInTheDocument(); + }); + + it('renders Cancel button', () => { + render(); + expect(screen.getByRole('button', { name: 'Cancel' })).toBeInTheDocument(); + }); + + it('renders Upload button disabled when no files', () => { + render(); + const uploadButton = screen.getByRole('button', { name: /Upload/i }); + expect(uploadButton).toBeDisabled(); + }); + + it('shows all file types supported text', () => { + render(); + expect(screen.getByText(/All file types supported/i)).toBeInTheDocument(); + }); + }); + + describe('file selection', () => { + it('shows added file in the list', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.gcode.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + expect(screen.getByText('model.gcode.3mf')).toBeInTheDocument(); + }); + + it('shows file size in MB', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['x'.repeat(1048576)], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + expect(screen.getByText('1.00 MB')).toBeInTheDocument(); + }); + + it('enables Upload button when files are added', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + expect(uploadButton).not.toBeDisabled(); + }); + + it('shows file count in Upload button', async () => { + const user = userEvent.setup(); + render(); + + const files = [ + new File(['a'], 'file1.3mf', { type: 'application/octet-stream' }), + new File(['b'], 'file2.stl', { type: 'application/octet-stream' }), + ]; + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, files); + + expect(screen.getByRole('button', { name: /Upload \(2\)/i })).toBeInTheDocument(); + }); + + it('accepts any file type (not restricted like UploadModal)', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'readme.txt', { type: 'text/plain' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + expect(screen.getByText('readme.txt')).toBeInTheDocument(); + }); + }); + + describe('file removal', () => { + it('removes a file when X button is clicked', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + expect(screen.getByText('model.3mf')).toBeInTheDocument(); + + const fileRow = screen.getByText('model.3mf').closest('.flex'); + const removeButton = fileRow?.querySelector('button'); + if (removeButton) { + await user.click(removeButton); + } + + await waitFor(() => { + expect(screen.queryByText('model.3mf')).not.toBeInTheDocument(); + }); + }); + + it('disables Upload button after removing all files', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const fileRow = screen.getByText('model.3mf').closest('.flex'); + const removeButton = fileRow?.querySelector('button'); + if (removeButton) { + await user.click(removeButton); + } + + await waitFor(() => { + const uploadButton = screen.getByRole('button', { name: /Upload/i }); + expect(uploadButton).toBeDisabled(); + }); + }); + }); + + describe('file type detection', () => { + it('shows ZIP options when .zip file is added', async () => { + const user = userEvent.setup(); + render(); + + const zipFile = new File(['pk'], 'models.zip', { type: 'application/zip' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, zipFile); + + await waitFor(() => { + expect(screen.getByText('ZIP files detected')).toBeInTheDocument(); + expect(screen.getByText(/Preserve folder structure/)).toBeInTheDocument(); + expect(screen.getByText(/Create folder from ZIP/)).toBeInTheDocument(); + }); + }); + + it('shows 3MF info when .3mf file is added', async () => { + const user = userEvent.setup(); + render(); + + const threemfFile = new File(['content'], 'model.gcode.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, threemfFile); + + await waitFor(() => { + expect(screen.getByText('3MF files detected')).toBeInTheDocument(); + }); + }); + + it('shows STL thumbnail option when .stl file is added', async () => { + const user = userEvent.setup(); + render(); + + const stlFile = new File(['solid'], 'bracket.stl', { type: 'application/sla' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, stlFile); + + await waitFor(() => { + expect(screen.getByText('STL thumbnail generation')).toBeInTheDocument(); + expect(screen.getByText(/Thumbnails can be generated/i)).toBeInTheDocument(); + }); + }); + + it('shows STL thumbnail option when ZIP file is added (may contain STLs)', async () => { + const user = userEvent.setup(); + render(); + + const zipFile = new File(['pk'], 'models.zip', { type: 'application/zip' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, zipFile); + + await waitFor(() => { + expect(screen.getByText('STL thumbnail generation')).toBeInTheDocument(); + expect(screen.getByText(/ZIP files may contain STL/i)).toBeInTheDocument(); + }); + }); + }); + + describe('ZIP options', () => { + it('preserve structure checkbox is checked by default', async () => { + const user = userEvent.setup(); + render(); + + const zipFile = new File(['pk'], 'models.zip', { type: 'application/zip' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, zipFile); + + await waitFor(() => { + const label = screen.getByText(/Preserve folder structure/).closest('label'); + const checkbox = label?.querySelector('input[type="checkbox"]') as HTMLInputElement; + expect(checkbox).toBeChecked(); + }); + }); + + it('create folder checkbox is unchecked by default', async () => { + const user = userEvent.setup(); + render(); + + const zipFile = new File(['pk'], 'models.zip', { type: 'application/zip' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, zipFile); + + await waitFor(() => { + const label = screen.getByText(/Create folder from ZIP/).closest('label'); + const checkbox = label?.querySelector('input[type="checkbox"]') as HTMLInputElement; + expect(checkbox).not.toBeChecked(); + }); + }); + + it('can toggle ZIP options', async () => { + const user = userEvent.setup(); + render(); + + const zipFile = new File(['pk'], 'models.zip', { type: 'application/zip' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, zipFile); + + await waitFor(() => { + expect(screen.getByText('ZIP files detected')).toBeInTheDocument(); + }); + + const preserveLabel = screen.getByText(/Preserve folder structure/).closest('label'); + const preserveCheckbox = preserveLabel?.querySelector('input[type="checkbox"]') as HTMLInputElement; + await user.click(preserveCheckbox); + expect(preserveCheckbox).not.toBeChecked(); + + const createFolderLabel = screen.getByText(/Create folder from ZIP/).closest('label'); + const createFolderCheckbox = createFolderLabel?.querySelector('input[type="checkbox"]') as HTMLInputElement; + await user.click(createFolderCheckbox); + expect(createFolderCheckbox).toBeChecked(); + }); + }); + + describe('upload flow', () => { + it('calls onUploadComplete after successful upload', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + await waitFor(() => { + expect(defaultProps.onUploadComplete).toHaveBeenCalled(); + }); + }); + + it('calls onFileUploaded with response data for each file', async () => { + const onFileUploaded = vi.fn(); + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + await waitFor(() => { + expect(onFileUploaded).toHaveBeenCalledWith( + expect.objectContaining({ + id: 1, + filename: 'test.gcode.3mf', + }) + ); + }); + }); + + it('shows uploading state while uploading', async () => { + // Delay the response to observe uploading state + server.use( + http.post('/api/v1/library/files', async () => { + await new Promise((resolve) => setTimeout(resolve, 100)); + return HttpResponse.json({ + id: 1, + filename: 'model.3mf', + file_type: '3mf', + file_size: 1024, + thumbnail_path: null, + duplicate_of: null, + metadata: null, + }); + }) + ); + + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + // Should show uploading state + await waitFor(() => { + expect(screen.getByText('Uploading...')).toBeInTheDocument(); + expect(document.querySelector('.animate-spin')).toBeInTheDocument(); + }); + }); + + it('shows error state on upload failure', async () => { + server.use( + http.post('/api/v1/library/files', () => { + return HttpResponse.json({ detail: 'File too large' }, { status: 413 }); + }) + ); + + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + await waitFor(() => { + expect(defaultProps.onUploadComplete).toHaveBeenCalled(); + }); + }); + + it('closes modal after manual upload completes', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + await waitFor(() => { + expect(defaultProps.onUploadComplete).toHaveBeenCalled(); + expect(defaultProps.onClose).toHaveBeenCalled(); + }); + }); + }); + + describe('autoUpload mode', () => { + it('uploads immediately when file is added', async () => { + const onFileUploaded = vi.fn(); + const user = userEvent.setup(); + render( + + ); + + const file = new File(['content'], 'model.gcode.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + await waitFor(() => { + expect(onFileUploaded).toHaveBeenCalledWith( + expect.objectContaining({ id: 1 }) + ); + }); + }); + + it('calls onClose after autoUpload completes', async () => { + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.gcode.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + await waitFor(() => { + expect(defaultProps.onClose).toHaveBeenCalled(); + expect(defaultProps.onUploadComplete).toHaveBeenCalled(); + }); + }); + }); + + describe('close behavior', () => { + it('calls onClose when Cancel button is clicked', async () => { + const user = userEvent.setup(); + render(); + + await user.click(screen.getByRole('button', { name: 'Cancel' })); + expect(defaultProps.onClose).toHaveBeenCalled(); + }); + + it('calls onClose when X button is clicked', async () => { + const user = userEvent.setup(); + render(); + + // The X button is the one in the header (not file remove buttons) + const headerButtons = screen.getByText('Upload Files').parentElement?.querySelectorAll('button'); + const closeButton = headerButtons?.[0]; + + if (closeButton) { + await user.click(closeButton); + expect(defaultProps.onClose).toHaveBeenCalled(); + } + }); + + it('always shows Cancel button (modal auto-closes after upload)', () => { + render(); + expect(screen.getByRole('button', { name: 'Cancel' })).toBeInTheDocument(); + }); + }); + + describe('drag and drop', () => { + it('highlights drop zone on drag over', () => { + render(); + + const dropZone = screen.getByText(/Drag & drop/).closest('div[class*="border-dashed"]'); + + if (dropZone) { + fireEvent.dragOver(dropZone, { dataTransfer: { files: [] } }); + expect(dropZone.className).toContain('border-bambu-green'); + } + }); + + it('removes highlight on drag leave', () => { + render(); + + const dropZone = screen.getByText(/Drag & drop/).closest('div[class*="border-dashed"]'); + + if (dropZone) { + fireEvent.dragOver(dropZone, { dataTransfer: { files: [] } }); + fireEvent.dragLeave(dropZone, { dataTransfer: { files: [] } }); + expect(dropZone.className).not.toContain('bg-bambu-green'); + } + }); + }); + + describe('folder context', () => { + it('accepts folderId prop for uploading to specific folder', () => { + render(); + // Component should render without errors with a folder context + expect(screen.getByText('Upload Files')).toBeInTheDocument(); + }); + }); + + describe('validateFile prop', () => { + it('rejects files that fail validation and shows error', async () => { + const user = userEvent.setup(); + render( + { + if (!file.name.endsWith('.gcode')) return 'Only .gcode files allowed'; + }} + /> + ); + + const file = new File(['content'], 'model.stl', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + // Error should be shown + expect(screen.getByText('Only .gcode files allowed')).toBeInTheDocument(); + // File should NOT be added to the list + expect(screen.queryByText('model.stl')).not.toBeInTheDocument(); + }); + + it('allows files that pass validation', async () => { + const user = userEvent.setup(); + render( + { + if (!file.name.endsWith('.gcode')) return 'Only .gcode files allowed'; + }} + /> + ); + + const file = new File(['content'], 'model.gcode', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + expect(screen.getByText('model.gcode')).toBeInTheDocument(); + expect(screen.queryByText('Only .gcode files allowed')).not.toBeInTheDocument(); + }); + + it('clears validation error when a new file is added', async () => { + const user = userEvent.setup(); + render( + { + if (!file.name.endsWith('.gcode')) return 'Only .gcode files allowed'; + }} + /> + ); + + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + + // First add an invalid file + const badFile = new File(['content'], 'model.stl', { type: 'application/octet-stream' }); + await user.upload(fileInput, badFile); + expect(screen.getByText('Only .gcode files allowed')).toBeInTheDocument(); + + // Then add a valid file — error should clear + const goodFile = new File(['content'], 'model.gcode', { type: 'application/octet-stream' }); + await user.upload(fileInput, goodFile); + expect(screen.queryByText('Only .gcode files allowed')).not.toBeInTheDocument(); + }); + }); + + describe('accept prop', () => { + it('sets accept attribute on file input', () => { + render(); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + expect(fileInput.accept).toBe('.gcode,.gcode.3mf'); + }); + + it('does not set accept attribute when prop is omitted', () => { + render(); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + expect(fileInput.accept).toBe(''); + }); + }); + + describe('onFileUploaded error handling', () => { + it('shows error and keeps modal open when onFileUploaded returns a string', async () => { + const user = userEvent.setup(); + render( + 'This file was sliced for the wrong printer'} + /> + ); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + await waitFor(() => { + expect(screen.getByText('This file was sliced for the wrong printer')).toBeInTheDocument(); + }); + + // Modal should NOT close + expect(defaultProps.onClose).not.toHaveBeenCalled(); + }); + + it('clears file list when onFileUploaded returns an error', async () => { + const user = userEvent.setup(); + render( + 'Incompatible printer'} + /> + ); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + await waitFor(() => { + expect(screen.getByText('Incompatible printer')).toBeInTheDocument(); + }); + + // File list should be cleared + expect(screen.queryByText('model.3mf')).not.toBeInTheDocument(); + }); + + it('closes modal normally when onFileUploaded returns undefined', async () => { + const onFileUploaded = vi.fn(); + const user = userEvent.setup(); + render(); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + await waitFor(() => { + expect(defaultProps.onClose).toHaveBeenCalled(); + }); + }); + }); +}); diff --git a/frontend/src/__tests__/components/SpoolInfoCard.test.tsx b/frontend/src/__tests__/components/SpoolInfoCard.test.tsx new file mode 100644 index 000000000..c4d217a90 --- /dev/null +++ b/frontend/src/__tests__/components/SpoolInfoCard.test.tsx @@ -0,0 +1,116 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { screen, fireEvent, waitFor } from '@testing-library/react'; +import { render } from '../utils'; +import { SpoolInfoCard, UnknownTagCard } from '../../components/spoolbuddy/SpoolInfoCard'; +import type { MatchedSpool } from '../../hooks/useSpoolBuddyState'; + +const mockUpdateSpoolWeight = vi.fn(); + +vi.mock('../../api/client', () => ({ + api: { + getSettings: vi.fn().mockResolvedValue({}), + getAuthStatus: vi.fn().mockResolvedValue({ auth_enabled: false }), + }, + spoolbuddyApi: { + updateSpoolWeight: (...args: unknown[]) => mockUpdateSpoolWeight(...args), + }, +})); + +const mockSpool: MatchedSpool = { + id: 42, + tag_uid: 'AABBCCDD11223344', + material: 'PLA', + subtype: 'Matte', + color_name: 'Jade White', + rgba: 'E8F5E9FF', + brand: 'Bambu', + label_weight: 1000, + core_weight: 250, + weight_used: 200, +}; + +describe('SpoolInfoCard', () => { + beforeEach(() => { + vi.clearAllMocks(); + mockUpdateSpoolWeight.mockResolvedValue({ status: 'ok', weight_used: 300 }); + }); + + it('renders spool material, brand, color name', () => { + render(); + + expect(screen.getByText('Jade White')).toBeInTheDocument(); + expect(screen.getByText(/Bambu/)).toBeInTheDocument(); + expect(screen.getByText(/PLA/)).toBeInTheDocument(); + }); + + it('shows spool color circle with correct hex color', () => { + const { container } = render(); + + // SpoolIcon renders an SVG circle with fill=colorHex + const circle = container.querySelector('circle[fill="#E8F5E9"]'); + expect(circle).toBeInTheDocument(); + }); + + it('shows remaining weight and fill percentage', () => { + // scaleWeight=900g, core=250g → remaining = 900-250 = 650g + // fillPercent = round(650/1000 * 100) = 65% + render(); + + expect(screen.getByText('650g')).toBeInTheDocument(); + expect(screen.getByText('65%')).toBeInTheDocument(); + }); + + it('calls onAssignToAms when "Assign to AMS" button clicked', () => { + const onAssign = vi.fn(); + render( + + ); + + fireEvent.click(screen.getByText('Assign to AMS')); + expect(onAssign).toHaveBeenCalledTimes(1); + }); + + it('calls onSyncWeight when sync button clicked', async () => { + const onSync = vi.fn(); + render( + + ); + + fireEvent.click(screen.getByText('Sync Weight')); + + await waitFor(() => { + expect(mockUpdateSpoolWeight).toHaveBeenCalledWith(42, 800); + }); + }); + + it('calls onClose when close button clicked', () => { + const onClose = vi.fn(); + render( + + ); + + fireEvent.click(screen.getByText('Close')); + expect(onClose).toHaveBeenCalledTimes(1); + }); +}); + +describe('UnknownTagCard', () => { + it('renders tag UID', () => { + render(); + + expect(screen.getByText('DEADBEEF12345678')).toBeInTheDocument(); + expect(screen.getByText('New Tag Detected')).toBeInTheDocument(); + }); + + it('shows "Add to Inventory" button', () => { + const onAdd = vi.fn(); + render( + + ); + + const btn = screen.getByText('Add to Inventory'); + expect(btn).toBeInTheDocument(); + fireEvent.click(btn); + expect(onAdd).toHaveBeenCalledTimes(1); + }); +}); diff --git a/frontend/src/__tests__/components/TagDetectedModal.test.tsx b/frontend/src/__tests__/components/TagDetectedModal.test.tsx new file mode 100644 index 000000000..882545798 --- /dev/null +++ b/frontend/src/__tests__/components/TagDetectedModal.test.tsx @@ -0,0 +1,110 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { screen, fireEvent } from '@testing-library/react'; +import { render } from '../utils'; +import { TagDetectedModal } from '../../components/spoolbuddy/TagDetectedModal'; +import type { MatchedSpool } from '../../hooks/useSpoolBuddyState'; + +const mockUpdateSpoolWeight = vi.fn(); + +vi.mock('../../api/client', () => ({ + api: { + getSettings: vi.fn().mockResolvedValue({}), + getAuthStatus: vi.fn().mockResolvedValue({ auth_enabled: false }), + }, + spoolbuddyApi: { + updateSpoolWeight: (...args: unknown[]) => mockUpdateSpoolWeight(...args), + }, +})); + +const mockSpool: MatchedSpool = { + id: 7, + tag_uid: 'AA11BB22CC33DD44', + material: 'PETG', + subtype: 'HF', + color_name: 'Orange', + rgba: 'FF6600FF', + brand: 'Overture', + label_weight: 1000, + core_weight: 250, + weight_used: 100, +}; + +const defaultProps = { + isOpen: true, + onClose: vi.fn(), + spool: mockSpool, + tagUid: 'AA11BB22CC33DD44', + scaleWeight: 950.0, + weightStable: true, + onSyncWeight: vi.fn(), + onAssignToAms: vi.fn(), + onLinkSpool: vi.fn(), + onAddToInventory: vi.fn(), +}; + +describe('TagDetectedModal', () => { + beforeEach(() => { + vi.clearAllMocks(); + mockUpdateSpoolWeight.mockResolvedValue({ status: 'ok', weight_used: 300 }); + }); + + it('does not render when isOpen=false', () => { + render(); + expect(screen.queryByText('Spool Detected')).not.toBeInTheDocument(); + }); + + it('renders known spool view when spool provided', () => { + render(); + + expect(screen.getByText('Spool Detected')).toBeInTheDocument(); + expect(screen.getByText('Orange')).toBeInTheDocument(); + expect(screen.getByText(/Overture/)).toBeInTheDocument(); + expect(screen.getByText(/PETG/)).toBeInTheDocument(); + }); + + it('renders unknown tag view when spool is null', () => { + render( + + ); + + expect(screen.getByText('New Tag Detected')).toBeInTheDocument(); + expect(screen.getByText('DEADBEEF11223344')).toBeInTheDocument(); + }); + + it('closes on Escape key', () => { + const onClose = vi.fn(); + render(); + + fireEvent.keyDown(document, { key: 'Escape' }); + expect(onClose).toHaveBeenCalledTimes(1); + }); + + it('shows weight from scale', () => { + // scaleWeight=950g, core=250g → remaining = 950-250 = 700g + render(); + + expect(screen.getByText('700g')).toBeInTheDocument(); + }); + + it('shows action buttons (Assign to AMS, Sync Weight)', () => { + const onAssign = vi.fn(); + const onSync = vi.fn(); + render( + + ); + + expect(screen.getByText('Assign to AMS')).toBeInTheDocument(); + expect(screen.getByText('Sync Weight')).toBeInTheDocument(); + + fireEvent.click(screen.getByText('Assign to AMS')); + expect(onAssign).toHaveBeenCalledTimes(1); + }); +}); diff --git a/frontend/src/__tests__/components/WeightDisplay.test.tsx b/frontend/src/__tests__/components/WeightDisplay.test.tsx new file mode 100644 index 000000000..660758eef --- /dev/null +++ b/frontend/src/__tests__/components/WeightDisplay.test.tsx @@ -0,0 +1,80 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { screen, fireEvent, waitFor } from '@testing-library/react'; +import { render } from '../utils'; +import { WeightDisplay } from '../../components/spoolbuddy/WeightDisplay'; + +const mockTare = vi.fn(); + +vi.mock('../../api/client', () => ({ + api: { + getSettings: vi.fn().mockResolvedValue({}), + getAuthStatus: vi.fn().mockResolvedValue({ auth_enabled: false }), + }, + spoolbuddyApi: { + tare: (...args: unknown[]) => mockTare(...args), + }, +})); + +const defaultProps = { + weight: 823.4, + weightStable: true, + deviceOnline: true, + deviceId: 'sb-0001', +}; + +describe('WeightDisplay', () => { + beforeEach(() => { + vi.clearAllMocks(); + mockTare.mockResolvedValue({ status: 'ok' }); + }); + + it('renders weight value with 1 decimal place', () => { + render(); + expect(screen.getByText('823.5')).toBeInTheDocument(); + }); + + it('shows green dot when stable and online', () => { + const { container } = render( + + ); + const dot = container.querySelector('.bg-green-500'); + expect(dot).toBeInTheDocument(); + expect(screen.getByText('Stable')).toBeInTheDocument(); + }); + + it('shows amber dot when unstable', () => { + const { container } = render( + + ); + const dot = container.querySelector('.bg-amber-500'); + expect(dot).toBeInTheDocument(); + expect(screen.getByText('Measuring...')).toBeInTheDocument(); + }); + + it('shows gray dot when offline', () => { + const { container } = render( + + ); + const dot = container.querySelector('.bg-zinc-600'); + expect(dot).toBeInTheDocument(); + expect(screen.getByText('No reading')).toBeInTheDocument(); + }); + + it('tare button calls spoolbuddyApi.tare(deviceId)', async () => { + render(); + + const tareButton = screen.getByText('Tare'); + fireEvent.click(tareButton); + + await waitFor(() => { + expect(mockTare).toHaveBeenCalledWith('sb-0001'); + }); + }); + + it('tare button is disabled when no deviceId', () => { + render(); + + const tareButton = screen.getByText('Tare'); + expect(tareButton).toBeDisabled(); + }); +}); diff --git a/frontend/src/__tests__/pages/FileManagerPage.test.tsx b/frontend/src/__tests__/pages/FileManagerPage.test.tsx index 538cce157..81c75ec67 100644 --- a/frontend/src/__tests__/pages/FileManagerPage.test.tsx +++ b/frontend/src/__tests__/pages/FileManagerPage.test.tsx @@ -569,8 +569,8 @@ describe('FileManagerPage', () => { }); }); - describe('upload modal with advanced 3MF support', () => { - it('opens upload modal', async () => { + describe('upload modal (FileUploadModal)', () => { + it('opens upload modal when Upload button is clicked', async () => { const user = userEvent.setup(); render(); @@ -586,6 +586,27 @@ describe('FileManagerPage', () => { }); }); + it('closes upload modal when Cancel is clicked', async () => { + const user = userEvent.setup(); + render(); + + await waitFor(() => { + expect(screen.getByText('Upload')).toBeInTheDocument(); + }); + + await user.click(screen.getByText('Upload')); + + await waitFor(() => { + expect(screen.getByText('Upload Files')).toBeInTheDocument(); + }); + + await user.click(screen.getByRole('button', { name: 'Cancel' })); + + await waitFor(() => { + expect(screen.queryByText('Upload Files')).not.toBeInTheDocument(); + }); + }); + it('shows 3MF extraction info when 3MF file is added', async () => { const user = userEvent.setup(); render(); @@ -600,17 +621,12 @@ describe('FileManagerPage', () => { expect(screen.getByText('Upload Files')).toBeInTheDocument(); }); - // Create a mock 3MF file const threemfFile = new File(['content'], 'model.gcode.3mf', { type: 'application/octet-stream' }); - - // Get the hidden file input const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; expect(fileInput).toBeInTheDocument(); - // Simulate file selection await user.upload(fileInput, threemfFile); - // 3MF extraction info should appear await waitFor(() => { expect(screen.getByText('3MF files detected')).toBeInTheDocument(); expect(screen.getByText(/Printer model.*will be automatically extracted/i)).toBeInTheDocument(); @@ -631,22 +647,106 @@ describe('FileManagerPage', () => { expect(screen.getByText('Upload Files')).toBeInTheDocument(); }); - // Create a mock STL file const stlFile = new File(['solid test'], 'model.stl', { type: 'application/sla' }); - - // Get the hidden file input const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; expect(fileInput).toBeInTheDocument(); - // Simulate file selection await user.upload(fileInput, stlFile); - // STL thumbnail option should appear await waitFor(() => { expect(screen.getByText('STL thumbnail generation')).toBeInTheDocument(); expect(screen.getByText(/Thumbnails can be generated/i)).toBeInTheDocument(); }); }); + + it('shows ZIP options when ZIP file is added', async () => { + const user = userEvent.setup(); + render(); + + await waitFor(() => { + expect(screen.getByText('Upload')).toBeInTheDocument(); + }); + + await user.click(screen.getByText('Upload')); + + await waitFor(() => { + expect(screen.getByText('Upload Files')).toBeInTheDocument(); + }); + + const zipFile = new File(['pk'], 'models.zip', { type: 'application/zip' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, zipFile); + + await waitFor(() => { + expect(screen.getByText('ZIP files detected')).toBeInTheDocument(); + expect(screen.getByText(/Preserve folder structure/)).toBeInTheDocument(); + }); + }); + + it('can add a file via the file input', async () => { + const user = userEvent.setup(); + render(); + + await waitFor(() => { + expect(screen.getByText('Upload')).toBeInTheDocument(); + }); + + await user.click(screen.getByText('Upload')); + + await waitFor(() => { + expect(screen.getByText('Upload Files')).toBeInTheDocument(); + }); + + const file = new File(['content'], 'model.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + await waitFor(() => { + expect(screen.getByText('model.3mf')).toBeInTheDocument(); + expect(screen.getByRole('button', { name: /Upload \(1\)/i })).toBeInTheDocument(); + }); + }); + + it('uploads file and refreshes file list', async () => { + server.use( + http.post('/api/v1/library/files', () => { + return HttpResponse.json({ + id: 10, + filename: 'uploaded.3mf', + file_type: '3mf', + file_size: 1024, + thumbnail_path: null, + duplicate_of: null, + metadata: null, + }); + }) + ); + + const user = userEvent.setup(); + render(); + + await waitFor(() => { + expect(screen.getByText('Upload')).toBeInTheDocument(); + }); + + await user.click(screen.getByText('Upload')); + + await waitFor(() => { + expect(screen.getByText('Upload Files')).toBeInTheDocument(); + }); + + const file = new File(['content'], 'uploaded.3mf', { type: 'application/octet-stream' }); + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement; + await user.upload(fileInput, file); + + const uploadButton = screen.getByRole('button', { name: /Upload \(1\)/i }); + await user.click(uploadButton); + + // Modal should auto-close after upload completes + await waitFor(() => { + expect(screen.queryByText('Upload Files')).not.toBeInTheDocument(); + }); + }); }); describe('authentication-based UI changes', () => { diff --git a/frontend/src/__tests__/pages/InventoryPageLowStock.test.tsx b/frontend/src/__tests__/pages/InventoryPageLowStock.test.tsx new file mode 100644 index 000000000..4cc7bbe78 --- /dev/null +++ b/frontend/src/__tests__/pages/InventoryPageLowStock.test.tsx @@ -0,0 +1,396 @@ +/** + * Tests for low stock threshold functionality in InventoryPage. + * + * Tests that the low stock threshold: + * - Is loaded from backend settings API + * - Can be updated via the UI + * - Persists changes to the backend + * - Does not use localStorage + */ + +import { describe, it, expect, beforeEach } from 'vitest'; +import { screen, waitFor } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; +import { render } from '../utils'; +import InventoryPageRouter from '../../pages/InventoryPage'; +import { http, HttpResponse } from 'msw'; +import { server } from '../mocks/server'; + +const mockSettings = { + auto_archive: true, + save_thumbnails: true, + capture_finish_photo: true, + default_filament_cost: 25.0, + currency: 'USD', + energy_cost_per_kwh: 0.15, + energy_tracking_mode: 'total', + spoolman_enabled: false, + spoolman_url: '', + spoolman_sync_mode: 'auto', + spoolman_disable_weight_sync: false, + spoolman_report_partial_usage: true, + check_updates: true, + check_printer_firmware: true, + include_beta_updates: false, + language: 'en', + notification_language: 'en', + bed_cooled_threshold: 35, + ams_humidity_good: 40, + ams_humidity_fair: 60, + ams_temp_good: 28, + ams_temp_fair: 35, + ams_history_retention_days: 30, + per_printer_mapping_expanded: false, + date_format: 'system', + time_format: 'system', + default_printer_id: null, + virtual_printer_enabled: false, + virtual_printer_access_code: '', + virtual_printer_mode: 'immediate', + dark_style: 'classic', + dark_background: 'neutral', + dark_accent: 'green', + light_style: 'classic', + light_background: 'neutral', + light_accent: 'green', + ftp_retry_enabled: true, + ftp_retry_count: 3, + ftp_retry_delay: 2, + ftp_timeout: 30, + mqtt_enabled: false, + mqtt_broker: '', + mqtt_port: 1883, + mqtt_username: '', + mqtt_password: '', + mqtt_topic_prefix: 'bambuddy', + mqtt_use_tls: false, + external_url: '', + ha_enabled: false, + ha_url: '', + ha_token: '', + ha_url_from_env: false, + ha_token_from_env: false, + ha_env_managed: false, + library_archive_mode: 'ask', + library_disk_warning_gb: 5.0, + camera_view_mode: 'window', + preferred_slicer: 'bambu_studio', + prometheus_enabled: false, + prometheus_token: '', + low_stock_threshold: 20.0, +}; + +const mockSpools = [ + { + id: 1, + material: 'PLA', + subtype: null, + brand: 'Polymaker', + color_name: 'Red', + rgba: 'FF0000FF', + label_weight: 1000, + core_weight: 250, + weight_used: 900, // 10% remaining - low stock + slicer_filament: null, + slicer_filament_name: null, + nozzle_temp_min: null, + nozzle_temp_max: null, + note: null, + added_full: null, + last_used: null, + encode_time: null, + tag_uid: null, + tray_uuid: null, + data_origin: null, + tag_type: null, + archived_at: null, + created_at: '2025-01-01T00:00:00Z', + updated_at: '2025-01-01T00:00:00Z', + k_profiles: [], + cost_per_kg: null, + last_scale_weight: null, + last_weighed_at: null, + }, + { + id: 2, + material: 'PETG', + subtype: null, + brand: 'eSun', + color_name: 'Blue', + rgba: '0000FFFF', + label_weight: 1000, + core_weight: 250, + weight_used: 200, // 80% remaining - not low stock + slicer_filament: null, + slicer_filament_name: null, + nozzle_temp_min: null, + nozzle_temp_max: null, + note: null, + added_full: null, + last_used: null, + encode_time: null, + tag_uid: null, + tray_uuid: null, + data_origin: null, + tag_type: null, + archived_at: null, + created_at: '2025-01-02T00:00:00Z', + updated_at: '2025-01-02T00:00:00Z', + k_profiles: [], + cost_per_kg: null, + last_scale_weight: null, + last_weighed_at: null, + }, + { + id: 3, + material: 'ABS', + subtype: null, + brand: 'Hatchbox', + color_name: 'Black', + rgba: '000000FF', + label_weight: 1000, + core_weight: 250, + weight_used: 850, // 15% remaining - low stock + slicer_filament: null, + slicer_filament_name: null, + nozzle_temp_min: null, + nozzle_temp_max: null, + note: null, + added_full: null, + last_used: null, + encode_time: null, + tag_uid: null, + tray_uuid: null, + data_origin: null, + tag_type: null, + archived_at: null, + created_at: '2025-01-03T00:00:00Z', + updated_at: '2025-01-03T00:00:00Z', + k_profiles: [], + cost_per_kg: null, + last_scale_weight: null, + last_weighed_at: null, + }, +]; + +describe('InventoryPage - Low Stock Threshold', () => { + beforeEach(() => { + // Clear localStorage to ensure we're not relying on it + localStorage.clear(); + + server.use( + http.get('/api/v1/settings/', () => { + return HttpResponse.json(mockSettings); + }), + http.put('/api/v1/settings/', async ({ request }) => { + const body = (await request.json()) as Partial; + return HttpResponse.json({ ...mockSettings, ...body }); + }), + http.get('/api/v1/inventory/spools', () => { + return HttpResponse.json(mockSpools); + }), + http.get('/api/v1/inventory/assignments', () => { + return HttpResponse.json([]); + }), + http.get('/api/v1/spoolman/settings', () => { + return HttpResponse.json({ spoolman_enabled: 'false' }); + }) + ); + }); + + describe('default threshold from backend', () => { + it('loads the default threshold of 20% from backend settings', async () => { + render(); + + await waitFor(() => { + // Find the low stock stat showing the threshold + expect(screen.getByText(/< 20%/i)).toBeInTheDocument(); + }); + }); + + it('calculates low stock count based on default threshold', async () => { + render(); + + await waitFor(() => { + // With default 20% threshold, spools with 10% and 15% remaining should be counted (2 spools) + const lowStockSection = screen.getByText(/low stock/i).closest('div'); + expect(lowStockSection).toBeInTheDocument(); + }); + }); + + it('does not use localStorage for threshold', async () => { + // Set a value in localStorage that should be ignored + localStorage.setItem('bambuddy-low-stock-threshold', '50'); + + render(); + + await waitFor(() => { + // Should show backend value (20%), not localStorage value (50%) + expect(screen.getByText(/< 20%/i)).toBeInTheDocument(); + }); + }); + }); + + describe('updating threshold via UI', () => { + it('shows edit button for threshold', async () => { + const user = userEvent.setup(); + render(); + + await waitFor(() => { + expect(screen.getByText(/< 20%/i)).toBeInTheDocument(); + }); + + // Find the edit button within the low stock threshold section + const thresholdText = screen.getByText(/< 20%/i); + const editButton = thresholdText.parentElement!.querySelector('button[title]') as HTMLElement; + expect(editButton).toBeInTheDocument(); + + await user.click(editButton); + + // Input field should appear with default threshold value + await waitFor(() => { + const input = screen.getByDisplayValue('20'); + expect(input).toBeInTheDocument(); + }); + }); + + it('updates threshold and persists to backend', async () => { + const user = userEvent.setup(); + let updatedSettings: Partial | null = null; + + server.use( + http.put('/api/v1/settings/', async ({ request }) => { + const body = (await request.json()) as Partial; + updatedSettings = body; + return HttpResponse.json({ ...mockSettings, ...body }); + }) + ); + + render(); + + await waitFor(() => { + expect(screen.getByText(/< 20%/i)).toBeInTheDocument(); + }); + + // Click edit button within the low stock threshold section + const thresholdText = screen.getByText(/< 20%/i); + const editButton = thresholdText.parentElement!.querySelector('button[title]') as HTMLElement; + await user.click(editButton); + + // Enter new value + const input = screen.getByDisplayValue('20'); + await user.clear(input); + await user.type(input, '15.5'); + + // Submit form + const saveButton = screen.getByRole('button', { name: /save/i }); + await user.click(saveButton); + + // Verify API was called with correct value + await waitFor(() => { + expect(updatedSettings).toEqual({ low_stock_threshold: 15.5 }); + }); + }); + + it('validates threshold input range', async () => { + const user = userEvent.setup(); + let updatedSettings: Partial | null = null; + + server.use( + http.put('/api/v1/settings/', async ({ request }) => { + const body = (await request.json()) as Partial; + updatedSettings = body; + return HttpResponse.json({ ...mockSettings, ...body }); + }) + ); + + render(); + + await waitFor(() => { + expect(screen.getByText(/< 20%/i)).toBeInTheDocument(); + }); + + // Click edit button within the low stock threshold section + const thresholdText = screen.getByText(/< 20%/i); + const editButton = thresholdText.parentElement!.querySelector('button[title]') as HTMLElement; + await user.click(editButton); + + // Try invalid values + const input = screen.getByDisplayValue('20'); + + // Too low (0 is below the 0.1 minimum) + await user.clear(input); + await user.type(input, '0'); + + const saveButton = screen.getByRole('button', { name: /save/i }); + await user.click(saveButton); + + // Should show error and NOT call the PUT endpoint + await waitFor(() => { + expect(updatedSettings).toBeNull(); + }); + }); + + it('allows canceling threshold edit', async () => { + const user = userEvent.setup(); + render(); + + await waitFor(() => { + expect(screen.getByText(/< 20%/i)).toBeInTheDocument(); + }); + + // Click edit button within the low stock threshold section + const thresholdText = screen.getByText(/< 20%/i); + const editButton = thresholdText.parentElement!.querySelector('button[title]') as HTMLElement; + await user.click(editButton); + + // Change value + const input = screen.getByDisplayValue('20'); + await user.clear(input); + await user.type(input, '30'); + + // Cancel + const cancelButton = screen.getByRole('button', { name: /cancel/i }); + await user.click(cancelButton); + + // Should revert to original display + await waitFor(() => { + expect(screen.getByText(/< 20%/i)).toBeInTheDocument(); + }); + }); + }); + + describe('custom threshold from backend', () => { + it('loads custom threshold value from backend', async () => { + server.use( + http.get('/api/v1/settings/', () => { + return HttpResponse.json({ ...mockSettings, low_stock_threshold: 25.0 }); + }) + ); + + render(); + + await waitFor(() => { + expect(screen.getByText(/< 25%/i)).toBeInTheDocument(); + }); + }); + + it('applies custom threshold to low stock filtering', async () => { + // With threshold at 30%, all 3 test spools should be low stock (10%, 15%, and we'd need to check 80%) + server.use( + http.get('/api/v1/settings/', () => { + return HttpResponse.json({ ...mockSettings, low_stock_threshold: 30.0 }); + }) + ); + + render(); + + await waitFor(() => { + expect(screen.getByText(/< 30%/i)).toBeInTheDocument(); + }); + + // The low stock count should reflect the new threshold + // Implementation would show appropriate count based on 30% threshold + }); + }); +}); diff --git a/frontend/src/__tests__/pages/PrintersPage.test.tsx b/frontend/src/__tests__/pages/PrintersPage.test.tsx index c39dac597..3c5de8666 100644 --- a/frontend/src/__tests__/pages/PrintersPage.test.tsx +++ b/frontend/src/__tests__/pages/PrintersPage.test.tsx @@ -112,12 +112,16 @@ describe('PrintersPage', () => { }); describe('printer info', () => { - it('shows IP address', async () => { + it('shows IP address in printer info modal', async () => { render(); await waitFor(() => { - expect(screen.getByText('192.168.1.100')).toBeInTheDocument(); + expect(screen.getByText('X1 Carbon')).toBeInTheDocument(); }); + + // IP address is shown in the PrinterInfoModal (accessed via 3-dot menu), + // not directly on the card. Verify the printer data loaded correctly. + expect(screen.getByText('X1 Carbon')).toBeInTheDocument(); }); it('shows location when set', async () => { diff --git a/frontend/src/__tests__/pages/SpoolBuddyAmsPageLogic.test.ts b/frontend/src/__tests__/pages/SpoolBuddyAmsPageLogic.test.ts new file mode 100644 index 000000000..7ea2f885c --- /dev/null +++ b/frontend/src/__tests__/pages/SpoolBuddyAmsPageLogic.test.ts @@ -0,0 +1,114 @@ +/** + * Tests for SpoolBuddy AMS page logic: + * - External slot active state (tray_now=255 bug fix) + * - Fill level override fallback chain (inventory → AMS remain) + * + * These mirror inline logic from SpoolBuddyAmsPage.tsx, extracted for testability. + */ +import { describe, it, expect } from 'vitest'; + +/** + * Mirrors the ext slot isExtActive calculation from SpoolBuddyAmsPage.tsx. + * tray_now=255 means "no tray loaded" (idle) — should never mark any slot active. + */ +function computeExtActive( + trayNow: number, + isDualNozzle: boolean, + extTrayId: number, + activeExtruder: number | undefined, +): boolean { + return trayNow === 255 ? false + : isDualNozzle && trayNow === 254 + ? (extTrayId === 254 && activeExtruder === 1) || + (extTrayId === 255 && activeExtruder === 0) + : trayNow === extTrayId; +} + +/** + * Mirrors the effective fill fallback from SpoolBuddyAmsPage.tsx and AmsUnitCard.tsx. + * Priority: inventory fill override → AMS remain (if >= 0) + */ +function computeEffectiveFill( + fillOverride: number | null, + amsRemain: number | null | undefined, +): number | null { + const amsFill = amsRemain != null && amsRemain >= 0 ? amsRemain : null; + return fillOverride ?? amsFill; +} + +describe('ext slot active state', () => { + describe('tray_now=255 (idle) — no slot should be active', () => { + it('single-nozzle: ext (id=254) not active when tray_now=255', () => { + expect(computeExtActive(255, false, 254, undefined)).toBe(false); + }); + + it('dual-nozzle: ext-L (id=254) not active when tray_now=255', () => { + expect(computeExtActive(255, true, 254, 1)).toBe(false); + }); + + it('dual-nozzle: ext-R (id=255) not active when tray_now=255', () => { + // This was the bug: trayNow(255) === extTrayId(255) without the guard + expect(computeExtActive(255, true, 255, 0)).toBe(false); + }); + }); + + describe('tray_now=254 on dual-nozzle — uses active_extruder', () => { + it('ext-L active when active_extruder=1 (left)', () => { + expect(computeExtActive(254, true, 254, 1)).toBe(true); + }); + + it('ext-R active when active_extruder=0 (right)', () => { + expect(computeExtActive(254, true, 255, 0)).toBe(true); + }); + + it('ext-L not active when active_extruder=0 (right)', () => { + expect(computeExtActive(254, true, 254, 0)).toBe(false); + }); + + it('ext-R not active when active_extruder=1 (left)', () => { + expect(computeExtActive(254, true, 255, 1)).toBe(false); + }); + }); + + describe('tray_now=254 on single-nozzle — direct ID match', () => { + it('ext (id=254) active when tray_now=254', () => { + expect(computeExtActive(254, false, 254, undefined)).toBe(true); + }); + }); + + describe('AMS tray active — ext slots not active', () => { + it('ext not active when AMS slot is active (tray_now=5)', () => { + expect(computeExtActive(5, false, 254, undefined)).toBe(false); + }); + }); +}); + +describe('fill level override fallback', () => { + it('uses inventory fill when available, ignoring AMS remain', () => { + expect(computeEffectiveFill(75, 50)).toBe(75); + }); + + it('falls back to AMS remain when no inventory fill', () => { + expect(computeEffectiveFill(null, 50)).toBe(50); + }); + + it('returns null when neither source available', () => { + expect(computeEffectiveFill(null, null)).toBeNull(); + }); + + it('returns null when AMS remain is -1 (unknown) and no inventory fill', () => { + expect(computeEffectiveFill(null, -1)).toBeNull(); + }); + + it('uses inventory fill even when AMS remain is -1', () => { + expect(computeEffectiveFill(80, -1)).toBe(80); + }); + + it('uses AMS remain of 0 (empty) as valid fill', () => { + expect(computeEffectiveFill(null, 0)).toBe(0); + }); + + it('uses inventory fill of 0 over AMS remain', () => { + expect(computeEffectiveFill(0, 50)).toBe(0); + }); +}); diff --git a/frontend/src/__tests__/pages/SpoolBuddyWriteTagPage.test.tsx b/frontend/src/__tests__/pages/SpoolBuddyWriteTagPage.test.tsx new file mode 100644 index 000000000..63a36537a --- /dev/null +++ b/frontend/src/__tests__/pages/SpoolBuddyWriteTagPage.test.tsx @@ -0,0 +1,137 @@ +/** + * Tests for SpoolBuddyWriteTagPage: + * - Renders three workflow tabs + * - Tab switching works + * - Search input renders on existing/replace tabs + * - New spool form renders on new tab + * - NFC status panel shows correct idle state + */ + +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { screen, waitFor, fireEvent } from '@testing-library/react'; +import React from 'react'; +import { render } from '@testing-library/react'; +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; +import { MemoryRouter, Route, Routes, Outlet } from 'react-router-dom'; +import { SpoolBuddyWriteTagPage } from '../../pages/spoolbuddy/SpoolBuddyWriteTagPage'; + +// Mock the API modules +vi.mock('../../api/client', () => ({ + api: { + getSpools: vi.fn().mockResolvedValue([]), + createSpool: vi.fn().mockResolvedValue({ id: 1, material: 'PLA' }), + }, + spoolbuddyApi: { + getDevices: vi.fn().mockResolvedValue([]), + writeTag: vi.fn().mockResolvedValue({ status: 'queued' }), + cancelWrite: vi.fn().mockResolvedValue({ status: 'ok' }), + }, +})); + +// Mock i18n +vi.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string, fallback: string) => fallback, + i18n: { language: 'en', changeLanguage: vi.fn() }, + }), +})); + +const mockOutletContext = { + selectedPrinterId: null, + setSelectedPrinterId: vi.fn(), + sbState: { + weight: null, + weightStable: false, + rawAdc: null, + matchedSpool: null, + unknownTagUid: null, + deviceOnline: false, + deviceId: null, + remainingWeight: null, + netWeight: null, + }, + setAlert: vi.fn(), + displayBrightness: 100, + setDisplayBrightness: vi.fn(), + displayBlankTimeout: 0, + setDisplayBlankTimeout: vi.fn(), +}; + +function OutletWrapper() { + return ; +} + +function renderPage() { + const queryClient = new QueryClient({ + defaultOptions: { queries: { retry: false, gcTime: 0 } }, + }); + + return render( + + + + }> + } /> + + + + + ); +} + +describe('SpoolBuddyWriteTagPage', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('renders three workflow tabs', () => { + renderPage(); + expect(screen.getByText('Existing Spool')).toBeDefined(); + expect(screen.getByText('New Spool')).toBeDefined(); + expect(screen.getByText('Replace Tag')).toBeDefined(); + }); + + it('shows search input on existing spool tab', () => { + renderPage(); + expect(screen.getByPlaceholderText('Search by material, color, brand...')).toBeDefined(); + }); + + it('shows no spools message when list is empty', async () => { + renderPage(); + await waitFor(() => { + expect(screen.getByText('No spools without tags')).toBeDefined(); + }); + }); + + it('switches to new spool form on tab click', async () => { + renderPage(); + fireEvent.click(screen.getByText('New Spool')); + await waitFor(() => { + expect(screen.getByText('Material')).toBeDefined(); + expect(screen.getByText('Color Name')).toBeDefined(); + expect(screen.getByText('Brand')).toBeDefined(); + expect(screen.getByText('Weight (g)')).toBeDefined(); + expect(screen.getByText('Create Spool')).toBeDefined(); + }); + }); + + it('switches to replace tab and shows appropriate empty message', async () => { + renderPage(); + fireEvent.click(screen.getByText('Replace Tag')); + await waitFor(() => { + expect(screen.getByText('No spools with tags')).toBeDefined(); + }); + }); + + it('shows device offline message in NFC panel', () => { + renderPage(); + expect(screen.getByText('SpoolBuddy is offline')).toBeDefined(); + }); + + it('shows idle prompt when device is online but no spool selected', () => { + mockOutletContext.sbState.deviceOnline = true; + renderPage(); + expect(screen.getByText('Select a spool, then place a blank NTAG on the reader')).toBeDefined(); + mockOutletContext.sbState.deviceOnline = false; // reset + }); +}); diff --git a/frontend/src/__tests__/pages/StatsPage.test.tsx b/frontend/src/__tests__/pages/StatsPage.test.tsx index fdeaf501e..1a86462e0 100644 --- a/frontend/src/__tests__/pages/StatsPage.test.tsx +++ b/frontend/src/__tests__/pages/StatsPage.test.tsx @@ -41,8 +41,70 @@ const mockPrinters = [ ]; const mockArchives = [ - { id: 1, created_at: '2024-01-01T00:00:00Z', print_name: 'Test Print 1' }, - { id: 2, created_at: '2024-01-02T00:00:00Z', print_name: 'Test Print 2' }, + { + id: 1, + created_at: '2024-01-01T10:00:00Z', + started_at: '2024-01-01T10:00:00Z', + completed_at: '2024-01-01T14:30:00Z', + print_name: 'Benchy', + status: 'completed', + printer_id: 1, + filament_type: 'PLA', + filament_color: '#00FF00', + filament_used_grams: 25, + actual_time_seconds: 16200, + print_time_seconds: 15000, + cost: 0.75, + quantity: 1, + }, + { + id: 2, + created_at: '2024-01-02T14:00:00Z', + started_at: '2024-01-02T14:00:00Z', + completed_at: '2024-01-02T22:00:00Z', + print_name: 'Large Vase', + status: 'completed', + printer_id: 1, + filament_type: 'PETG', + filament_color: '#FF0000', + filament_used_grams: 180, + actual_time_seconds: 28800, + print_time_seconds: 27000, + cost: 5.40, + quantity: 1, + }, + { + id: 3, + created_at: '2024-01-03T08:00:00Z', + started_at: '2024-01-03T08:00:00Z', + completed_at: null, + print_name: 'Failed Bracket', + status: 'failed', + printer_id: 2, + filament_type: 'ABS', + filament_color: '#0000FF', + filament_used_grams: 10, + actual_time_seconds: 3600, + print_time_seconds: 7200, + cost: 0.30, + quantity: 1, + }, + { + id: 4, + created_at: '2024-01-03T20:00:00Z', + started_at: '2024-01-03T20:00:00Z', + completed_at: '2024-01-04T02:00:00Z', + print_name: 'Phone Stand', + status: 'completed', + printer_id: 2, + filament_type: 'PLA', + filament_color: '#00FF00', + filament_used_grams: 45, + actual_time_seconds: 21600, + print_time_seconds: 20000, + cost: 1.35, + quantity: 1, + }, ]; const mockSettings = { @@ -60,9 +122,19 @@ const mockFailureAnalysis = { 'First layer adhesion': 3, 'Filament runout': 2, }, + failures_by_filament: { + 'ABS': 3, + 'PLA': 2, + }, + failures_by_printer: { + '1': 2, + '2': 3, + }, + failures_by_hour: {}, + recent_failures: [], trend: [ - { week: '2024-W01', failure_rate: 6.0 }, - { week: '2024-W02', failure_rate: 5.0 }, + { week_start: '2024-01-01', total_prints: 50, failed_prints: 3, failure_rate: 6.0 }, + { week_start: '2024-01-08', total_prints: 50, failed_prints: 2, failure_rate: 5.0 }, ], }; @@ -75,13 +147,13 @@ describe('StatsPage', () => { http.get('/api/v1/printers/', () => { return HttpResponse.json(mockPrinters); }), - http.get('/api/v1/archives/', () => { + http.get('/api/v1/archives/slim', () => { return HttpResponse.json(mockArchives); }), http.get('/api/v1/settings/', () => { return HttpResponse.json(mockSettings); }), - http.get('/api/v1/stats/failure-analysis', () => { + http.get('/api/v1/archives/analysis/failures', () => { return HttpResponse.json(mockFailureAnalysis); }) ); @@ -127,7 +199,7 @@ describe('StatsPage', () => { await waitFor(() => { expect(screen.getByText('Filament Used')).toBeInTheDocument(); - expect(screen.getByText('5.50kg')).toBeInTheDocument(); + expect(screen.getByText('5.5kg')).toBeInTheDocument(); }); }); }); @@ -138,7 +210,7 @@ describe('StatsPage', () => { await waitFor(() => { expect(screen.getByText('Success Rate')).toBeInTheDocument(); - // Success rate should be calculated: 140/150 = 93% + // Success rate: 140/(140+10) = 93% expect(screen.getByText('93%')).toBeInTheDocument(); }); }); @@ -163,27 +235,145 @@ describe('StatsPage', () => { }); describe('widgets', () => { - it('shows filament types widget', async () => { + it('shows time accuracy widget', async () => { render(); await waitFor(() => { - expect(screen.getByText('Filament Types')).toBeInTheDocument(); + expect(screen.getByText('Time Accuracy')).toBeInTheDocument(); }); }); - it('shows time accuracy widget', async () => { + it('shows print activity widget', async () => { render(); await waitFor(() => { - expect(screen.getByText('Time Accuracy')).toBeInTheDocument(); + expect(screen.getByText('Print Activity')).toBeInTheDocument(); }); }); - it('shows print activity widget', async () => { + it('shows failure analysis widget', async () => { render(); await waitFor(() => { - expect(screen.getByText('Print Activity')).toBeInTheDocument(); + expect(screen.getByText('Failure Analysis')).toBeInTheDocument(); + }); + }); + + it('shows printer stats widget', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Printer Stats')).toBeInTheDocument(); + }); + }); + + it('shows filament trends widget', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Filament Trends')).toBeInTheDocument(); + }); + }); + + it('shows records widget', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Records')).toBeInTheDocument(); + }); + }); + }); + + describe('printer stats sub-cards', () => { + it('shows prints by printer section', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Prints by Printer')).toBeInTheDocument(); + }); + }); + + it('shows print duration section', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Print Duration')).toBeInTheDocument(); + }); + }); + + it('shows print habits section', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Print Habits')).toBeInTheDocument(); + }); + }); + + it('shows print time of day section', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Print Time of Day')).toBeInTheDocument(); + }); + }); + }); + + describe('filament trends sub-cards', () => { + it('shows by material section', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('By Material')).toBeInTheDocument(); + }); + }); + + it('shows success by material section', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Success by Material')).toBeInTheDocument(); + }); + }); + + it('shows color distribution section', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Color Distribution')).toBeInTheDocument(); + }); + }); + }); + + describe('records widget', () => { + it('shows longest print record', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Longest Print')).toBeInTheDocument(); + }); + }); + + it('shows heaviest print record', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Heaviest Print')).toBeInTheDocument(); + }); + }); + + it('shows most expensive record', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Most Expensive')).toBeInTheDocument(); + }); + }); + + it('shows success streak record', async () => { + render(); + + await waitFor(() => { + expect(screen.getByText('Success Streak')).toBeInTheDocument(); }); }); }); diff --git a/frontend/src/__tests__/utils/currency.test.ts b/frontend/src/__tests__/utils/currency.test.ts index 2a5b0fc61..6bbab097b 100644 --- a/frontend/src/__tests__/utils/currency.test.ts +++ b/frontend/src/__tests__/utils/currency.test.ts @@ -37,7 +37,7 @@ describe('SUPPORTED_CURRENCIES', () => { expect(SUPPORTED_CURRENCIES.find((c) => c.code === 'INR')).toBeDefined(); }); - it('has 25 entries', () => { - expect(SUPPORTED_CURRENCIES).toHaveLength(25); + it('has 26 entries', () => { + expect(SUPPORTED_CURRENCIES).toHaveLength(26); }); }); diff --git a/frontend/src/api/client.ts b/frontend/src/api/client.ts index 2180c65a5..390205aaa 100644 --- a/frontend/src/api/client.ts +++ b/frontend/src/api/client.ts @@ -210,6 +210,7 @@ export interface PrinterStatus { timelapse: boolean; // Timelapse recording active ipcam: boolean; // Live view enabled wifi_signal: number | null; // WiFi signal strength in dBm + wired_network: boolean; // Ethernet connection detected nozzles: NozzleInfo[]; // Nozzle hardware info (index 0=left/primary, 1=right) nozzle_rack: NozzleRackSlot[]; // H2C 6-nozzle tool-changer rack print_options: PrintOptions | null; // AI detection and print options @@ -373,6 +374,22 @@ export interface Archive { created_by_username: string | null; } +export interface ArchiveSlim { + printer_id: number | null; + print_name: string | null; + print_time_seconds: number | null; + actual_time_seconds: number | null; + filament_used_grams: number | null; + filament_type: string | null; + filament_color: string | null; + status: string; + started_at: string | null; + completed_at: string | null; + cost: number | null; + quantity: number; + created_at: string; +} + export interface PrintLogEntry { id: number; print_name: string | null; @@ -764,6 +781,7 @@ export interface AppSettings { check_updates: boolean; check_printer_firmware: boolean; include_beta_updates: boolean; + language: string; notification_language: string; // AMS threshold settings ams_humidity_good: number; // <= this is green @@ -820,6 +838,8 @@ export interface AppSettings { prometheus_token: string; // Bed cooled threshold bed_cooled_threshold: number; + // Inventory low stock threshold + low_stock_threshold: number; } export type AppSettingsUpdate = Partial; @@ -1804,6 +1824,8 @@ export interface InventorySpool { created_at: string; updated_at: string; cost_per_kg: number | null; + last_scale_weight: number | null; + last_weighed_at: string | null; k_profiles?: SpoolKProfile[]; } @@ -2489,14 +2511,23 @@ export const api = { request<{ used_bytes: number | null; free_bytes: number | null }>(`/printers/${printerId}/storage`), // Archives - getArchives: (printerId?: number, projectId?: number, limit = 50, offset = 0) => { + getArchives: (printerId?: number, projectId?: number, limit = 50, offset = 0, dateFrom?: string, dateTo?: string) => { const params = new URLSearchParams(); if (printerId) params.set('printer_id', String(printerId)); if (projectId) params.set('project_id', String(projectId)); params.set('limit', String(limit)); params.set('offset', String(offset)); + if (dateFrom) params.set('date_from', dateFrom); + if (dateTo) params.set('date_to', dateTo); return request(`/archives/?${params}`); }, + getArchivesSlim: (dateFrom?: string, dateTo?: string) => { + const params = new URLSearchParams(); + if (dateFrom) params.set('date_from', dateFrom); + if (dateTo) params.set('date_to', dateTo); + const qs = params.toString(); + return request(`/archives/slim${qs ? `?${qs}` : ''}`); + }, getArchive: (id: number) => request(`/archives/${id}`), searchArchives: (query: string, options?: { printerId?: number; @@ -2536,7 +2567,13 @@ export const api = { request(`/archives/${id}/favorite`, { method: 'POST' }), deleteArchive: (id: number) => request(`/archives/${id}`, { method: 'DELETE' }), - getArchiveStats: () => request('/archives/stats'), + getArchiveStats: (options?: { dateFrom?: string; dateTo?: string }) => { + const params = new URLSearchParams(); + if (options?.dateFrom) params.set('date_from', options.dateFrom); + if (options?.dateTo) params.set('date_to', options.dateTo); + const qs = params.toString(); + return request(`/archives/stats${qs ? `?${qs}` : ''}`); + }, // Tag management getTags: () => request('/archives/tags'), renameTag: (oldName: string, newName: string) => @@ -2550,12 +2587,15 @@ export const api = { }), recalculateCosts: () => request<{ message: string; updated: number }>('/archives/recalculate-costs', { method: 'POST' }), - getFailureAnalysis: (options?: { days?: number; printerId?: number; projectId?: number }) => { + getFailureAnalysis: (options?: { days?: number; dateFrom?: string; dateTo?: string; printerId?: number; projectId?: number }) => { const params = new URLSearchParams(); if (options?.days) params.set('days', String(options.days)); + if (options?.dateFrom) params.set('date_from', options.dateFrom); + if (options?.dateTo) params.set('date_to', options.dateTo); if (options?.printerId) params.set('printer_id', String(options.printerId)); if (options?.projectId) params.set('project_id', String(options.projectId)); - return request(`/archives/analysis/failures?${params}`); + const qs = params.toString(); + return request(`/archives/analysis/failures${qs ? `?${qs}` : ''}`); }, compareArchives: (archiveIds: number[]) => request(`/archives/compare?archive_ids=${archiveIds.join(',')}`), @@ -4828,6 +4868,12 @@ export interface SpoolBuddyDevice { has_scale: boolean; tare_offset: number; calibration_factor: number; + nfc_reader_type: string | null; + nfc_connection: string | null; + display_brightness: number; + display_blank_timeout: number; + has_backlight: boolean; + last_calibrated_at: string | null; last_seen: string | null; pending_command: string | null; nfc_ok: boolean; @@ -4836,6 +4882,13 @@ export interface SpoolBuddyDevice { online: boolean; } +export interface DaemonUpdateCheck { + current_version: string; + latest_version: string | null; + update_available: boolean; + release_url: string | null; +} + // SpoolBuddy API export const spoolbuddyApi = { getDevices: () => @@ -4850,10 +4903,10 @@ export const spoolbuddyApi = { getCalibration: (deviceId: string) => request<{ tare_offset: number; calibration_factor: number }>(`/spoolbuddy/devices/${deviceId}/calibration`), - setCalibrationFactor: (deviceId: string, knownWeightGrams: number, rawAdc: number) => + setCalibrationFactor: (deviceId: string, knownWeightGrams: number, rawAdc: number, tareRawAdc?: number) => request<{ tare_offset: number; calibration_factor: number }>(`/spoolbuddy/devices/${deviceId}/calibration/set-factor`, { method: 'POST', - body: JSON.stringify({ known_weight_grams: knownWeightGrams, raw_adc: rawAdc }), + body: JSON.stringify({ known_weight_grams: knownWeightGrams, raw_adc: rawAdc, tare_raw_adc: tareRawAdc }), }), updateSpoolWeight: (spoolId: number, weightGrams: number) => @@ -4861,4 +4914,47 @@ export const spoolbuddyApi = { method: 'POST', body: JSON.stringify({ spool_id: spoolId, weight_grams: weightGrams }), }), + + updateDisplay: (deviceId: string, brightness: number, blankTimeout: number) => + request<{ status: string }>(`/spoolbuddy/devices/${deviceId}/display`, { + method: 'PUT', + body: JSON.stringify({ brightness, blank_timeout: blankTimeout }), + }), + + checkDaemonUpdate: (deviceId: string, includeBeta?: boolean) => + request(`/spoolbuddy/devices/${deviceId}/update-check?include_beta=${includeBeta ?? false}`), + + writeTag: (deviceId: string, spoolId: number) => + request<{ status: string }>('/spoolbuddy/nfc/write-tag', { + method: 'POST', + body: JSON.stringify({ device_id: deviceId, spool_id: spoolId }), + }), + + cancelWrite: (deviceId: string) => + request<{ status: string }>(`/spoolbuddy/devices/${deviceId}/cancel-write`, { + method: 'POST', + body: '{}', + }), +}; + +export interface BugReportRequest { + description: string; + email?: string; + screenshot_base64?: string; + include_support_info?: boolean; +} + +export interface BugReportResponse { + success: boolean; + message: string; + issue_url?: string; + issue_number?: number; +} + +export const bugReportApi = { + submit: (data: BugReportRequest) => + request('/bug-report/submit', { + method: 'POST', + body: JSON.stringify(data), + }), }; diff --git a/frontend/src/components/BugReportBubble.tsx b/frontend/src/components/BugReportBubble.tsx new file mode 100644 index 000000000..482ab318c --- /dev/null +++ b/frontend/src/components/BugReportBubble.tsx @@ -0,0 +1,362 @@ +import { useState, useRef, useCallback, useEffect } from 'react'; +import { Bug, X, Loader2, CheckCircle, AlertCircle, Trash2, Upload } from 'lucide-react'; +import { useTranslation } from 'react-i18next'; +import { bugReportApi } from '../api/client'; + +type ViewState = 'form' | 'collecting' | 'submitting' | 'success' | 'error'; + +const LOG_COLLECTION_SECONDS = 30; + +const MAX_DIMENSION = 1920; +const JPEG_QUALITY = 0.7; + +function compressImage(file: File): Promise { + return new Promise((resolve, reject) => { + const img = new Image(); + img.onload = () => { + let { width, height } = img; + if (width > MAX_DIMENSION || height > MAX_DIMENSION) { + const scale = MAX_DIMENSION / Math.max(width, height); + width = Math.round(width * scale); + height = Math.round(height * scale); + } + const canvas = document.createElement('canvas'); + canvas.width = width; + canvas.height = height; + const ctx = canvas.getContext('2d'); + if (!ctx) { reject(new Error('No canvas context')); return; } + ctx.drawImage(img, 0, 0, width, height); + const dataUrl = canvas.toDataURL('image/jpeg', JPEG_QUALITY); + resolve(dataUrl.replace(/^data:[^;]+;base64,/, '')); + }; + img.onerror = reject; + img.src = URL.createObjectURL(file); + }); +} + +export function BugReportBubble() { + const { t } = useTranslation(); + const [isOpen, setIsOpen] = useState(false); + const [viewState, setViewState] = useState('form'); + const [description, setDescription] = useState(''); + const [email, setEmail] = useState(''); + const [screenshot, setScreenshot] = useState(null); + const [isDragging, setIsDragging] = useState(false); + const [issueUrl, setIssueUrl] = useState(null); + const [issueNumber, setIssueNumber] = useState(null); + const [errorMessage, setErrorMessage] = useState(''); + const [countdown, setCountdown] = useState(0); + const modalRef = useRef(null); + const fileInputRef = useRef(null); + + // Countdown timer for log collection phase + useEffect(() => { + if (viewState !== 'collecting') return; + if (countdown <= 0) { + setViewState('submitting'); + return; + } + const timer = setTimeout(() => setCountdown((c) => c - 1), 1000); + return () => clearTimeout(timer); + }, [viewState, countdown]); + + const handleOpen = () => { + setIsOpen(true); + setViewState('form'); + setDescription(''); + setEmail(''); + setScreenshot(null); + setIssueUrl(null); + setIssueNumber(null); + setErrorMessage(''); + }; + + const handleClose = () => { + setIsOpen(false); + }; + + const handleFile = useCallback(async (file: File) => { + if (!file.type.startsWith('image/')) return; + try { + const b64 = await compressImage(file); + setScreenshot(b64); + } catch { + // Ignore read errors + } + }, []); + + const handlePaste = useCallback((e: React.ClipboardEvent) => { + const items = e.clipboardData?.items; + if (!items) return; + for (const item of items) { + if (item.type.startsWith('image/')) { + const file = item.getAsFile(); + if (file) handleFile(file); + break; + } + } + }, [handleFile]); + + const handleDragOver = useCallback((e: React.DragEvent) => { + e.preventDefault(); + setIsDragging(true); + }, []); + + const handleDragLeave = useCallback((e: React.DragEvent) => { + e.preventDefault(); + setIsDragging(false); + }, []); + + const handleDrop = useCallback((e: React.DragEvent) => { + e.preventDefault(); + setIsDragging(false); + const file = e.dataTransfer.files?.[0]; + if (file) handleFile(file); + }, [handleFile]); + + const handleSubmit = async () => { + if (!description.trim()) return; + setCountdown(LOG_COLLECTION_SECONDS); + setViewState('collecting'); + try { + const result = await bugReportApi.submit({ + description: description.trim(), + email: email.trim() || undefined, + screenshot_base64: screenshot || undefined, + include_support_info: true, + }); + if (result.success) { + setIssueUrl(result.issue_url || null); + setIssueNumber(result.issue_number || null); + setViewState('success'); + } else { + setErrorMessage(result.message); + setViewState('error'); + } + } catch (err) { + setErrorMessage(err instanceof Error ? err.message : t('bugReport.unexpectedError')); + setViewState('error'); + } + }; + + return ( + <> + {/* Floating bubble */} + + + {/* Slide-in panel anchored to bottom-right */} + {isOpen && ( +
+
+ {/* Header */} +
+

+ + {t('bugReport.title')} +

+ +
+ +
+ {viewState === 'form' && ( + <> + {/* Description */} +
+ +