22import json
33import logging
44import zipfile
5- from datetime import date , datetime , time , timezone
65from decimal import ROUND_HALF_UP , Decimal
76from pathlib import Path
87
2221from backend .app .models .filament import Filament
2322from backend .app .models .spool_usage_history import SpoolUsageHistory
2423from backend .app .models .user import User
25- from backend .app .schemas .archive import ArchiveResponse , ArchiveSlim , ArchiveStats , ArchiveUpdate , ReprintRequest
24+ from backend .app .schemas .archive import ArchiveResponse , ArchiveStats , ArchiveUpdate , ReprintRequest
2625from backend .app .services .archive import ArchiveService
2726from backend .app .utils .threemf_tools import extract_nozzle_mapping_from_3mf
2827
@@ -123,8 +122,6 @@ def archive_to_response(
123122async def list_archives (
124123 printer_id : int | None = None ,
125124 project_id : int | None = None ,
126- date_from : date | None = Query (None ),
127- date_to : date | None = Query (None ),
128125 limit : int = 50 ,
129126 offset : int = 0 ,
130127 db : AsyncSession = Depends (get_db ),
@@ -135,8 +132,6 @@ async def list_archives(
135132 archives = await service .list_archives (
136133 printer_id = printer_id ,
137134 project_id = project_id ,
138- date_from = date_from ,
139- date_to = date_to ,
140135 limit = limit ,
141136 offset = offset ,
142137 )
@@ -154,78 +149,6 @@ async def list_archives(
154149 return result
155150
156151
@router.get("/slim", response_model=list[ArchiveSlim])
async def list_archives_slim(
    date_from: date | None = Query(None),
    date_to: date | None = Query(None),
    limit: int = Query(default=10000, le=50000),
    offset: int = 0,
    db: AsyncSession = Depends(get_db),
    _: User | None = RequirePermissionIfAuthEnabled(Permission.ARCHIVES_READ),
):
    """Lightweight archive listing for stats/dashboard widgets.

    Returns only the fields needed for client-side aggregation,
    skipping duplicate detection, file paths, and extra_data.
    """
    # Translate the inclusive date window into UTC datetime bounds.
    conditions = []
    if date_from:
        window_start = datetime.combine(date_from, time.min, tzinfo=timezone.utc)
        conditions.append(PrintArchive.created_at >= window_start)
    if date_to:
        window_end = datetime.combine(date_to, time.max, tzinfo=timezone.utc)
        conditions.append(PrintArchive.created_at <= window_end)

    # Project only the columns the dashboard aggregates over.
    columns = (
        PrintArchive.printer_id,
        PrintArchive.print_name,
        PrintArchive.print_time_seconds,
        PrintArchive.started_at,
        PrintArchive.completed_at,
        PrintArchive.filament_used_grams,
        PrintArchive.filament_type,
        PrintArchive.filament_color,
        PrintArchive.status,
        PrintArchive.cost,
        PrintArchive.quantity,
        PrintArchive.created_at,
    )
    stmt = (
        select(*columns)
        .where(*conditions)
        .order_by(PrintArchive.created_at.desc())
        .limit(limit)
        .offset(offset)
    )
    rows = (await db.execute(stmt)).all()

    def _actual_seconds(row):
        # Wall-clock duration, reported only for completed prints whose
        # timestamps yield a positive elapsed time; otherwise None.
        if not (row.started_at and row.completed_at and row.status == "completed"):
            return None
        elapsed = (row.completed_at - row.started_at).total_seconds()
        return int(elapsed) if elapsed > 0 else None

    return [
        {
            "printer_id": row.printer_id,
            "print_name": row.print_name,
            "print_time_seconds": row.print_time_seconds,
            "actual_time_seconds": _actual_seconds(row),
            "filament_used_grams": row.filament_used_grams,
            "filament_type": row.filament_type,
            "filament_color": row.filament_color,
            "status": row.status,
            "started_at": row.started_at,
            "completed_at": row.completed_at,
            "cost": row.cost,
            "quantity": row.quantity,
            "created_at": row.created_at,
        }
        for row in rows
    ]
227-
228-
229152@router .get ("/search" , response_model = list [ArchiveResponse ])
230153async def search_archives (
231154 q : str = Query (..., min_length = 2 , description = "Search query" ),
@@ -354,9 +277,7 @@ async def rebuild_search_index(
354277
355278@router .get ("/analysis/failures" )
356279async def analyze_failures (
357- days : int | None = None ,
358- date_from : date | None = Query (None ),
359- date_to : date | None = Query (None ),
280+ days : int = 30 ,
360281 printer_id : int | None = None ,
361282 project_id : int | None = None ,
362283 db : AsyncSession = Depends (get_db ),
@@ -376,8 +297,6 @@ async def analyze_failures(
376297 service = FailureAnalysisService (db )
377298 return await service .analyze_failures (
378299 days = days ,
379- date_from = date_from ,
380- date_to = date_to ,
381300 printer_id = printer_id ,
382301 project_id = project_id ,
383302 )
@@ -521,42 +440,25 @@ async def export_stats(
521440
522441@router .get ("/stats" , response_model = ArchiveStats )
523442async def get_archive_stats (
524- date_from : date | None = Query (None , description = "Start date (inclusive), YYYY-MM-DD" ),
525- date_to : date | None = Query (None , description = "End date (inclusive), YYYY-MM-DD" ),
526443 db : AsyncSession = Depends (get_db ),
527444 _ : User | None = RequirePermissionIfAuthEnabled (Permission .STATS_READ ),
528445):
529446 """Get statistics across all archives."""
530- # Build date filter conditions
531- base_conditions = []
532- if date_from :
533- dt_from = datetime .combine (date_from , time .min , tzinfo = timezone .utc )
534- base_conditions .append (PrintArchive .created_at >= dt_from )
535- if date_to :
536- dt_to = datetime .combine (date_to , time .max , tzinfo = timezone .utc )
537- base_conditions .append (PrintArchive .created_at <= dt_to )
538-
539447 # Total counts
540- total_result = await db .execute (select (func .count (PrintArchive .id )). where ( * base_conditions ) )
448+ total_result = await db .execute (select (func .count (PrintArchive .id )))
541449 total_prints = total_result .scalar () or 0
542450
543- successful_result = await db .execute (
544- select (func .count (PrintArchive .id )).where (PrintArchive .status == "completed" , * base_conditions )
545- )
451+ successful_result = await db .execute (select (func .count (PrintArchive .id )).where (PrintArchive .status == "completed" ))
546452 successful_prints = successful_result .scalar () or 0
547453
548- failed_result = await db .execute (
549- select (func .count (PrintArchive .id )).where (PrintArchive .status == "failed" , * base_conditions )
550- )
454+ failed_result = await db .execute (select (func .count (PrintArchive .id )).where (PrintArchive .status == "failed" ))
551455 failed_prints = failed_result .scalar () or 0
552456
553457 # Totals - use actual print time from timestamps (not slicer estimates)
554458 # For archives with both started_at and completed_at, calculate actual duration
555459 # Fall back to print_time_seconds only for archives without timestamps
556460 archives_for_time = await db .execute (
557- select (PrintArchive .started_at , PrintArchive .completed_at , PrintArchive .print_time_seconds ).where (
558- * base_conditions
559- )
461+ select (PrintArchive .started_at , PrintArchive .completed_at , PrintArchive .print_time_seconds )
560462 )
561463 total_seconds = 0
562464 for started_at , completed_at , print_time_seconds in archives_for_time .all ():
@@ -571,17 +473,15 @@ async def get_archive_stats(
571473 total_time = total_seconds / 3600 # Convert to hours
572474
573475 # Sum filament directly - filament_used_grams already contains the total for the print job
574- filament_result = await db .execute (
575- select (func .coalesce (func .sum (PrintArchive .filament_used_grams ), 0 )).where (* base_conditions )
576- )
476+ filament_result = await db .execute (select (func .coalesce (func .sum (PrintArchive .filament_used_grams ), 0 )))
577477 total_filament = filament_result .scalar () or 0
578478
579- cost_result = await db .execute (select (func .sum (PrintArchive .cost )). where ( * base_conditions ) )
479+ cost_result = await db .execute (select (func .sum (PrintArchive .cost )))
580480 total_cost = cost_result .scalar () or 0
581481
582482 # By filament type (split comma-separated values for multi-material prints)
583483 filament_type_result = await db .execute (
584- select (PrintArchive .filament_type ).where (PrintArchive .filament_type .isnot (None ), * base_conditions )
484+ select (PrintArchive .filament_type ).where (PrintArchive .filament_type .isnot (None ))
585485 )
586486 prints_by_filament : dict [str , int ] = {}
587487 for (filament_types ,) in filament_type_result .all ():
@@ -593,17 +493,15 @@ async def get_archive_stats(
593493
594494 # By printer
595495 printer_result = await db .execute (
596- select (PrintArchive .printer_id , func .count (PrintArchive .id ))
597- .where (* base_conditions )
598- .group_by (PrintArchive .printer_id )
496+ select (PrintArchive .printer_id , func .count (PrintArchive .id )).group_by (PrintArchive .printer_id )
599497 )
600498 prints_by_printer = {str (k ): v for k , v in printer_result .all ()}
601499
602500 # Time accuracy statistics
603501 # Get all completed archives with both estimated and actual times
604502 accuracy_result = await db .execute (
605503 select (PrintArchive )
606- .where (PrintArchive .status == "completed" , * base_conditions )
504+ .where (PrintArchive .status == "completed" )
607505 .where (PrintArchive .print_time_seconds .isnot (None ))
608506 .where (PrintArchive .started_at .isnot (None ))
609507 .where (PrintArchive .completed_at .isnot (None ))
@@ -677,10 +575,10 @@ async def get_archive_stats(
677575 total_energy_cost = round (total_energy_kwh * energy_cost_per_kwh , 3 )
678576 else :
679577 # Print mode: sum up per-print energy from archives
680- energy_kwh_result = await db .execute (select (func .sum (PrintArchive .energy_kwh )). where ( * base_conditions ) )
578+ energy_kwh_result = await db .execute (select (func .sum (PrintArchive .energy_kwh )))
681579 total_energy_kwh = energy_kwh_result .scalar () or 0
682580
683- energy_cost_result = await db .execute (select (func .sum (PrintArchive .energy_cost )). where ( * base_conditions ) )
581+ energy_cost_result = await db .execute (select (func .sum (PrintArchive .energy_cost )))
684582 total_energy_cost = energy_cost_result .scalar () or 0
685583
686584 return ArchiveStats (
0 commit comments