@@ -110,72 +110,96 @@ def tableize(name: str) -> str:
     return imported
 
 # the current auto_log_material_files() ---
-@app.before_first_request
-def _seed_uploads_log():
-    auto_log_material_files()
-
-
 def ensure_uploads_log_schema():
-    # Creates table if missing and enforces uniqueness on (property, tab, filename)
+    # Creates the table and backfills missing columns so existing DBs keep working
     with sqlite3.connect(DB_NAME) as conn:
         c = conn.cursor()
         c.execute("""
-            CREATE TABLE IF NOT EXISTS uploads_log (
-                property TEXT NOT NULL,
-                tab TEXT NOT NULL,
-                filename TEXT NOT NULL
-            )
+            CREATE TABLE IF NOT EXISTS uploads_log (
+                property TEXT NOT NULL,
+                tab TEXT NOT NULL,
+                filename TEXT NOT NULL,
+                uploaded_at TEXT,
+                source TEXT,
+                description TEXT
+            )
+        """)
+        # Backfill columns if the table already existed without them
+        existing = {row[1] for row in c.execute("PRAGMA table_info(uploads_log)")}
+        for col, ddl in [
+            ("uploaded_at", "ALTER TABLE uploads_log ADD COLUMN uploaded_at TEXT"),
+            ("source", "ALTER TABLE uploads_log ADD COLUMN source TEXT"),
+            ("description", "ALTER TABLE uploads_log ADD COLUMN description TEXT"),
+        ]:
+            if col not in existing:
+                c.execute(ddl)
+
+        c.execute("""
+            CREATE UNIQUE INDEX IF NOT EXISTS idx_uploads_unique
+            ON uploads_log(property, tab, filename)
         """)
-        c.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_uploads_unique ON uploads_log(property, tab, filename)")
         conn.commit()
 
+
 def auto_log_material_files():
     ensure_uploads_log_schema()
 
-    upload_root = current_app.config.get("UPLOAD_FOLDER", UPLOAD_FOLDER)
+    # Avoid relying on current_app when we can read from app.config directly
+    upload_root = app.config.get("UPLOAD_FOLDER", UPLOAD_FOLDER)
     if not os.path.exists(upload_root):
         return
 
     all_allowed_exts = ALLOWED_DATASET_EXTENSIONS | ALLOWED_RESULTS_EXTENSIONS
-
     to_insert = []
-    for root, dirs, files in os.walk(upload_root):
+
+    for root, _, files in os.walk(upload_root):
+        rel_root = os.path.relpath(root, upload_root)
+        if rel_root.split(os.sep)[0] == "clips":
+            continue
+
         for fname in files:
             ext = fname.rsplit(".", 1)[-1].lower() if "." in fname else ""
             if ext not in all_allowed_exts:
                 continue
 
-            fpath = os.path.join(root, fname)
-            rel_path = os.path.relpath(fpath, upload_root)
+            rel_path = os.path.relpath(os.path.join(root, fname), upload_root)
             parts = rel_path.split(os.sep)
-
-            # Skip music uploads under /uploads/clips/
-            if parts and parts[0] == "clips":
-                continue
-
             if len(parts) >= 3:
                 property_name, tab, file_name = parts[0], parts[1], parts[2]
                 to_insert.append((property_name, tab, file_name))
 
-    if to_insert:
-        with sqlite3.connect(DB_NAME) as conn:
-            c = conn.cursor()
-            # idempotent insert: ignore duplicates quietly
-            c.executemany(
-                "INSERT OR IGNORE INTO uploads_log(property, tab, filename) VALUES (?, ?, ?)",
-                to_insert
-            )
-            conn.commit()
+    if not to_insert:
+        return
+
+    with sqlite3.connect(DB_NAME) as conn:
+        c = conn.cursor()
+        # Upsert: if the file is already logged, refresh uploaded_at
+        c.executemany("""
+            INSERT INTO uploads_log (property, tab, filename, uploaded_at)
+            VALUES (?, ?, ?, CURRENT_TIMESTAMP)
+            ON CONFLICT(property, tab, filename)
+            DO UPDATE SET uploaded_at=excluded.uploaded_at
+        """, to_insert)
+        conn.commit()
 
 
 # ========== FLASK APP ==========
+
 app = Flask(__name__)
 app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
 app.secret_key = 'IronMa1deN!'
 
-# Create folders if missing
-if not os.path.exists(UPLOAD_FOLDER):
-    os.makedirs(UPLOAD_FOLDER)
+@app.before_first_request
+def _warm_up():
+    try:
+        auto_import_uploads()
+    except Exception as e:
+        app.logger.warning("auto_import_uploads skipped: %s", e)
+    try:
+        auto_log_material_files()
+    except Exception as e:
+        app.logger.warning("auto_log_material_files skipped: %s", e)
+
 
 # ---------- Utility Functions ----------
 def allowed_dataset_file(filename):
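
The rewritten insert path depends on the schema helper above: `ON CONFLICT(property, tab, filename) DO UPDATE` only works because `ensure_uploads_log_schema()` guarantees the `idx_uploads_unique` unique index, and it needs SQLite 3.24 or newer. A minimal sketch of the idempotent behaviour the rescan relies on, using an in-memory database and made-up row values in place of DB_NAME and real upload paths:

import sqlite3

conn = sqlite3.connect(":memory:")  # stands in for DB_NAME
c = conn.cursor()
c.execute("""
    CREATE TABLE uploads_log (
        property TEXT NOT NULL,
        tab TEXT NOT NULL,
        filename TEXT NOT NULL,
        uploaded_at TEXT
    )
""")
c.execute("CREATE UNIQUE INDEX idx_uploads_unique ON uploads_log(property, tab, filename)")

rows = [("demo_property", "datasets", "rooms.csv")]  # hypothetical values
for _ in range(2):  # second pass hits ON CONFLICT and only refreshes uploaded_at
    c.executemany("""
        INSERT INTO uploads_log (property, tab, filename, uploaded_at)
        VALUES (?, ?, ?, CURRENT_TIMESTAMP)
        ON CONFLICT(property, tab, filename)
        DO UPDATE SET uploaded_at=excluded.uploaded_at
    """, rows)
conn.commit()
print(c.execute("SELECT COUNT(*) FROM uploads_log").fetchone()[0])  # prints 1, not 2

Re-running auto_log_material_files() therefore never duplicates rows; it only bumps uploaded_at for files that are still on disk.
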
@@ -694,16 +718,6 @@ def extract_drive_id(link):
     return render_template('add_drive_clip.html', message=message)
 
 
-# --- Print routes for debugging (optional, can comment out) ---
-for rule in app.url_map.iter_rules():
-    print(rule.endpoint, rule)
-
-auto_import_uploads()
-try:
-    auto_log_material_files()
-except Exception as e:
-    print("auto_log_material_files skipped:", e)
-
 
 # ========== MAIN ==========
 if __name__ == '__main__':
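
One caveat on the `_warm_up()` hook introduced in the first hunk: `before_first_request` was deprecated in Flask 2.2 and removed in Flask 2.3, so the decorator only exists on older Flask releases. A hedged alternative sketch, assuming a hypothetical `run_warm_up()` helper called once at startup instead of a request-triggered hook:

def run_warm_up(flask_app):
    # Same warm-up work as _warm_up(), but invoked explicitly before serving.
    with flask_app.app_context():
        try:
            auto_import_uploads()
        except Exception as e:
            flask_app.logger.warning("auto_import_uploads skipped: %s", e)
        try:
            auto_log_material_files()
        except Exception as e:
            flask_app.logger.warning("auto_log_material_files skipped: %s", e)

if __name__ == '__main__':
    run_warm_up(app)  # call before app.run(...), with whatever options the project already passes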