@@ -179,7 +179,6 @@ def _drive_urls(file_id: str) -> (str, str):
     preview = f"https://drive.google.com/file/d/{file_id}/preview"
     download = f"https://drive.google.com/uc?export=download&id={file_id}"
     return preview, download
-
 #==================================================#
 
 _SQLITE_RESERVED_PREFIXES = ("sqlite_",)
@@ -238,7 +237,6 @@ def file_to_table_name(filename: str) -> str:
238237 """
239238 import os
240239 return tableize_basename (os .path .basename (filename or "" ))
241-
242240#==================================================#
243241
244242def make_file_public (file_id : str ):
@@ -252,7 +250,6 @@ def make_file_public(file_id: str):
         ).execute()
     except Exception:
         pass  # ignore if permission already exists
-
 #==================================================#
 
 def ensure_uploads_log_schema():
@@ -330,7 +327,41 @@ def ensure_trigger(name, ddl):
         END;""")
 
         conn.commit()
+#==================================================#
 
+def ensure_uploads_log_columns():
+    """Add missing Drive-era columns to uploads_log on old DBs."""
+    with sqlite3.connect(DB_NAME) as conn:
+        c = conn.cursor()
+
+        # If the table itself doesn't exist yet, create the full schema first
+        c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='uploads_log'")
+        if not c.fetchone():
+            ensure_uploads_log_schema()
+
+        c.execute("PRAGMA table_info(uploads_log)")
+        existing = {row[1] for row in c.fetchall()}
+
+        to_add = [
+            ("storage", "TEXT"),
+            ("drive_id", "TEXT"),
+            ("preview_url", "TEXT"),
+            ("download_url", "TEXT"),
+            ("source", "TEXT"),
+            ("description", "TEXT"),
+        ]
+        for col, typ in to_add:
+            if col not in existing:
+                c.execute(f"ALTER TABLE uploads_log ADD COLUMN {col} {typ}")
+        try:
+            # Don't fail the request if dupes exist; just try to enforce uniqueness.
+            c.execute("""
+                CREATE UNIQUE INDEX IF NOT EXISTS idx_uploads_unique
+                ON uploads_log(property, tab, filename)
+            """)
+        except sqlite3.OperationalError:
+            pass  # ignore if duplicates still exist; you can run /admin/fix_uploads_uniqueness later
+        conn.commit()
 #==================================================#
 
 def auto_log_material_files():
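Aside (not part of the patch): the ON CONFLICT(property, tab, filename) upserts added further down only work if SQLite can find a matching unique constraint, which is exactly what idx_uploads_unique provides; without it the INSERT raises an OperationalError. Below is a minimal standalone sketch of that interplay, using an in-memory database, a trimmed-down uploads_log, and hypothetical sample values.

import sqlite3

conn = sqlite3.connect(":memory:")
c = conn.cursor()
c.execute("CREATE TABLE uploads_log (property TEXT, tab TEXT, filename TEXT, drive_id TEXT)")
c.execute("CREATE UNIQUE INDEX idx_uploads_unique ON uploads_log(property, tab, filename)")

upsert = """
    INSERT INTO uploads_log (property, tab, filename, drive_id)
    VALUES (?, ?, ?, ?)
    ON CONFLICT(property, tab, filename)
    DO UPDATE SET drive_id = excluded.drive_id
"""
c.execute(upsert, ("band_gap", "dataset", "data.csv", "old-id"))  # first add
c.execute(upsert, ("band_gap", "dataset", "data.csv", "new-id"))  # re-add: updates in place, no duplicate row
print(c.execute("SELECT COUNT(*), drive_id FROM uploads_log").fetchone())  # -> (1, 'new-id')
conn.commit()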
@@ -486,9 +517,10 @@ def _run_startup_tasks():
     if _startup_done:
         return
     try:
-        ensure_uploads_log_schema()  # if you have this helper; otherwise drop it
+        ensure_uploads_log_schema()
+        ensure_uploads_log_columns()
     except Exception as e:
-        app.logger.warning("ensure_uploads_log_schema skipped: %s", e)
+        app.logger.warning("ensure_uploads_log_schema/columns skipped: %s", e)
     # try:
     #     auto_import_uploads()
     # except Exception as e:
@@ -700,6 +732,10 @@ def admin_home():
 
     # Make sure catalog + audit schema exist (triggers are created here too)
     ensure_uploads_log_schema()
+    try:
+        ensure_uploads_log_columns()  # Add Drive-era columns if missing
+    except Exception as e:
+        app.logger.warning("ensure_uploads_log_columns: %s", e)
 
     # Build the history table: when first added, and whether still present publicly
     with sqlite3.connect(DB_NAME) as conn:
@@ -845,7 +881,6 @@ def diag_routes():
 #########################################################
 
 # -- View and import (admin + public, Drive-only adds) --
-# -- View and import (admin + Public) --
 @app.route('/materials/<property_name>/<tab>', methods=['GET', 'POST'])
 def property_detail(property_name, tab):
     # ---- titles / guards ----
@@ -862,31 +897,22 @@ def property_detail(property_name, tab):
     edit_message = ""
     is_admin = bool(session.get('admin'))
 
+    # Ensure columns exist on old DBs (adds storage/drive_id/preview_url/download_url/source/description if missing)
+    try:
+        ensure_uploads_log_columns()
+    except Exception as e:
+        app.logger.warning("ensure_uploads_log_columns raised: %s", e)
+
     # ---- admin POST handlers ----
     if is_admin and request.method == 'POST':
         try:
-            # 0) Ensure storage columns exist (for upgraded DBs)
-            try:
-                with sqlite3.connect(DB_NAME) as conn:
-                    cur = conn.cursor()
-                    cur.execute("SELECT storage, drive_id, preview_url, download_url FROM uploads_log LIMIT 1")
-            except Exception:
-                with sqlite3.connect(DB_NAME) as conn:
-                    cur = conn.cursor()
-                    cur.execute("ALTER TABLE uploads_log ADD COLUMN storage TEXT")
-                    cur.execute("ALTER TABLE uploads_log ADD COLUMN drive_id TEXT")
-                    cur.execute("ALTER TABLE uploads_log ADD COLUMN preview_url TEXT")
-                    cur.execute("ALTER TABLE uploads_log ADD COLUMN download_url TEXT")
-                conn.commit()
-
-            # 1) Add a single Drive file by link/ID
+            # 1) Add a single Drive FILE by link/ID
             if request.form.get('add_drive'):
-                drive_link = request.form.get('drive_link', '').strip()
-                label = request.form.get('label', '').strip()
+                drive_link = (request.form.get('drive_link') or '').strip()
+                label      = (request.form.get('label') or '').strip()
                 new_source = (request.form.get('row_source') or '').strip() if tab == 'dataset' else None
-                new_desc = (request.form.get('row_description') or '').strip()
+                new_desc   = (request.form.get('row_description') or '').strip()
 
-                ext = (label.rsplit('.', 1)[-1].lower() if '.' in label else '')
                 if not _ext_ok_for_tab(label, tab):
                     if tab == 'dataset':
                         upload_message = "Label must end with .csv or .npy for datasets."
@@ -903,32 +929,33 @@ def property_detail(property_name, tab):
                         c.execute(
                             """
                             INSERT INTO uploads_log
-                                (property, tab, filename, uploaded_at, storage, drive_id, preview_url, download_url, source, description)
-                            VALUES (?, ?, ?, CURRENT_TIMESTAMP, 'drive', ?, ?, ?, ?, ?)
+                                (property, tab, filename, uploaded_at,
+                                 storage, drive_id, preview_url, download_url, source, description)
+                            VALUES (?, ?, ?, CURRENT_TIMESTAMP,
+                                    'drive', ?, ?, ?, ?, ?)
                             ON CONFLICT(property, tab, filename)
                             DO UPDATE SET
                                 uploaded_at = CURRENT_TIMESTAMP,
-                                storage = 'drive',
-                                drive_id = excluded.drive_id,
+                                storage     = 'drive',
+                                drive_id    = excluded.drive_id,
                                 preview_url = excluded.preview_url,
-                                download_url = excluded.download_url,
-                                source = COALESCE(excluded.source, uploads_log.source),
+                                download_url= excluded.download_url,
+                                source      = COALESCE(excluded.source, uploads_log.source),
                                 description = COALESCE(excluded.description, uploads_log.description)
                             """,
                             (property_name, tab, label, file_id, preview_url, download_url, new_source, new_desc),
                         )
                         conn.commit()
                         upload_message = f"Added Drive item '{label}'."
 
-            # 2) Link a Drive FOLDER → import all allowed files
+            # 2) Link a Drive FOLDER → import all allowed files directly under it
             elif request.form.get('link_folder'):
-                folder_link = request.form.get('drive_folder_link', '').strip()
+                folder_link = (request.form.get('drive_folder_link') or '').strip()
                 folder_id = _drive_extract_id(folder_link)
                 if not folder_id:
                     upload_message = "Invalid Drive folder link or ID."
                 else:
                     service = get_drive_service()
-                    # list files and import those with allowed extensions for this tab
                     files = drive_list_folder_files(service, folder_id)
                     imported = 0
                     with sqlite3.connect(DB_NAME) as conn:
@@ -942,30 +969,32 @@ def property_detail(property_name, tab):
                             c.execute(
                                 """
                                 INSERT INTO uploads_log
-                                    (property, tab, filename, uploaded_at, storage, drive_id, preview_url, download_url)
-                                VALUES (?, ?, ?, CURRENT_TIMESTAMP, 'drive', ?, ?, ?)
+                                    (property, tab, filename, uploaded_at,
+                                     storage, drive_id, preview_url, download_url)
+                                VALUES (?, ?, ?, CURRENT_TIMESTAMP,
+                                        'drive', ?, ?, ?)
                                 ON CONFLICT(property, tab, filename)
                                 DO UPDATE SET
                                     uploaded_at = CURRENT_TIMESTAMP,
-                                    storage = 'drive',
-                                    drive_id = excluded.drive_id,
+                                    storage     = 'drive',
+                                    drive_id    = excluded.drive_id,
                                     preview_url = excluded.preview_url,
-                                    download_url = excluded.download_url
+                                    download_url= excluded.download_url
                                 """,
                                 (property_name, tab, name, fid, preview_url, download_url),
                             )
                             imported += 1
                         conn.commit()
                     upload_message = f"Linked folder: imported {imported} item(s)."
 
-            # 3) Upload a ZIP → push contents to Drive <root>/<property>/<tab> → import
+            # 3) Upload a ZIP → push files to Drive <root>/<property>/<tab> → log them
             elif request.form.get('zip_upload'):
-                if 'zipfile' not in request.files or request.files['zipfile'].filename == '':
+                if 'zipfile' not in request.files or not request.files['zipfile'].filename:
                     upload_message = "No ZIP file selected."
                 else:
                     zf = request.files['zipfile']
                     try:
-                        root_id = os.environ.get("GDRIVE_ROOT_FOLDER_ID", "").strip()
+                        root_id = (os.environ.get("GDRIVE_ROOT_FOLDER_ID") or "").strip()
                         if not root_id:
                             raise RuntimeError("GDRIVE_ROOT_FOLDER_ID not set.")
                         service = get_drive_service()
@@ -975,33 +1004,31 @@ def property_detail(property_name, tab):
                         z = zipfile.ZipFile(io.BytesIO(data))
                         uploaded = 0
 
-                        # Upload allowed files only
                         with sqlite3.connect(DB_NAME) as conn:
                             c = conn.cursor()
                             for info in z.infolist():
                                 if info.is_dir():
                                     continue
-                                # we only want the basename to be the label/filename
                                 base = os.path.basename(info.filename)
-                                if not base:
-                                    continue
-                                if not _ext_ok_for_tab(base, tab):
+                                if not base or not _ext_ok_for_tab(base, tab):
                                     continue
                                 file_bytes = z.read(info)
                                 fid = drive_upload_bytes(service, target_folder_id, base, file_bytes)
                                 preview_url, download_url = _drive_urls(fid)
                                 c.execute(
                                     """
                                     INSERT INTO uploads_log
-                                        (property, tab, filename, uploaded_at, storage, drive_id, preview_url, download_url)
-                                    VALUES (?, ?, ?, CURRENT_TIMESTAMP, 'drive', ?, ?, ?)
+                                        (property, tab, filename, uploaded_at,
+                                         storage, drive_id, preview_url, download_url)
+                                    VALUES (?, ?, ?, CURRENT_TIMESTAMP,
+                                            'drive', ?, ?, ?)
                                     ON CONFLICT(property, tab, filename)
                                     DO UPDATE SET
                                         uploaded_at = CURRENT_TIMESTAMP,
-                                        storage = 'drive',
-                                        drive_id = excluded.drive_id,
+                                        storage     = 'drive',
+                                        drive_id    = excluded.drive_id,
                                         preview_url = excluded.preview_url,
-                                        download_url = excluded.download_url
+                                        download_url= excluded.download_url
                                     """,
                                     (property_name, tab, base, fid, preview_url, download_url),
                                 )
@@ -1012,7 +1039,7 @@ def property_detail(property_name, tab):
                     except Exception as e:
                         upload_message = f"ZIP upload failed: {e}"
 
-            # 4) Inline edit (source/description) – unchanged
+            # 4) Inline edit (source/description)
             elif 'edit_row' in request.form:
                 row_filename = (request.form.get('row_filename') or '').strip()
                 new_desc = (request.form.get('row_description') or '').strip()
@@ -1039,7 +1066,6 @@ def property_detail(property_name, tab):
                     )
                     conn.commit()
                     edit_message = f"Updated info for {row_filename}."
-
         except Exception as e:
             upload_message = f"Error: {e}"
 
@@ -1049,13 +1075,14 @@ def property_detail(property_name, tab):
         c = conn.cursor()
         c.execute(
             """
-            SELECT filename,
-                   COALESCE(source,'') AS source,
-                   COALESCE(description,'') AS description,
-                   uploaded_at,
-                   COALESCE(storage,'local') AS storage,
-                   preview_url,
-                   download_url
+            SELECT
+                filename,
+                COALESCE(source,'') AS source,
+                COALESCE(description,'') AS description,
+                uploaded_at,
+                COALESCE(storage,'local') AS storage,
+                COALESCE(preview_url,'') AS preview_url,
+                COALESCE(download_url,'') AS download_url
             FROM uploads_log
             WHERE property = ? AND tab = ?
             ORDER BY uploaded_at DESC, filename
@@ -1068,7 +1095,8 @@ def property_detail(property_name, tab):
     table_map = {}
     if tab == 'dataset':
         for row in uploads:
-            if (row['storage'] or 'local') != 'drive':
+            storage_val = row['storage'] if 'storage' in row.keys() else 'local'
+            if storage_val != 'drive':
                 fname = row['filename']
                 if fname and (fname.endswith('.csv') or fname.endswith('.npy')):
                     table_map[fname] = file_to_table_name(fname)
@@ -1083,8 +1111,7 @@ def property_detail(property_name, tab):
         edit_message=edit_message,
         admin=is_admin,
         table_map=table_map,
-    )
-
+    )
 #########################################################
 
 @app.route('/uploads/<path:filename>')
@@ -1095,7 +1122,6 @@ def uploaded_file(filename):
         print('File not found:', full_path)
         abort(404)
     return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
-
 #########################################################
 
 @app.route('/view_result/<property_name>/<tab>/<path:filename>')
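Footnote: both the file and folder handlers above call _drive_extract_id(), whose implementation is outside this diff. For orientation only, here is a hypothetical sketch of what such a parser might look like; the function name, the regexes, and the accepted link shapes are assumptions, not the project's actual code.

import re

def extract_drive_id_sketch(link_or_id: str):
    """Hypothetical illustration: pull a Drive ID out of a share link, or pass a bare ID through."""
    s = (link_or_id or "").strip()
    if not s:
        return None
    # Common share-link shapes: .../file/d/<ID>/view, .../drive/folders/<ID>, ...?id=<ID>
    for pattern in (r"/file/d/([\w-]+)", r"/folders/([\w-]+)", r"[?&]id=([\w-]+)"):
        m = re.search(pattern, s)
        if m:
            return m.group(1)
    # Otherwise treat the whole string as an ID if it plausibly looks like one.
    return s if re.fullmatch(r"[\w-]{10,}", s) else None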