Commit ccfd787

SM_SAYEED committed
upload uniqueness during upload asserted
1 parent 1d4e51a commit ccfd787

File tree

1 file changed: +100 -0 lines changed


app.py

Lines changed: 100 additions & 0 deletions
@@ -334,6 +334,106 @@ def admin_home():
         uploads=uploads,
         music_clips=music_clips
     )
+
+#########################################################
+
+@app.route("/admin/fix_uploads_uniqueness", methods=["GET", "POST"])
+def fix_uploads_uniqueness():
+    if not session.get("admin"):
+        abort(403)
+
+    stats = {}
+    try:
+        # The schema helper may fail while dups exist; ignore and continue.
+        try:
+            ensure_uploads_log_schema()
+        except Exception as e:
+            app.logger.warning("ensure_uploads_log_schema raised (continuing): %s", e)
+
+        with sqlite3.connect(DB_NAME) as conn:
+            c = conn.cursor()
+
+            rows_before = c.execute("SELECT COUNT(*) FROM uploads_log").fetchone()[0]
+            dup_groups_before = c.execute("""
+                SELECT COUNT(*)
+                FROM (
+                    SELECT property, tab, filename, COUNT(*) c
+                    FROM uploads_log
+                    GROUP BY property, tab, filename
+                    HAVING c > 1
+                )
+            """).fetchone()[0]
+
+            deleted = 0
+            used_window = False
+
+            if dup_groups_before > 0:
+                # Prefer the window-function method (keeps newest by uploaded_at, then highest rowid).
+                try:
+                    c.execute("""
+                        WITH ranked AS (
+                            SELECT rowid,
+                                   property, tab, filename,
+                                   COALESCE(uploaded_at, '') AS ts,
+                                   ROW_NUMBER() OVER (
+                                       PARTITION BY property, tab, filename
+                                       ORDER BY ts DESC, rowid DESC
+                                   ) AS rn
+                            FROM uploads_log
+                        )
+                        DELETE FROM uploads_log
+                        WHERE rowid IN (SELECT rowid FROM ranked WHERE rn > 1);
+                    """)
+                    used_window = True
+                    deleted = conn.total_changes
+                except sqlite3.OperationalError:
+                    # Fallback for older SQLite (no window functions):
+                    # keep the earliest row per key (good enough to enforce uniqueness).
+                    c.execute("""
+                        DELETE FROM uploads_log
+                        WHERE rowid NOT IN (
+                            SELECT MIN(rowid)
+                            FROM uploads_log
+                            GROUP BY property, tab, filename
+                        );
+                    """)
+                    deleted = conn.total_changes
+
+            # Enforce uniqueness with an index. If dups somehow remain,
+            # CREATE UNIQUE INDEX raises IntegrityError; we report below.
+            try:
+                c.execute("""
+                    CREATE UNIQUE INDEX IF NOT EXISTS idx_uploads_unique
+                    ON uploads_log(property, tab, filename)
+                """)
+            except (sqlite3.OperationalError, sqlite3.IntegrityError) as e:
+                app.logger.warning("creating unique index failed: %s", e)
+
+            conn.commit()
+
+            rows_after = c.execute("SELECT COUNT(*) FROM uploads_log").fetchone()[0]
+            dup_groups_after = c.execute("""
+                SELECT COUNT(*)
+                FROM (
+                    SELECT property, tab, filename, COUNT(*) c
+                    FROM uploads_log
+                    GROUP BY property, tab, filename
+                    HAVING c > 1
+                )
+            """).fetchone()[0]
+
+            stats.update({
+                "rows_before": rows_before,
+                "duplicate_groups_before": dup_groups_before,
+                "deleted_rows": deleted,
+                "used_window_delete": used_window,
+                "rows_after": rows_after,
+                "duplicate_groups_after": dup_groups_after,
+                "status": "ok" if dup_groups_after == 0 else "still_has_duplicates"
+            })
+        return jsonify(stats)
+    except Exception as e:
+        return jsonify({"status": "error", "error": str(e)}), 500
 
 #########################################################
 
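For reference, the core of the new endpoint is a dedup-then-index sequence: delete all but one row per (property, tab, filename) key, then lock that invariant in with a unique index. Below is a minimal standalone sketch of the same sequence against a throwaway in-memory database; the table, columns, and index name come from the diff above, while the sample rows are invented, and the window-function path requires SQLite >= 3.25.

# Standalone sketch of the dedup-then-unique-index sequence from this commit,
# run against a throwaway in-memory database. Sample rows are invented.
import sqlite3

conn = sqlite3.connect(":memory:")
c = conn.cursor()
c.execute("CREATE TABLE uploads_log (property TEXT, tab TEXT, filename TEXT, uploaded_at TEXT)")
c.executemany(
    "INSERT INTO uploads_log VALUES (?, ?, ?, ?)",
    [
        ("villa1", "photos", "a.jpg", "2024-01-01"),
        ("villa1", "photos", "a.jpg", "2024-02-01"),  # duplicate key, newer timestamp
        ("villa1", "docs", "b.pdf", "2024-01-15"),
    ],
)

# Keep only the newest row per key, exactly as the window-function path does.
c.execute("""
    WITH ranked AS (
        SELECT rowid,
               ROW_NUMBER() OVER (
                   PARTITION BY property, tab, filename
                   ORDER BY COALESCE(uploaded_at, '') DESC, rowid DESC
               ) AS rn
        FROM uploads_log
    )
    DELETE FROM uploads_log
    WHERE rowid IN (SELECT rowid FROM ranked WHERE rn > 1)
""")

# With duplicates gone, the unique index succeeds and enforces the invariant.
c.execute("CREATE UNIQUE INDEX idx_uploads_unique ON uploads_log(property, tab, filename)")
conn.commit()

print(c.execute("SELECT property, tab, filename, uploaded_at FROM uploads_log ORDER BY rowid").fetchall())
# [('villa1', 'photos', 'a.jpg', '2024-02-01'), ('villa1', 'docs', 'b.pdf', '2024-01-15')]

Hitting /admin/fix_uploads_uniqueness with an admin session then returns the JSON counters built in stats, with "status": "ok" once duplicate_groups_after reaches zero.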

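The commit message says uniqueness is asserted during upload, and the upload handler itself is not part of this diff. With idx_uploads_unique in place, though, an insert path can let the database reject duplicates atomically instead of doing a racy check-then-insert. A minimal sketch, assuming a hypothetical log_upload() helper; only the table, columns, and index come from the commit:

# Hypothetical helper: with idx_uploads_unique in place, a duplicate
# (property, tab, filename) insert fails atomically at the database level.
import sqlite3

def log_upload(db_name, prop, tab, filename, uploaded_at):
    with sqlite3.connect(db_name) as conn:
        try:
            conn.execute(
                "INSERT INTO uploads_log (property, tab, filename, uploaded_at) "
                "VALUES (?, ?, ?, ?)",
                (prop, tab, filename, uploaded_at),
            )
            return True   # first time this file was logged for this property/tab
        except sqlite3.IntegrityError:
            return False  # unique index rejected the duplicate

An UPSERT (INSERT ... ON CONFLICT(property, tab, filename) DO UPDATE SET uploaded_at = excluded.uploaded_at, SQLite >= 3.24) would instead refresh the timestamp on re-upload; either way the unique index, not application code, guarantees one row per key.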