|
2 | 2 | logger = logging.getLogger(__name__) |
3 | 3 |
|
4 | 4 | from django.contrib import admin, messages |
| 5 | +from django.utils.html import format_html |
| 6 | +from django.conf import settings |
| 7 | +from django.core.mail import send_mail |
5 | 8 | from leaflet.admin import LeafletGeoAdmin |
6 | 9 | from publications.models import Publication, Source, HarvestingEvent, BlockedEmail, BlockedDomain, GlobalRegion |
7 | 10 | from import_export.admin import ImportExportModelAdmin |
|
12 | 15 | from publications.models import CustomUser |
13 | 16 | from publications.tasks import regenerate_geojson_cache |
14 | 17 | from publications.tasks import regenerate_geopackage_cache |
| 18 | +from django.test import Client |
| 19 | +from django.http import HttpResponse |
15 | 20 |
|
16 | 21 | @admin.action(description="Mark selected publications as published") |
17 | 22 | def make_public(modeladmin, request, queryset): |
@@ -133,30 +138,68 @@ def regenerate_all_exports(modeladmin, request, queryset): |
133 | 138 | messages.success(request, "GeoJSON & GeoPackage caches were regenerated.") |
134 | 139 | except Exception as e: |
135 | 140 | messages.error(request, f"Error during export regeneration: {e}") |
136 | | - |
| 141 | + |
@admin.register(Publication)
class PublicationAdmin(LeafletGeoAdmin, ImportExportModelAdmin):
    """Publication admin: map editing, import/export, and permalink tools."""

    list_display = ("title", "doi", "has_permalink", "permalink_link",
                    "creationDate", "lastUpdate", "created_by", "updated_by",
                    "status", "provenance", "source")
    search_fields = ("title", "doi", "abstract", "source__name")
    list_filter = ("status", "creationDate")
    fields = ("title", "doi", "status", "source", "abstract",
              "geometry", "timeperiod_startdate", "timeperiod_enddate",
              "created_by", "updated_by", "provenance")
    readonly_fields = ("created_by", "updated_by")
    # Module-level @admin.action functions must be listed as callables:
    # string entries are only resolved as methods on this ModelAdmin (or
    # site-wide registered actions), so "make_public" etc. would not resolve.
    actions = [make_public, make_draft, regenerate_all_exports,
               "export_permalinks_csv", "email_permalinks_preview"]

    @admin.display(boolean=True, description="Has DOI")
    def has_permalink(self, obj):
        """True when the publication has a DOI (and therefore a permalink)."""
        return bool(obj.doi)

    @admin.display(description="Permalink")
    def permalink_link(self, obj):
        """Render the permalink as a clickable link, or an em dash if absent."""
        url = obj.permalink()
        if not url:
            return "—"
        return format_html('<a href="{}" target="_blank">{}</a>', url, url)

    @admin.action(description="Export permalinks (CSV)")
    def export_permalinks_csv(self, request, queryset):
        """Download a CSV of (title, doi, permalink) for the selected items.

        Items without a DOI are skipped; if nothing in the selection has a
        DOI, a warning message is shown instead of an empty file.
        """
        rows = [("title", "doi", "permalink")]
        rows += [(p.title or "", p.doi, p.permalink() or "")
                 for p in queryset.only("title", "doi") if p.doi]
        if len(rows) == 1:  # header only — no selected item had a DOI
            self.message_user(request, "No items with DOI in selection.", level=messages.WARNING)
            return

        def escape_cell(value):
            # RFC 4180 quoting: wrap in double quotes, double embedded quotes.
            return '"{}"'.format((value or "").replace('"', '""'))

        body = "\n".join(",".join(escape_cell(cell) for cell in row) for row in rows)
        response = HttpResponse(body, content_type="text/csv; charset=utf-8")
        response["Content-Disposition"] = 'attachment; filename="publication_permalinks.csv"'
        return response

    @admin.action(description="Email permalinks preview to me")
    def email_permalinks_preview(self, request, queryset):
        """Resolve each selected permalink through the Django test client and
        email a per-item ✅/❌ status report to the requesting admin user.

        NOTE(review): uses ``django.test.Client`` at runtime (not just in
        tests) — the request never leaves the process; confirm this is the
        intended behavior rather than a real HTTP check.
        """
        base = settings.BASE_URL.rstrip("/")
        client = Client()
        lines, ok, bad = [], 0, 0
        for pub in queryset.only("title", "doi"):
            if not pub.doi:
                continue
            url = pub.permalink()
            # The test client takes a path, so strip the site base URL prefix.
            path = url[len(base):] if url and url.startswith(base) else url
            status = client.get(path).status_code
            if status == 200:
                ok += 1
            else:
                bad += 1
            lines.append(f"{'✅' if status == 200 else '❌'} {pub.title} — {url} (HTTP {status})")
        if not lines:
            self.message_user(request, "No items with DOI in selection.", level=messages.WARNING)
            return
        send_mail(
            "OPTIMAP — Permalink preview",
            "Selected publication permalinks:\n\n" + "\n".join(lines) + f"\n\nSummary: {ok} OK, {bad} not OK",
            settings.EMAIL_HOST_USER,  # NOTE(review): DEFAULT_FROM_EMAIL is the usual sender — confirm
            [request.user.email],
        )
        self.message_user(request, f"Emailed preview to {request.user.email}.", level=messages.INFO)
152 | 202 |
|
153 | | -@admin.register(Source) |
154 | | -class SourceAdmin(admin.ModelAdmin): |
155 | | - list_display = ("id", "url_field", "harvest_interval_minutes", "last_harvest", "collection_name", "tags") |
156 | | - list_filter = ("harvest_interval_minutes", "collection_name") |
157 | | - search_fields = ("url_field", "collection_name", "tags") |
158 | | - actions = [trigger_harvesting_for_specific, trigger_harvesting_for_all, schedule_harvesting] |
159 | | - |
160 | 203 | @admin.register(HarvestingEvent) |
161 | 204 | class HarvestingEventAdmin(admin.ModelAdmin): |
162 | 205 | list_display = ("id", "source", "status", "started_at", "completed_at") |
|
0 commit comments