diff --git a/ami/exports/all_captures.py b/ami/exports/all_captures.py new file mode 100644 index 000000000..7531c994d --- /dev/null +++ b/ami/exports/all_captures.py @@ -0,0 +1,62 @@ +import logging + +from django.db import models +from django.db.models.functions import TruncDate, TruncTime +from rest_framework import serializers + +from ami.main.models import SourceImage + +logger = logging.getLogger(__name__) + + +class CapturesTabularSerializer(serializers.Serializer): + capture_id = serializers.IntegerField(source="id") + latitude = serializers.FloatField() + longitude = serializers.FloatField() + datetime_observed = serializers.DateTimeField() + date_observed = serializers.DateField() + time_observed = serializers.TimeField() + session_id = serializers.IntegerField() + session_start_datetime = serializers.DateTimeField() + session_start_date = serializers.DateField() + session_start_time = serializers.TimeField() + session_end_datetime = serializers.DateTimeField() + session_end_date = serializers.DateField() + session_end_time = serializers.TimeField() + session_duration = serializers.DurationField() + station_name = serializers.CharField() + station_id = serializers.IntegerField() + device_id = serializers.IntegerField() + device_name = serializers.CharField() + detections_count = serializers.IntegerField(source="detections_count_fresh") + occurrences_count = serializers.IntegerField() + taxa_count = serializers.IntegerField() + + +def get_queryset(): + return ( + SourceImage.objects.all() + .annotate( + datetime_observed=models.F("timestamp"), + date_observed=TruncDate("timestamp"), + time_observed=TruncTime("timestamp"), + latitude=models.F("deployment__latitude"), + longitude=models.F("deployment__longitude"), + session_id=models.F("event_id"), + session_start_datetime=models.F("event__start"), + session_start_date=TruncDate("event__start"), + session_start_time=TruncTime("event__start"), + session_end_datetime=models.F("event__end"), + 
session_end_date=TruncDate("event__end"),
+            session_end_time=TruncTime("event__end"),
+            session_duration=models.F("event__end") - models.F("event__start"),
+            station_name=models.F("deployment__name"),
+            station_id=models.F("deployment_id"),
+            device_id=models.F("deployment__device_id"),
+            device_name=models.F("deployment__device__name"),
+            detections_count_fresh=models.Count("detections", distinct=True),
+            occurrences_count=models.Count("detections__occurrence", distinct=True),
+            taxa_count=models.Count("detections__occurrence__determination", distinct=True),
+        )
+        .order_by("datetime_observed")
+    )
diff --git a/ami/exports/all_sessions.py b/ami/exports/all_sessions.py
new file mode 100644
index 000000000..cc58c5c46
--- /dev/null
+++ b/ami/exports/all_sessions.py
@@ -0,0 +1,59 @@
+import logging
+
+from django.db import models
+from django.db.models.functions import TruncDate, TruncTime
+from rest_framework import serializers
+
+from ami.main.models import Event
+
+logger = logging.getLogger(__name__)
+
+
+class SessionsTabularSerializer(serializers.Serializer):
+    session_id = serializers.IntegerField(source="id")
+    session_start_datetime = serializers.DateTimeField()
+    session_start_date = serializers.DateField()
+    session_start_time = serializers.TimeField()
+    session_end_datetime = serializers.DateTimeField()
+    session_end_date = serializers.DateField()
+    session_end_time = serializers.TimeField()
+    session_duration = serializers.DurationField()
+    latitude = serializers.FloatField()
+    longitude = serializers.FloatField()
+    station_name = serializers.CharField()
+    station_id = serializers.IntegerField()
+    device_id = serializers.IntegerField()
+    device_name = serializers.CharField()
+    # NOTE(review): the "*_fresh" names below are the annotations added in
+    # get_queryset(); presumably the suffix avoids shadowing same-named
+    # attributes on Event — confirm against the model.
+    captures_count = serializers.IntegerField(source="captures_count_fresh")
+    detections_count = serializers.IntegerField(source="detections_count_fresh")
+    occurrences_count = serializers.IntegerField(source="occurrences_count_fresh")
+    taxa_count = serializers.IntegerField()
+
+
+def get_queryset():
+    return (
+
Event.objects.all()
+        .annotate(
+            session_id=models.F("id"),
+            session_start_datetime=models.F("start"),
+            session_start_date=TruncDate("start"),
+            session_start_time=TruncTime("start"),
+            session_end_datetime=models.F("end"),
+            session_end_date=TruncDate("end"),
+            session_end_time=TruncTime("end"),
+            session_duration=models.F("end") - models.F("start"),
+            latitude=models.F("deployment__latitude"),
+            longitude=models.F("deployment__longitude"),
+            station_name=models.F("deployment__name"),
+            station_id=models.F("deployment_id"),
+            device_id=models.F("deployment__device_id"),
+            device_name=models.F("deployment__device__name"),
+            captures_count_fresh=models.Count("captures", distinct=True),
+            detections_count_fresh=models.Count("captures__detections", distinct=True),
+            occurrences_count_fresh=models.Count("captures__detections__occurrence", distinct=True),
+            taxa_count=models.Count("captures__detections__occurrence__determination", distinct=True),
+        )
+        .order_by("session_start_datetime", "station_id")
+    )
diff --git a/ami/exports/apps.py b/ami/exports/apps.py
new file mode 100644
index 000000000..d3d96b498
--- /dev/null
+++ b/ami/exports/apps.py
@@ -0,0 +1,7 @@
+from django.apps import AppConfig
+from django.utils.translation import gettext_lazy as _
+
+
+class ExportsConfig(AppConfig):
+    name = "ami.exports"
+    verbose_name = _("Data Exports & Reports")
diff --git a/ami/exports/base.py b/ami/exports/base.py
new file mode 100644
index 000000000..df3690131
--- /dev/null
+++ b/ami/exports/base.py
@@ -0,0 +1,90 @@
+import csv
+import logging
+import typing
+
+from django.core.files.storage import default_storage
+from django.db import models
+from django.utils import timezone
+from django.utils.text import slugify
+from rest_framework import serializers
+from rest_framework.views import APIView
+from tqdm import tqdm
+
+logger = logging.getLogger(__name__)
+
+
+class BaseExportSerializer(serializers.Serializer):
+    """
+    Base serializer for exporting data in various
formats, from multiple models. + """ + + pass + + +class BaseExportView(APIView): + """ + Read-only API view for exporting data in various formats, from multiple models. + """ + + pass + + +def get_data_in_batches( + QuerySet: models.QuerySet, + Serializer: type[serializers.Serializer], + batch_size: int = 1000, +) -> typing.Iterator[list[dict]]: + items = QuerySet.iterator(chunk_size=batch_size) + batch = [] + for i, item in enumerate(items): + # logger.info(f"Processing item {i}") + try: + # item_data = { + # "user_id": item.id, + # "username": item.username, + # "email": item.email, + # "total_orders": Order.objects.filter(user=item).count(), + # "total_spent": Order.objects.filter(user=item).aggregate(total=Sum("total_amount"))["total"] or 0, + # } + serializer = Serializer(item) + item_data = serializer.data + batch.append(item_data) + + if len(batch) >= batch_size: + yield batch + batch = [] + except Exception as e: + logger.warning(f"Error processing item {i}: {str(e)}") + raise + if batch: + yield batch + + +def write_export( + report_name: str, + Serializer: type[serializers.Serializer], + QuerySet: models.QuerySet, +) -> str: + timestamp = timezone.now().strftime("%Y%m%d-%H%M%S") + file_name = f"{slugify(report_name)}-{timestamp}.csv" + file_path = file_name + + try: + with default_storage.open(file_path, "w") as file: + writer = csv.writer(file) + writer.writerow(Serializer().fields.keys()) # Write header + + # Calculate total items for progress bar + total_items = QuerySet.count() + + with tqdm(total=total_items, desc="Exporting data", unit="items") as pbar: + for batch in get_data_in_batches(Serializer=Serializer, QuerySet=QuerySet): + for item in batch: + writer.writerow(item.values()) + pbar.update(1) + + logger.info(f"CSV export generated successfully: {file_path}") + return file_path + except Exception as e: + logger.error(f"Error generating CSV export: {str(e)}") + raise diff --git a/ami/exports/by_capture.py b/ami/exports/by_capture.py new file 
mode 100644 index 000000000..534ea3223 --- /dev/null +++ b/ami/exports/by_capture.py @@ -0,0 +1,103 @@ +import logging +import typing + +from django.contrib.postgres.aggregates import ArrayAgg +from django.db import models +from django.db.models.functions import TruncDate, TruncTime +from rest_framework import serializers + +from ami.main.models import Detection, Taxon, TaxonRank + +logger = logging.getLogger(__name__) + + +class DetectionsByDeterminationAndCaptureTabularSerializer(serializers.Serializer): + capture_id = serializers.IntegerField(source="source_image_id") + latitude = serializers.FloatField() + longitude = serializers.FloatField() + datetime_observed = serializers.DateTimeField() + date_observed = serializers.DateField() + time_observed = serializers.TimeField() + session_id = serializers.IntegerField() + session_start_datetime = serializers.DateTimeField() + session_start_date = serializers.DateField() + session_start_time = serializers.TimeField() + session_end_datetime = serializers.DateTimeField() + session_end_date = serializers.DateField() + session_end_time = serializers.TimeField() + session_duration = serializers.DurationField() + taxon_id = serializers.IntegerField() + taxon_name = serializers.CharField() + taxon_rank = serializers.CharField() + taxon_count = serializers.IntegerField() + determination_score_max = serializers.FloatField() + detection_ids = serializers.CharField() + occurrence_ids = serializers.CharField() + station_name = serializers.CharField() + station_id = serializers.IntegerField() + device_id = serializers.IntegerField() + device_name = serializers.CharField() + + def to_representation(self, instance: typing.Any) -> dict[str, typing.Any]: + data = super().to_representation(instance) + try: + taxon: Taxon = Taxon.objects.get(id=data["taxon_id"]) + except Taxon.DoesNotExist: + logger.warning(f"Taxon with ID '{data['taxon_id']}' not found") + pass + else: + for taxon_rank in taxon.parents_json: + field_name = 
f"taxon_{taxon_rank.rank.name.lower()}" + data[field_name] = taxon_rank.name + + return data + + def get_fields(self): + fields = super().get_fields() + for rank in TaxonRank: + field_name = f"taxon_{rank.name.lower()}" + fields[field_name] = serializers.CharField(required=False) + return fields + + +def get_queryset(): + return ( + Detection.objects.all() + .select_related( + "occurrence", + "occurrence__determination", + "source_image", + ) + .values( + "source_image_id", + "occurrence__determination_id", + ) + .annotate( + capture_id=models.F("source_image_id"), + datetime_observed=models.F("source_image__timestamp"), + date_observed=TruncDate("source_image__timestamp"), + time_observed=TruncTime("source_image__timestamp"), + latitude=models.F("source_image__deployment__latitude"), + longitude=models.F("source_image__deployment__longitude"), + session_id=models.F("source_image__event_id"), + session_start_datetime=models.F("source_image__event__start"), + session_start_date=TruncDate("source_image__event__start"), + session_start_time=TruncTime("source_image__event__start"), + session_end_datetime=models.F("source_image__event__end"), + session_end_date=TruncDate("source_image__event__end"), + session_end_time=TruncTime("source_image__event__end"), + session_duration=models.F("source_image__event__end") - models.F("source_image__event__start"), + station_name=models.F("source_image__deployment__name"), + station_id=models.F("source_image__deployment_id"), + taxon_id=models.F("occurrence__determination_id"), + taxon_name=models.F("occurrence__determination__name"), + taxon_rank=models.F("occurrence__determination__rank"), + determination_score_max=models.Max("occurrence__determination_score"), + taxon_count=models.Count("id"), + detection_ids=ArrayAgg("id"), + occurrence_ids=ArrayAgg("occurrence_id"), + device_id=models.F("source_image__deployment__device_id"), + device_name=models.F("source_image__deployment__device__name"), + ) + .order_by("source_image_id", 
"-taxon_count", "-determination_score_max")
+    )
diff --git a/ami/exports/by_detection.py b/ami/exports/by_detection.py
new file mode 100644
index 000000000..c11796641
--- /dev/null
+++ b/ami/exports/by_detection.py
@@ -0,0 +1,94 @@
+import logging
+import typing
+
+from django.db import models
+from django.db.models.functions import TruncDate, TruncTime
+from rest_framework import serializers
+
+from ami.main.models import Detection, Taxon, TaxonRank
+
+logger = logging.getLogger(__name__)
+
+
+class DetectionsTabularSerializer(serializers.Serializer):
+    detection_id = serializers.IntegerField(source="id")
+    occurrence_id = serializers.IntegerField()
+    capture_id = serializers.IntegerField(source="source_image_id")
+    latitude = serializers.FloatField()
+    longitude = serializers.FloatField()
+    datetime_observed = serializers.DateTimeField()
+    date_observed = serializers.DateField()
+    time_observed = serializers.TimeField()
+    session_id = serializers.IntegerField()
+    session_start_datetime = serializers.DateTimeField()
+    session_start_date = serializers.DateField()
+    session_start_time = serializers.TimeField()
+    session_end_datetime = serializers.DateTimeField()
+    session_end_date = serializers.DateField()
+    session_end_time = serializers.TimeField()
+    session_duration = serializers.DurationField()
+    taxon_id = serializers.IntegerField()
+    taxon_name = serializers.CharField()
+    taxon_rank = serializers.CharField()
+    determination_score = serializers.FloatField()
+    station_name = serializers.CharField()
+    station_id = serializers.IntegerField()
+    device_id = serializers.IntegerField()
+    device_name = serializers.CharField()
+
+    def to_representation(self, instance: typing.Any) -> dict[str, typing.Any]:
+        data = super().to_representation(instance)
+        # A detection may have no occurrence or no determination yet (the
+        # by_capture serializer guards the same case); skip the per-rank
+        # taxon_* columns then instead of raising AttributeError.
+        occurrence = instance.occurrence
+        taxon: typing.Optional[Taxon] = occurrence.determination if occurrence else None
+        if taxon is None:
+            return data
+
+        for taxon_rank in taxon.parents_json:
+            field_name = f"taxon_{taxon_rank.rank.name.lower()}"
+            data[field_name] = taxon_rank.name
+
+        return data
+
+    def get_fields(self):
+        fields = super().get_fields()
+        for rank in TaxonRank:
+            field_name = f"taxon_{rank.name.lower()}"
+            fields[field_name] = serializers.CharField(required=False)
+        return fields
+
+
+def get_queryset():
+    return (
+        Detection.objects.all()
+        .select_related(
+            "occurrence",
+            "occurrence__determination",
+            "source_image",
+        )
+        .annotate(
+            capture_id=models.F("source_image_id"),
+            datetime_observed=models.F("source_image__timestamp"),
+            date_observed=TruncDate("source_image__timestamp"),
+            time_observed=TruncTime("source_image__timestamp"),
+            latitude=models.F("source_image__deployment__latitude"),
+            longitude=models.F("source_image__deployment__longitude"),
+            session_id=models.F("source_image__event_id"),
+            session_start_datetime=models.F("source_image__event__start"),
+            session_start_date=TruncDate("source_image__event__start"),
+            session_start_time=TruncTime("source_image__event__start"),
+            session_end_datetime=models.F("source_image__event__end"),
+            session_end_date=TruncDate("source_image__event__end"),
+            session_end_time=TruncTime("source_image__event__end"),
+            session_duration=models.F("source_image__event__end") - models.F("source_image__event__start"),
+            station_name=models.F("source_image__deployment__name"),
+            station_id=models.F("source_image__deployment_id"),
+            taxon_id=models.F("occurrence__determination_id"),
+            taxon_name=models.F("occurrence__determination__name"),
+            taxon_rank=models.F("occurrence__determination__rank"),
+            determination_score=models.F("occurrence__determination_score"),
+            device_id=models.F("source_image__deployment__device_id"),
+            device_name=models.F("source_image__deployment__device__name"),
+        )
+        .order_by("source_image_id", "-determination_score")
+    )
diff --git a/ami/exports/management/commands/export_by_capture.py b/ami/exports/management/commands/export_by_capture.py
new file mode 100644
index 000000000..cf5b1c757
--- /dev/null
+++
b/ami/exports/management/commands/export_by_capture.py @@ -0,0 +1,54 @@ +""" +Management command that runs the export_by_capture function in exports.py and reports the progress as it processes and +writes batches. +""" + +import logging +import typing + +from django.core.management.base import BaseCommand +from django.db import models + +from ami.exports import by_capture +from ami.exports.base import write_export + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Export data by capture" + + def add_arguments(self, parser) -> None: + parser.add_argument( + "--project-id", + type=int, + required=True, + help="Project ID to export data from", + ) + parser.add_argument( + "--collection-ids", + type=int, + nargs="+", + required=False, + default=[], + help="Collection IDs to export data from (space-separated list)", + ) + + def handle(self, *args, **options) -> None: + project_id: int = options["project_id"] + collection_ids: list[int] = options["collection_ids"] + + qs = by_capture.get_queryset().filter(occurrence__project=project_id) + if collection_ids: + qs = qs.filter(source_image__collections__in=collection_ids) + + fname = write_export( + "detections_by_determination_and_capture", + Serializer=by_capture.DetectionsByDeterminationAndCaptureTabularSerializer, + QuerySet=typing.cast(models.QuerySet, qs), + ) + # get full path to the file + print(f"Exported to {fname}") + + logger.info("Export by capture completed") + self.stdout.write(self.style.SUCCESS("Export by capture completed")) diff --git a/ami/exports/management/commands/export_by_detection.py b/ami/exports/management/commands/export_by_detection.py new file mode 100644 index 000000000..7fb36a46a --- /dev/null +++ b/ami/exports/management/commands/export_by_detection.py @@ -0,0 +1,55 @@ +""" +Management command that runs the export_by_capture function in exports.py and reports the progress as it processes and +writes batches. 
+""" + +import logging + +from django.core.management.base import BaseCommand + +from ami.exports import by_detection +from ami.exports.base import write_export + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Export data by detection and determination" + + def add_arguments(self, parser) -> None: + parser.add_argument( + "--project-id", + type=int, + required=True, + help="Project ID to export data from", + ) + parser.add_argument( + "--collection-ids", + type=int, + nargs="+", + required=False, + default=[], + help="Collection IDs to export data from (space-separated list)", + ) + + def handle(self, *args, **options): + # for i, batch in enumerate(by_capture.get_data_in_batches()) + # # print(f"Processing batch {batch}") + # print(f"Processing batch {i}") + project_id: int = options["project_id"] + collection_ids: list[int] = options["collection_ids"] + + qs = by_detection.get_queryset().filter(occurrence__project=project_id) + if collection_ids: + qs = qs.filter(source_image__collections__in=collection_ids) + + fname = write_export( + "detections", + Serializer=by_detection.DetectionsTabularSerializer, + QuerySet=qs, + ) + # get full path to the file + print(f"Exported to {fname}") + + logger.info("Export by detection completed") + self.stdout.write(self.style.SUCCESS("Export by detection completed")) diff --git a/ami/exports/management/commands/export_captures.py b/ami/exports/management/commands/export_captures.py new file mode 100644 index 000000000..6d2391ad0 --- /dev/null +++ b/ami/exports/management/commands/export_captures.py @@ -0,0 +1,55 @@ +""" +Management command that runs the export_by_capture function in exports.py and reports the progress as it processes and +writes batches. 
+""" + +import logging + +from django.core.management.base import BaseCommand + +from ami.exports import all_captures +from ami.exports.base import write_export + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Export data by capture" + + def add_arguments(self, parser) -> None: + parser.add_argument( + "--project-id", + type=int, + required=True, + help="Project ID to export data from", + ) + parser.add_argument( + "--collection-ids", + type=int, + nargs="+", + required=False, + default=[], + help="Collection IDs to export data from (space-separated list)", + ) + + def handle(self, *args, **options): + # for i, batch in enumerate(by_capture.get_data_in_batches()) + # # print(f"Processing batch {batch}") + # print(f"Processing batch {i}") + project_id: int = options["project_id"] + collection_ids: list[int] = options["collection_ids"] + + qs = all_captures.get_queryset().filter(project=project_id) + if collection_ids: + qs = qs.filter(collections__in=collection_ids) + + fname = write_export( + "captures", + Serializer=all_captures.CapturesTabularSerializer, + QuerySet=qs, + ) + # get full path to the file + print(f"Exported to {fname}") + + logger.info("Export by capture completed") + self.stdout.write(self.style.SUCCESS("Export by capture completed")) diff --git a/ami/exports/management/commands/export_sessions.py b/ami/exports/management/commands/export_sessions.py new file mode 100644 index 000000000..b3f33dba9 --- /dev/null +++ b/ami/exports/management/commands/export_sessions.py @@ -0,0 +1,42 @@ +""" +Management command that runs the export_by_capture function in exports.py and reports the progress as it processes and +writes batches. 
+"""
+
+import logging
+
+from django.core.management.base import BaseCommand, CommandParser
+
+from ami.exports import all_sessions
+from ami.exports.base import write_export
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+    help = "Export session data"
+
+    def add_arguments(self, parser: CommandParser) -> None:
+        parser.add_argument(
+            "--project-id",
+            type=int,
+            required=True,
+            help="Project ID to export data from",
+        )
+
+    def handle(self, *args, **options):
+        # for i, batch in enumerate(by_capture.get_data_in_batches())
+        # # print(f"Processing batch {batch}")
+        # print(f"Processing batch {i}")
+        project_id: int = options["project_id"]
+
+        fname = write_export(
+            "sessions",
+            Serializer=all_sessions.SessionsTabularSerializer,
+            QuerySet=all_sessions.get_queryset().filter(project=project_id),
+        )
+        # get full path to the file
+        print(f"Exported to {fname}")
+
+        logger.info("Export sessions completed")
+        self.stdout.write(self.style.SUCCESS("Export sessions completed"))
diff --git a/ami/exports/models.py b/ami/exports/models.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/ami/main/api/views.py b/ami/main/api/views.py
index 312e7cc14..3ea97cbcd 100644
--- a/ami/main/api/views.py
+++ b/ami/main/api/views.py
@@ -631,6 +631,8 @@ class SourceImageCollectionViewSet(DefaultViewSet, ProjectMixin):
         SourceImageCollection.objects.all()
         .with_source_images_count()  # type: ignore
         .with_source_images_with_detections_count()
+        .with_occurrences_count()
+        .with_taxa_count()
         .prefetch_related("jobs")
     )
     serializer_class = SourceImageCollectionSerializer
diff --git a/ami/main/management/commands/export_occurrences.py b/ami/main/management/commands/export_occurrences.py
new file mode 100644
index 000000000..ad3b2ea95
--- /dev/null
+++ b/ami/main/management/commands/export_occurrences.py
@@ -0,0 +1,94 @@
+import json
+import logging
+from typing import Any
+
+from django.core.management.base import BaseCommand, CommandError
+from
django.db.models import Q +from django.test.client import RequestFactory +from tqdm import tqdm + +from ami.main.api.serializers import OccurrenceSerializer +from ami.main.models import Deployment, Occurrence, Project + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + + +class OccurrenceExportSerializer(OccurrenceSerializer): + class Meta(OccurrenceSerializer.Meta): + extra_kwargs = { + "url": {"view_name": "api:occurrence-detail"}, + } + + def to_representation(self, instance): + representation = super().to_representation(instance) + for field in self.fields: + try: + logger.debug(f"Processing field: {field}") + representation[field] = self.fields[field].to_representation(getattr(instance, field)) + except Exception as e: + logger.error(f"Error processing field {field}: {e}") + raise e + return representation + + +class Command(BaseCommand): + help = "Export Occurrence model instances to JSON" + + def add_arguments(self, parser): + parser.add_argument("--project", type=int, help="Filter by project ID") + parser.add_argument("--deployment", type=int, help="Filter by deployment ID") + parser.add_argument("--start-date", type=str, help="Filter by start date (YYYY-MM-DD)") + parser.add_argument("--end-date", type=str, help="Filter by end date (YYYY-MM-DD)") + parser.add_argument("--output", type=str, default="occurrences_export.json", help="Output file name") + parser.add_argument("--limit", type=int, default=10, help="Limit the number of occurrences to export") + parser.add_argument( + "--base-url", type=str, default="http://example.com", help="Base URL for hyperlinked fields" + ) + + def handle(self, *args: Any, **options: Any) -> None: + queryset = Occurrence.objects.all() + + if options["project"]: + try: + project = Project.objects.get(pk=options["project"]) + queryset = queryset.filter(project=project) + except Project.DoesNotExist: + raise CommandError(f"Project with ID {options['project']} does not exist") + + if options["deployment"]: + 
try: + deployment = Deployment.objects.get(pk=options["deployment"]) + queryset = queryset.filter(deployment=deployment) + except Deployment.DoesNotExist: + raise CommandError(f"Deployment with ID {options['deployment']} does not exist") + + date_filter = Q() + if options["start_date"]: + date_filter &= Q(event__start__gte=options["start_date"]) + if options["end_date"]: + date_filter &= Q(event__start__lte=options["end_date"]) + queryset = queryset.filter(date_filter) + + limit = options["limit"] + queryset = queryset[:limit] + total_occurrences = queryset.count() + self.stdout.write(f"Exporting up to {limit} occurrences...") + + serialized_data = [] + + # Create a fake request for the serializer context + factory = RequestFactory() + fake_request = factory.get("/") + fake_request.META["HTTP_HOST"] = options["base_url"] + + for occurrence in tqdm(queryset, total=total_occurrences, desc="Exporting occurrences"): + serializer = OccurrenceExportSerializer(occurrence, context={"request": fake_request}) + serialized_data.append(serializer.data) + + with open(options["output"], "w") as f: + json.dump(serialized_data, f, indent=2) + + self.stdout.write( + self.style.SUCCESS(f"Successfully exported {total_occurrences} occurrences to {options['output']}") + ) diff --git a/ami/main/models.py b/ami/main/models.py index 7432e13c3..c2f305728 100644 --- a/ami/main/models.py +++ b/ami/main/models.py @@ -3004,25 +3004,19 @@ def with_source_images_with_detections_count(self): ) ) - def with_occurrences_count(self, classification_threshold: float = 0): + def with_occurrences_count(self): return self.annotate( occurrences_count=models.Count( "images__detections__occurrence", - filter=models.Q( - images__detections__occurrence__determination_score__gte=classification_threshold, - ), distinct=True, ) ) - def with_taxa_count(self, classification_threshold: float = 0): + def with_taxa_count(self): return self.annotate( taxa_count=models.Count( 
"images__detections__occurrence__determination", distinct=True, - filter=models.Q( - images__detections__occurrence__determination_score__gte=classification_threshold, - ), ) ) diff --git a/config/settings/base.py b/config/settings/base.py index 7a6a522c5..68ea01cbe 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -101,6 +101,7 @@ "ami.jobs", "ami.ml", "ami.labelstudio", + "ami.exports", ] # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS @@ -378,8 +379,8 @@ CSRF_TRUSTED_ORIGINS = env.list( "DJANGO_CSRF_TRUSTED_ORIGINS", default=[ - "https://api.dev.insectai.org", - "http://api.dev.insectai.org", + "http://localhost:4000/", + "http://localhost:8000/", EXTERNAL_BASE_URL, ], # type: ignore[no-untyped-call] ) diff --git a/ui/src/pages/overview/collections/collection-columns.tsx b/ui/src/pages/overview/collections/collection-columns.tsx index 48c1c62bc..56f42a60f 100644 --- a/ui/src/pages/overview/collections/collection-columns.tsx +++ b/ui/src/pages/overview/collections/collection-columns.tsx @@ -51,7 +51,43 @@ export const columns: (projectId: string) => TableColumn[] = ( textAlign: TextAlign.Right, }, renderCell: (item: Collection) => ( - + + ), + }, + { + id: 'occurrences', + name: translate(STRING.FIELD_LABEL_OCCURRENCES), + sortField: 'occurrences_count', + styles: { + textAlign: TextAlign.Right, + }, + renderCell: (item: Collection) => ( + + + + ), + }, + { + id: 'taxa', + name: translate(STRING.FIELD_LABEL_SPECIES), + sortField: 'taxa_count', + styles: { + textAlign: TextAlign.Right, + }, + renderCell: (item: Collection) => ( + + + ), }, { diff --git a/ui/src/utils/useFilters.ts b/ui/src/utils/useFilters.ts index b7bb2a2b5..52ea1b94e 100644 --- a/ui/src/utils/useFilters.ts +++ b/ui/src/utils/useFilters.ts @@ -104,8 +104,8 @@ export const AVAILABLE_FILTERS: { field: 'verified', }, { - label: 'Verified by', - field: 'verified_by_me', + label: 'Capture collection', 
+ field: 'collection', }, ]