Merged
Changes from all commits
4 changes: 2 additions & 2 deletions apps/common/admin.py
@@ -1,9 +1,9 @@
 import typing
-from datetime import datetime

 from django.contrib import admin
 from django.db import models
 from django.http import HttpRequest
+from django.utils import timezone
 from djangoql.admin import DjangoQLSearchMixin  # type: ignore[reportMissingTypeStubs]

 from apps.common.firebase.push import FirebaseAnnouncementPush
@@ -127,7 +127,7 @@ def save_model(self, request, obj, form, change):  # type: ignore[reportMissingP
         obj.modified_by = request.user
         if obj.is_archived:
             obj.archived_by = request.user
-            obj.archived_at = datetime.now()
+            obj.archived_at = timezone.now()
         else:
             obj.archived_by = None
             obj.archived_at = None
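The switch from `datetime.now()` to `timezone.now()` matters because Django stores timezone-aware datetimes when `USE_TZ = True`. A minimal sketch of the difference outside the project (the `settings.configure` call stands in for the real settings module):

```python
from datetime import datetime

from django.conf import settings

settings.configure(USE_TZ=True)  # stand-in for the project's settings

from django.utils import timezone

naive = datetime.now()   # local time, no tzinfo; saving it to a DateTimeField triggers a naive-datetime RuntimeWarning
aware = timezone.now()   # UTC with tzinfo attached, consistent with what Django stores

print(naive.tzinfo, aware.tzinfo)  # None vs. UTC
```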
3 changes: 2 additions & 1 deletion apps/common/firebase/base.py
@@ -164,7 +164,8 @@ class RelaxedModel(self.firebase_model_class):
             model_obj.update_firebase_push_status(FirebasePushStatusEnum.FAILED)
         except Exception:
             logger.error(
-                "Firebase push error: Unexpected error occurred",
+                "Firebase push error (%s): Unexpected error occurred",
+                f"{self.firebase_model_class.__module__}.{self.firebase_model_class.__qualname__}",
                 extra={"id": model_obj.pk},
                 exc_info=True,
             )
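Passing the class path as a separate `%s` argument (rather than interpolating it into the message) keeps the template constant for log aggregation and defers formatting until the record is actually emitted. A self-contained sketch of the same pattern, with illustrative names:

```python
import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)

class ExamplePusher:
    firebase_model_class = dict  # stand-in for the real firebase model class

    def push(self, pk: int) -> None:
        try:
            raise RuntimeError("simulated push failure")
        except Exception:
            logger.error(
                "Firebase push error (%s): Unexpected error occurred",
                f"{self.firebase_model_class.__module__}.{self.firebase_model_class.__qualname__}",
                extra={"id": pk},  # lands on the LogRecord for structured handlers
                exc_info=True,     # attaches the traceback
            )

ExamplePusher().push(42)
```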
9 changes: 7 additions & 2 deletions apps/contributor/firebase/pull.py
@@ -2,6 +2,7 @@
 import logging

 from django.db import connection, transaction
+from django.utils import timezone
 from pyfirebase_mapswipe import extended_models as firebase_ext_models
 from pyfirebase_mapswipe import models as firebase_models

@@ -37,13 +38,17 @@ def pull_users_from_firebase():

     users_to_pull = list[ContributorUser]()
     for key, valid_user in valid_users:
+        username = valid_user.username
+        # XXX: For OSM users, firebase doesn't include username
+        if username in ["", None, firebase_models.UNDEFINED]:
+            username = key
         user = ContributorUser(
             firebase_id=key,
-            username=valid_user.username or key,  # XXX: For OSM users, firebase doesn't include username
+            username=username,
             created_at=valid_user.created,
             modified_at=valid_user.created,
             # NOTE: Setting firebase_last_pushed so that we can send updates to firebase.
-            firebase_last_pushed=datetime.datetime.now(),
+            firebase_last_pushed=timezone.now(),
             firebase_push_status=FirebasePushStatusEnum.SUCCESS,
         )
         users_to_pull.append(user)
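The explicit membership test replaces `valid_user.username or key` because `or` only falls back on falsy values; if the library's missing-value sentinel (`firebase_models.UNDEFINED`) is truthy, it would leak through as a username. A sketch with a stand-in sentinel (the real one lives in `pyfirebase_mapswipe`):

```python
class _Undefined:
    """Stand-in for firebase_models.UNDEFINED."""
    def __repr__(self) -> str:
        return "UNDEFINED"

UNDEFINED = _Undefined()

def resolve_username(username, key):
    # XXX: For OSM users, firebase doesn't include username
    if username in ["", None, UNDEFINED]:
        return key
    return username

assert resolve_username("alice", "fb-1") == "alice"
assert resolve_username(None, "fb-2") == "fb-2"
assert resolve_username(UNDEFINED, "fb-3") == "fb-3"
assert (UNDEFINED or "fb-3") is UNDEFINED  # why the old `or` fallback was not enough
```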
@@ -574,7 +574,8 @@ def create_project(
         requesting_organization=get_organization_by_name(requesting_organization, bot_user),
         created_by_id=get_user_by_contributor_user_firebase_id(existing_project.created_by, fallback=bot_user),
         modified_by_id=get_user_by_contributor_user_firebase_id(existing_project.created_by, fallback=bot_user),
-        project_type_specifics=existing_project.project_type_specifics,
+        # This was modified in the database manually for some projects
+        # project_type_specifics=existing_project.project_type_specifics,
         description=existing_project.project_details.strip() if existing_project.project_details else "",
     )

80 changes: 65 additions & 15 deletions apps/mapping/firebase/utils.py
@@ -1,5 +1,7 @@
 import csv
 import logging
+import typing
+from pathlib import Path

 import dateutil.parser
 from django.db import connection, transaction
@@ -322,6 +324,68 @@ def _cleanup(_cursor: "CursorWrapper"):
         _cleanup(cursor_)


+def process_invalid_temp_results(
+    firebase_cleanup: FirebaseCleanup,
+):
+    base_qs = MappingSessionResultTemp.objects.filter(is_firebase_mapping_valid=False)
+
+    invalid_results_count = base_qs.count()
+    if invalid_results_count == 0:
+        return
+
+    logger.warning("%s results have been flagged as invalid", invalid_results_count)
+
+    # Add unsynced users to firebase, which will be processed by the user sync task
+    invalid_user_firebase_ids = (
+        base_qs.filter(contributor_user_id__isnull=True).values_list("contributor_user_firebase_id", flat=True).distinct()
+    )
+    for invalid_user_firebase_id in invalid_user_firebase_ids:
+        logger.warning(
+            "Adding %s to the firebase user update %s",
+            invalid_user_firebase_id,
+            Config.FirebaseKeys.contributor_user_updates(),
+        )
+        Config.FIREBASE_HELPER.ref(
+            Config.FirebaseKeys.contributor_user_update(invalid_user_firebase_id),
+        ).set(True)
+
+    # Skip firebase cleanup for invalid mapping data
+    invalid_result_temp_qs = base_qs.values_list(
+        "project_firebase_id",
+        "group_firebase_id",
+        "contributor_user_firebase_id",
+    ).distinct()
+
+    for project_firebase_id, group_firebase_id, contributor_user_firebase_id in invalid_result_temp_qs:
+        firebase_cleanup.undo_mark_as_delete(
+            project_firebase_id=project_firebase_id,
+            group_firebase_id=group_firebase_id,
+            contributor_user_firebase_id=contributor_user_firebase_id,
+        )
+
+    try:
+        # NOTE: For debugging, store the latest invalid dataset to internal directory
+        with Path.open(
+            Config.InternalDir.LAST_RUN_MAPPING_SESSION_INVALID_DATA,
+            "w",
+            newline="",
+            encoding="utf-8",
+        ) as f:
+            fields = [field.name for field in MappingSessionResultTemp._meta.fields]
+
+            writer = csv.DictWriter(f, fieldnames=fields)
+            writer.writeheader()
+
+            for row in base_qs.values(*fields).iterator(chunk_size=2000):
+                writer.writerow(row)
+        logger.info("Stored invalid mapping data to %s", Config.InternalDir.LAST_RUN_MAPPING_SESSION_INVALID_DATA)
+    except Exception:
+        logger.error(
+            "Failed to generate mapping session invalid data export to internal directory",
+            exc_info=True,
+        )
+
+
 def transfer_results_from_temp_tables(
     firebase_cleanup: FirebaseCleanup,
 ):
@@ -337,21 +401,7 @@ def transfer_results_from_temp_tables(
         cursor.execute(SQL_QUERY_TO_TRANSFER_TEMP_TABLE_DATA_TO_MAPPING_SESSION_USER_GROUP)
         logger.info("Transferred staging results to real tables")

-        invalid_result_temp_qs = (
-            MappingSessionResultTemp.objects.filter(is_firebase_mapping_valid=False)
-            .values_list(
-                "project_firebase_id",
-                "group_firebase_id",
-                "contributor_user_firebase_id",
-            )
-            .distinct()
-        )
-        for project_firebase_id, group_firebase_id, contributor_user_firebase_id in invalid_result_temp_qs:
-            firebase_cleanup.undo_mark_as_delete(
-                project_firebase_id=project_firebase_id,
-                group_firebase_id=group_firebase_id,
-                contributor_user_firebase_id=contributor_user_firebase_id,
-            )
+        process_invalid_temp_results(firebase_cleanup)

         cleanup_temp_tables(cursor)

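`process_invalid_temp_results` pulls three steps out of the transfer function: re-queueing unsynced users, undoing the firebase cleanup marks, and dumping the offending rows to CSV for debugging. The dump itself is a reusable pattern; a sketch under the assumption of an arbitrary Django model queryset:

```python
import csv
from pathlib import Path

def dump_queryset_to_csv(qs, outfile: Path) -> None:
    """Stream a queryset to CSV without loading every row into memory."""
    fields = [field.name for field in qs.model._meta.fields]
    with outfile.open("w", newline="", encoding="utf-8") as f:
        writer = csv.DictWriter(f, fieldnames=fields)
        writer.writeheader()
        # .values(*fields) yields plain dicts keyed by field name;
        # .iterator(chunk_size=2000) bounds memory on large tables.
        for row in qs.values(*fields).iterator(chunk_size=2000):
            writer.writerow(row)
```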
25 changes: 21 additions & 4 deletions apps/project/exports/overall_stats.py
@@ -6,6 +6,7 @@
 import typing
 from pathlib import Path

+from django.contrib.gis.db.models.functions import AsWKT
 from django.core.files.base import ContentFile
 from django.db import models
 from django.db.models.fields.files import FieldFile
@@ -43,7 +44,7 @@ def regenerate_project_stats_by_types_csv():
         "project_type": None,
         "project_type_display": MANUAL_FIELD,
         "projects_count": models.Count("*"),
-        "total_area_sqkm": models.F("total_area"),
+        "total_area_sqkm": models.Sum("total_area"),
         "total_number_of_results": models.Count("number_of_results"),
         "total_number_of_results_progress": models.Count("number_of_results_for_progress"),
         "average_number_of_users_per_project": models.Avg("number_of_contributor_users"),
@@ -75,8 +76,10 @@

 def regenerate_projects_csv(temp_projects_csv: typing.IO):  # type: ignore[reportMissingTypeArgument]
     logger.info("Processing regenerate_projects_csv")
+
     fieldnames = {
         "id": None,
+        "firebase_id": None,
         "name": Project.generate_name_query(),
         "description": None,
         "look_for": None,
@@ -90,8 +93,10 @@ def regenerate_projects_csv(temp_projects_csv: typing.IO):  # type: ignore[repor
         "status": None,
         "status_display": MANUAL_FIELD,
         "area_sqkm": models.F("aoi_geometry__total_area"),
-        "centroid": None,  # TODO: use this after removing from model models.F("aoi_geometry__centroid"),
-        "geom": models.F("aoi_geometry__geometry"),
+        # TODO: Change _centroid to centroid after `centroid` field is removed from the project's table
+        "centroid": MANUAL_FIELD,
+        "_centroid": AsWKT("aoi_geometry__centroid"),
+        "geom": AsWKT("aoi_geometry__geometry"),
         "progress": None,  # NOTE: This is changed to float later
         "number_of_contributor_users": None,
         "number_of_results": None,
@@ -100,6 +105,9 @@ def regenerate_projects_csv(temp_projects_csv: typing.IO):  # type: ignore[repor
     }

     projects_aggregate_qs = _project_queryset(fieldnames)
+
+    fieldnames.pop("_centroid")
+
     writer = csv.DictWriter(temp_projects_csv, fieldnames=fieldnames)
     writer.writeheader()

@@ -113,6 +121,8 @@ def regenerate_projects_csv(temp_projects_csv: typing.IO):  # type: ignore[repor
                 name=image_file,
             ),
         )
+        # TODO: Remove this logic to set centroid after `centroid` field is removed from the project's table
+        data["centroid"] = data.pop("_centroid")
         data["image_url"] = image_file_url
         data["status_display"] = ProjectStatusEnum(data["status"]).label
         data["project_type_display"] = ProjectTypeEnum(data["project_type"]).label
@@ -144,6 +154,13 @@ def _regenerate_projects_centroid_for_geometry_field(
     tmp_geojson_outfile = Config.TEMP_DIR / f"projects_centroid_{geometry_field}_{get_random_string(6)}.geojson"
     inputfile_without_path = projects_csv_inputfile.name.split("/")[-1].replace(".csv", "")

+    # TODO: Use EXCLUDE after upgrading gdal to > 3.9.0 https://github.com/OSGeo/gdal/pull/8675
+    # With that, we can use `SELECT * EXCLUDE(geom), CAST(...` to exclude one column
+    with Path.open(projects_csv_inputfile, "r") as fp:
+        csv_reader = csv.DictReader(fp)
+        inputfile_columns = [column for column in csv_reader.fieldnames or [] if column != "geom"]
+    inputfile_columns_str = ",".join(inputfile_columns)
+
     subprocess.run(  # noqa: S603
         [
             "/usr/bin/ogr2ogr",
@@ -154,7 +171,7 @@ def _regenerate_projects_centroid_for_geometry_field(
             str(tmp_geojson_outfile),
             str(projects_csv_inputfile),
             "-sql",
-            f'SELECT *, CAST({geometry_field} as geometry) FROM "{inputfile_without_path}"',
+            f'SELECT {inputfile_columns_str}, CAST({geometry_field} as geometry) FROM "{inputfile_without_path}"',
         ],
         check=True,
     )
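With `geom` now exported as WKT text, `SELECT *` would carry the raw string column into the GeoJSON alongside the `CAST` geometry, so the column list is built from the CSV header minus `geom`. A standalone sketch of the same invocation (the `-f GeoJSON` flag is an assumption; the hidden lines of the diff may pass other flags):

```python
import csv
import subprocess
from pathlib import Path

def csv_to_geojson(csv_infile: Path, geojson_outfile: Path, geometry_field: str = "geom") -> None:
    layer = csv_infile.name.removesuffix(".csv")  # ogr2ogr names the layer after the file
    # Older GDAL lacks SELECT * EXCLUDE(...) (see the TODO above), so enumerate
    # every column except the WKT one, then re-add it once, CAST to a geometry.
    with csv_infile.open("r") as fp:
        columns = [c for c in csv.DictReader(fp).fieldnames or [] if c != geometry_field]
    subprocess.run(
        [
            "ogr2ogr",
            "-f", "GeoJSON",
            str(geojson_outfile),
            str(csv_infile),
            "-sql",
            f'SELECT {",".join(columns)}, CAST({geometry_field} as geometry) FROM "{layer}"',
        ],
        check=True,
    )
```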
2 changes: 1 addition & 1 deletion firebase
Submodule firebase updated 0 files
9 changes: 8 additions & 1 deletion main/config.py
@@ -1,11 +1,11 @@
 import os
 import typing
 from dataclasses import dataclass
+from pathlib import Path

 from django.conf import settings

 if typing.TYPE_CHECKING:
-    from pathlib import Path
     from urllib.parse import ParseResult as URLParseResult

     from utils.firebase import FirebaseHelper
@@ -58,6 +58,11 @@ class Config:
     EXISTING_SYSTEM_API = typing.cast("URLParseResult", getattr(settings, "EXISTING_SYSTEM_API", None))
     EXISTING_SYSTEM_API_INSECURE = typing.cast("bool", getattr(settings, "EXISTING_SYSTEM_API_INSECURE", False))

+    class InternalDir:
+        INTERNAL_ROOT = Path(settings.INTERNAL_ROOT)
+
+        LAST_RUN_MAPPING_SESSION_INVALID_DATA = INTERNAL_ROOT / "last-run-invalid-mapping-sessions.csv"
+
     class CommunityDashboardKeys:
         @staticmethod
         def contributor_user(firebase_id: str):
@@ -163,6 +168,8 @@ def announcement():
         return "/v2/announcement"


+Config.InternalDir.INTERNAL_ROOT.mkdir(parents=True, exist_ok=True)
+
 # FIXME: Import utils/geo/raster_tile_server/config.py here
 # FIXME: Import utils/geo/vector_tile_server/config.py here

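`Path` moves out of the `TYPE_CHECKING` block because `InternalDir` now calls `Path(...)` in its class body, which executes at import time; the `mkdir` at module bottom then guarantees the directory exists before any task writes to it. A compressed sketch with a stand-in settings object:

```python
import typing
from pathlib import Path  # runtime import: Path(...) runs when the class body executes

if typing.TYPE_CHECKING:
    # annotation-only imports can stay behind the flag
    from urllib.parse import ParseResult as URLParseResult  # noqa: F401

class FakeSettings:  # stand-in for django.conf.settings
    INTERNAL_ROOT = "/tmp/mapswipe-internal"

class InternalDir:
    INTERNAL_ROOT = Path(FakeSettings.INTERNAL_ROOT)
    LAST_RUN_MAPPING_SESSION_INVALID_DATA = INTERNAL_ROOT / "last-run-invalid-mapping-sessions.csv"

# Idempotent: parents=True builds intermediates, exist_ok=True tolerates reruns.
InternalDir.INTERNAL_ROOT.mkdir(parents=True, exist_ok=True)
```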
3 changes: 3 additions & 0 deletions main/settings.py
@@ -84,6 +84,7 @@ def urlparse(value) -> ParseResult:
     # -- Filesystem (default) XXX: Don't use in production?
     MEDIA_ROOT=(str, BASE_DIR / ".data/media"),
     STATIC_ROOT=(str, BASE_DIR / ".data/static"),
+    INTERNAL_ROOT=(str, BASE_DIR / ".data/internal"),
     # Email
     EMAIL_HOST=str,
     EMAIL_SUBJECT_PREFIX=(str, "Mapswipe:"),
@@ -385,6 +386,8 @@ def urlparse(value) -> ParseResult:
     },
 }

+INTERNAL_ROOT = env("INTERNAL_ROOT")
+
 assert STORAGE_OVERWRITE_KEY in STORAGES, f"{STORAGE_OVERWRITE_KEY} should be defined in STORAGES"

 # Default primary key field type
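`INTERNAL_ROOT` follows the settings module's existing django-environ convention: a `(type, default)` entry in the `Env(...)` scheme, then `env("INTERNAL_ROOT")` reads the environment with the `.data/internal` fallback. In isolation:

```python
from pathlib import Path

import environ

BASE_DIR = Path(__file__).resolve().parent

env = environ.Env(
    # (cast, default): the default applies when INTERNAL_ROOT is unset in the environment
    INTERNAL_ROOT=(str, str(BASE_DIR / ".data/internal")),
)

INTERNAL_ROOT = env("INTERNAL_ROOT")  # env var in deployments, local fallback otherwise
```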