diff --git a/apps/mapping/models.py b/apps/mapping/models.py
index b94f837f..80d7d02d 100644
--- a/apps/mapping/models.py
+++ b/apps/mapping/models.py
@@ -26,6 +26,17 @@ def get_client_type(cls, value: str) -> "MappingSessionClientTypeEnum":
             "web": cls.WEB,
         }.get(value, cls.UNKNOWN)
 
+    @classmethod
+    def get_client_type_label_sql(cls, field: str) -> str:
+        return f"""
+            CASE {field}
+                WHEN {cls.MOBILE_ANDROID.value} THEN 'android'
+                WHEN {cls.MOBILE_IOS.value} THEN 'ios'
+                WHEN {cls.WEB.value} THEN 'web'
+                ELSE 'unknown'
+            END
+        """
+
 
 class MappingSession(models.Model):
     """Model representing a mapping session where a contributor user worked on a specific project task group."""
diff --git a/apps/project/exports/mapping_results.py b/apps/project/exports/mapping_results.py
index 49fb9f4a..cdad664f 100644
--- a/apps/project/exports/mapping_results.py
+++ b/apps/project/exports/mapping_results.py
@@ -8,6 +8,7 @@ from apps.contributor.models import ContributorUser
 
 from apps.mapping.models import (
     MappingSession,
+    MappingSessionClientTypeEnum,
     MappingSessionResult,
 )
 from apps.project.models import Project, ProjectTask, ProjectTaskGroup
@@ -19,7 +20,6 @@
 
 
 def generate_mapping_results(*, destination_filename: Path, project: Project) -> pd.DataFrame:
-    # TODO: client_type IS ENUM -- CONVERT TO VALUE?
     sql_query = sql.SQL(f"""
         COPY (
             SELECT
@@ -38,7 +38,9 @@ def generate_mapping_results(*, destination_filename: Path, project: Project) ->
                 MS.{fd_name(MappingSession.start_time)} as start_time,
                 MS.{fd_name(MappingSession.end_time)} as end_time,
                 MS.{fd_name(MappingSession.app_version)} as app_version,
-                MS.{fd_name(MappingSession.client_type)} as client_type,
+                (
+                    {MappingSessionClientTypeEnum.get_client_type_label_sql(f"MS.{fd_name(MappingSession.client_type)}")}
+                ) as client_type,
                 MSR.{fd_name(MappingSessionResult.result)} as result,
                 -- the username for users which login to MapSwipe with their
                 -- OSM account is not defined or ''.
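Review note: `get_client_type_label_sql` interpolates the enum's stored values straight into a `CASE` expression, so the results export now carries readable labels instead of raw numbers. A minimal standalone sketch of the rendering — the values 1/2/3 below are hypothetical stand-ins, not the real enum values:

```python
# Sketch only: enum values are hypothetical, the shape mirrors
# MappingSessionClientTypeEnum.get_client_type_label_sql.
from enum import IntEnum


class ClientType(IntEnum):
    MOBILE_ANDROID = 1  # hypothetical value
    MOBILE_IOS = 2      # hypothetical value
    WEB = 3             # hypothetical value

    @classmethod
    def label_sql(cls, field: str) -> str:
        # Render a CASE expression mapping the stored value to a label.
        return f"""
            CASE {field}
                WHEN {cls.MOBILE_ANDROID.value} THEN 'android'
                WHEN {cls.MOBILE_IOS.value} THEN 'ios'
                WHEN {cls.WEB.value} THEN 'web'
                ELSE 'unknown'
            END
        """


print(ClientType.label_sql("MS.client_type"))
# CASE MS.client_type WHEN 1 THEN 'android' ... ELSE 'unknown' END
```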
diff --git a/apps/project/exports/project_stats_by_date.py b/apps/project/exports/project_stats_by_date.py
index 65300342..3c5c0bde 100644
--- a/apps/project/exports/project_stats_by_date.py
+++ b/apps/project/exports/project_stats_by_date.py
@@ -177,6 +177,6 @@ def get_project_history(
 
     # merge contributors and progress
     project_history_df = progress_by_date_df.merge(contributors_by_date_df, left_on="day", right_on="day")
-    project_history_df["project_id"] = project.id
+    project_history_df["project_id"] = project.firebase_id
     project_history_df.to_csv(destination_filename)
     return project_history_df
diff --git a/apps/project/exports/project_tasks.py b/apps/project/exports/project_tasks.py
index acf6444f..83f45970 100644
--- a/apps/project/exports/project_tasks.py
+++ b/apps/project/exports/project_tasks.py
@@ -44,7 +44,8 @@
             PTG.{fd_name(ProjectTaskGroup.firebase_id)} as group_id,
             PT.{fd_name(ProjectTask.firebase_id)} as task_id,
             -- Metadata
-            ST_AsText({fd_name(ProjectTask.geometry)}) AS geom,
+            -- NOTE: Using ST_Multi only to make the exports backward compatible with previous exports
+            ST_AsText(ST_Multi({fd_name(ProjectTask.geometry)})) AS geom,
             '{project.project_type_specifics.get("zoom_level")}' as tile_z,
             -- NOTE: Existing tile_x and tile_y are passed from project_type_specifics now
             -- NOTE: this is destructured by normalize_project_type_specifics(write_sql_to_gzipped_csv)
diff --git a/apps/project/exports/tasking_manager_geometries.py b/apps/project/exports/tasking_manager_geometries.py
index 853a53fc..07057eab 100644
--- a/apps/project/exports/tasking_manager_geometries.py
+++ b/apps/project/exports/tasking_manager_geometries.py
@@ -59,9 +59,10 @@ def _get_row_value[T: int | float](
 
         task_id = row[1]
 
-        task_x = _get_row_value(column_index_map, row, "task_x")
-        task_y = _get_row_value(column_index_map, row, "task_y")
-        task_z = _get_row_value(column_index_map, row, "task_z")
+        # TODO: rename all task_N to tile_N
+        task_x = _get_row_value(column_index_map, row, "tile_x")
+        task_y = _get_row_value(column_index_map, row, "tile_y")
+        task_z = _get_row_value(column_index_map, row, "tile_z")
 
         # TODO: Add no_count here and use later
         project_data.append(
@@ -108,6 +109,8 @@ def _get_row_value[T: int | float](
                     task_x,
                     task_y,
                     task_z,
+                    # NOTE: We skip flattening to 2D only for backwards compatibility
+                    skip_flatten=True,
                 ),
             },
         )
diff --git a/apps/project/tasks.py b/apps/project/tasks.py
index 25da94e3..6e084d2c 100644
--- a/apps/project/tasks.py
+++ b/apps/project/tasks.py
@@ -104,4 +104,4 @@ def send_slack_message_for_project(project_id: int, action: Literal["progress-ch
         update_base_slack_message(client=mapslack, project=project, ts=base_slack_message_ts)
     if action == "progress-change":
         project.slack_progress_notifications = project.progress
-        project.save(update_fields=["slack_message_notifications"])
+        project.save(update_fields=["slack_progress_notifications"])
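Review note on the `ST_Multi` change in `project_tasks.py`: without it, tasks stored as plain `POLYGON` would now serialize differently from the `MULTIPOLYGON` strings in older exports, breaking byte-for-byte compatibility. A quick illustration of the WKT difference, using shapely as a stand-in for PostGIS (the export itself does this in SQL):

```python
# Illustration only: shapely mimics what ST_AsText vs. ST_AsText(ST_Multi(...)) emit.
from shapely.geometry import MultiPolygon, Polygon

poly = Polygon([(0, 0), (1, 0), (1, 1)])
print(poly.wkt)                  # POLYGON ((0 0, 1 0, 1 1, 0 0))
print(MultiPolygon([poly]).wkt)  # MULTIPOLYGON (((0 0, 1 0, 1 1, 0 0)))
```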
diff --git a/apps/project/tests/e2e_create_project_tile_map_service_test.py b/apps/project/tests/e2e_create_project_tile_map_service_test.py
index a6b6f35f..8007c280 100644
--- a/apps/project/tests/e2e_create_project_tile_map_service_test.py
+++ b/apps/project/tests/e2e_create_project_tile_map_service_test.py
@@ -1,12 +1,19 @@
+import csv
+import gzip
+import io
+import json
+import operator
 import typing
 from contextlib import contextmanager
 from datetime import datetime
 from pathlib import Path
 
 import json5
+from django.core.files.base import File
 from django.db.models.signals import pre_save
 from ulid import ULID
 
+from apps.common.models import AssetTypeEnum
 from apps.common.utils import remove_object_keys
 from apps.contributor.factories import ContributorUserFactory
 from apps.contributor.models import ContributorUserGroup
@@ -18,13 +25,69 @@
     MappingSessionUserGroup,
     MappingSessionUserGroupTemp,
 )
-from apps.project.models import Organization, Project
+from apps.project.models import Organization, Project, ProjectAsset, ProjectAssetExportTypeEnum, ProjectAssetInputTypeEnum
 from apps.tutorial.models import Tutorial
 from apps.user.factories import UserFactory
 from main.config import Config
 from main.tests import TestCase
 
 
+def read_json(
+    file_path: Path | File,
+    *,
+    compressed: bool = False,
+    ignore_fields: set[str] | None = None,
+):
+    if compressed:
+        with (
+            file_path.open("rb") as file,
+            gzip.GzipFile(fileobj=file, mode="rb") as gz,
+            io.TextIOWrapper(gz, encoding="utf-8") as text_stream,
+        ):
+            data = json.load(text_stream)
+    elif isinstance(file_path, Path):
+        with file_path.open(mode="r", encoding="utf-8") as file:
+            data = json.load(file)
+    else:
+        with file_path.open(mode="r") as file:
+            data = json.load(file)
+
+    if ignore_fields:
+        data = remove_object_keys(data, ignore_fields)
+
+    return data
+
+
+def read_csv(
+    file_path: Path | File,
+    *,
+    compressed: bool = False,
+    ignore_columns: set[str] | None = None,
+    sort_column: typing.Callable[[typing.Any], typing.Any] | None = None,
+):
+    if compressed:
+        with (
+            file_path.open("rb") as file,
+            gzip.GzipFile(fileobj=file, mode="rb") as gz,
+            io.TextIOWrapper(gz, encoding="utf-8") as text_stream,
+        ):
+            data = list(csv.DictReader(text_stream))
+    elif isinstance(file_path, Path):
+        with file_path.open(mode="r", encoding="utf-8") as file:
+            data = list(csv.DictReader(file))
+    else:
+        with file_path.open(mode="r") as file:
+            data = list(csv.DictReader(file))
+
+    if sort_column:
+        data.sort(key=sort_column)
+
+    if ignore_columns:
+        data = remove_object_keys(data, ignore_columns)
+
+    return data
+
+
 @contextmanager
 def create_override():
     def pre_save_override(sender: typing.Any, instance: typing.Any, **kwargs):  # type: ignore[reportMissingParameterType]
@@ -627,3 +690,250 @@ def _test_project(self, projectKey: str, filename: str):
         assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary"
         assert project_fb_data["progress"] == project.progress, "Progress should be synced with firebase"
         assert project_fb_data["contributorCount"] == 1, "Contributor count should be synced with firebase"
+
+        if not test_data.get("expected_project_exports_data"):
+            return
+
+        # Check groups export
+        groups_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.GROUPS,
+        ).first()
+        assert groups_project_asset is not None, "Groups project asset not found"
+
+        expected_groups = read_csv(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["groups"]),
+            ignore_columns={
+                "total_area",  # NOTE: previously empty, now real value
+                "time_spent_max_allowed",  # NOTE: previously empty, now real value
+            },
+        )
+        actual_groups = read_csv(
+            groups_project_asset.file,
+            compressed=True,
+            ignore_columns={
+                "total_area",  # NOTE: previously empty, now real value
+                "time_spent_max_allowed",  # NOTE: previously empty, now real value
+                "project_internal_id",  # NOTE: added for referencing
+                "group_internal_id",  # NOTE: added for referencing
+            },
+        )
+        assert expected_groups == actual_groups, "Difference found for groups export file."
+
+        # Check tasks export
+        tasks_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.TASKS,
+        ).first()
+        assert tasks_project_asset is not None, "Tasks project asset not found"
+
+        expected_tasks = read_csv(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["tasks"]),
+            sort_column=operator.itemgetter("task_id"),
+            ignore_columns={
+                "",  # NOTE: dataframe index
+            },
+        )
+        actual_tasks = read_csv(
+            tasks_project_asset.file,
+            compressed=True,
+            sort_column=operator.itemgetter("task_id"),
+            ignore_columns={
+                "",  # NOTE: dataframe index
+                "project_internal_id",  # NOTE: added for referencing
+                "group_internal_id",  # NOTE: added for referencing
+                "task_internal_id",  # NOTE: added for referencing
+            },
+        )
+        assert expected_tasks == actual_tasks, "Difference found for tasks export file."
+
+        # Check results export
+        results_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.RESULTS,
+        ).first()
+        assert results_project_asset is not None, "Results project asset not found"
+
+        expected_results = read_csv(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["results"]),
+            sort_column=operator.itemgetter("task_id"),
+            ignore_columns={
+                "",  # NOTE: dataframe index
+            },
+        )
+        actual_results = read_csv(
+            results_project_asset.file,
+            sort_column=operator.itemgetter("task_id"),
+            ignore_columns={
+                "",  # NOTE: dataframe index
+                "task_internal_id",  # NOTE: added for referencing
+                "user_internal_id",  # NOTE: added for referencing
+                "group_internal_id",  # NOTE: added for referencing
+                "project_internal_id",  # NOTE: added for referencing
+            },
+            compressed=True,
+        )
+        assert expected_results == actual_results, "Difference found for results export file."
+
+        # Check aoi export
+        aoi_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.INPUT,
+            input_type=ProjectAssetInputTypeEnum.AOI_GEOMETRY,
+        ).first()
+        assert aoi_project_asset is not None, "AOI Geometry project asset not found"
+
+        expected_aoi = read_json(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["area_of_interest"]),
+            ignore_fields={
+                "crs",  # NOTE: crs has almost no data
+                "name",  # NOTE: previously system file path
+                "properties",  # FIXME: previously has id (index)
+                "coordinates",  # FIXME: precision has changed
+            },
+        )
+        actual_aoi = read_json(
+            aoi_project_asset.file,
+            ignore_fields={
+                "properties",  # FIXME: previously has id (index)
+                "coordinates",  # FIXME: precision has changed
+            },
+        )
+        assert expected_aoi == actual_aoi, "Difference found for AOI geometry export file."
+
+        # Check aggregated results export
+        aggregated_results_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.AGGREGATED_RESULTS,
+        ).first()
+        assert aggregated_results_project_asset is not None, "Aggregated results project asset not found"
+
+        expected_aggregated_results = read_csv(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["aggregated_results"]),
+        )
+        actual_aggregated_results = read_csv(
+            aggregated_results_project_asset.file,
+            compressed=True,
+            ignore_columns={
+                "project_internal_id",  # NOTE: added for referencing
+                "group_internal_id",  # NOTE: added for referencing
+                "task_internal_id",  # NOTE: added for referencing
+            },
+        )
+        assert expected_aggregated_results == actual_aggregated_results, (
+            "Difference found for aggregated results export file."
+        )
+
+        # Check aggregated results with geometry export
+        aggregated_results_with_geometry_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.AGGREGATED_RESULTS_WITH_GEOMETRY,
+        ).first()
+        assert aggregated_results_with_geometry_project_asset is not None, (
+            "Aggregated results with geometry project asset not found"
+        )
+
+        expected_aggregated_results_with_geometry = read_json(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["aggregated_results_with_geometry"]),
+            ignore_fields={
+                "name",  # NOTE: Previously "tmp", now "tmp" + random_str
+            },
+        )
+        actual_aggregated_results_with_geometry = read_json(
+            aggregated_results_with_geometry_project_asset.file,
+            compressed=True,
+            ignore_fields={
+                "name",  # NOTE: Previously "tmp", now "tmp" + random_str
+                "project_internal_id",  # NOTE: added for referencing
+                "group_internal_id",  # NOTE: added for referencing
+                "task_internal_id",  # NOTE: added for referencing
+            },
+        )
+        assert expected_aggregated_results_with_geometry == actual_aggregated_results_with_geometry, (
+            "Difference found for aggregated results with geometry export file."
+        )
+
+        # Check history export
+        history_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.HISTORY,
+        ).first()
+        assert history_project_asset is not None, "History project asset not found"
+
+        expected_history = read_csv(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["history"]),
+        )
+        actual_history = read_csv(
+            history_project_asset.file,
+        )
+        assert expected_history == actual_history, "Difference found for history export file."
+
+        # Check users export
+        users_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.USERS,
+        ).first()
+        assert users_project_asset is not None, "Users project asset not found"
+
+        expected_users = read_csv(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["users"]),
+        )
+        actual_users = read_csv(
+            users_project_asset.file,
+            compressed=True,
+        )
+        assert expected_users == actual_users, "Difference found for users export file."
+
+        # Check hot tasking manager geometry export
+        hot_aoi_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.HOT_TASKING_MANAGER_GEOMETRIES,
+        ).first()
+        assert hot_aoi_project_asset is not None, "HOT TM AOI Geometry project asset not found"
+
+        expected_hot_aoi = read_json(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["hot_tasking_manager_geometry"]),
+            ignore_fields={
+                "name",  # NOTE: previously full path, not just filename
+            },
+        )
+        expected_hot_aoi["features"].sort(key=lambda x: x["properties"]["group_id"])  # type: ignore[reportArgumentType, reportCallIssue]
+        actual_hot_aoi = read_json(
+            hot_aoi_project_asset.file,
+            ignore_fields={
+                "name",  # NOTE: previously full path, not just filename
+            },
+        )
+
+        actual_hot_aoi["features"].sort(key=lambda x: x["properties"]["group_id"])  # type: ignore[reportArgumentType, reportCallIssue]
+        assert expected_hot_aoi == actual_hot_aoi, "Difference found for HOT TM AOI geometry export file."
+
+        # Check for moderate to high agreement export
+        agreement_project_asset = ProjectAsset.objects.filter(
+            project=project,
+            type=AssetTypeEnum.EXPORT,
+            export_type=ProjectAssetExportTypeEnum.MODERATE_TO_HIGH_AGREEMENT_YES_MAYBE_GEOMETRIES,
+        ).first()
+        assert agreement_project_asset is not None, "Moderate to high agreement project asset not found"
+
+        expected_agreement = read_json(
+            Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["moderate_to_high_agreement"]),
+            ignore_fields={
+                "name",  # NOTE: previously full path, not just filename
+            },
+        )
+        actual_agreement = read_json(
+            agreement_project_asset.file,
+            ignore_fields={
+                "name",  # NOTE: previously full path, not just filename
+            },
+        )
+        assert expected_agreement == actual_agreement, "Difference found for moderate to high agreement export file."
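Review note: the export checks added above all repeat the same fetch-asset / read-expected / read-actual / compare pattern. If this grows further, a helper along these lines could collapse the CSV cases. Sketch only — `assert_csv_export` is a hypothetical name, and it assumes the `ProjectAsset`, `AssetTypeEnum`, and `read_csv` names already imported in this test module:

```python
# Hypothetical consolidation of the repeated CSV export assertions above.
def assert_csv_export(
    project,
    export_type,
    expected_path,
    *,
    compressed=True,
    sort_column=None,
    extra_ignore_columns=frozenset(),
):
    asset = ProjectAsset.objects.filter(
        project=project,
        type=AssetTypeEnum.EXPORT,
        export_type=export_type,
    ).first()
    assert asset is not None, f"{export_type} project asset not found"

    # The *_internal_id columns only exist in the new exports, so drop them before diffing.
    internal_ids = {"project_internal_id", "group_internal_id", "task_internal_id", "user_internal_id"}
    expected = read_csv(
        expected_path,
        sort_column=sort_column,
        ignore_columns=set(extra_ignore_columns) or None,
    )
    actual = read_csv(
        asset.file,
        compressed=compressed,
        sort_column=sort_column,
        ignore_columns=internal_ids | set(extra_ignore_columns),
    )
    assert expected == actual, f"Difference found for {export_type} export file."
```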
diff --git a/apps/project/tests/export_test.py b/apps/project/tests/export_test.py
index 978ed7b0..9c2c0244 100644
--- a/apps/project/tests/export_test.py
+++ b/apps/project/tests/export_test.py
@@ -95,6 +95,7 @@ def setUpClass(cls):
                 project_type_specifics=FindProjectTaskProperty(
                     tile_x=1,
                     tile_y=2,
+                    url="https://some-service.com/14/1/2/",
                 ).model_dump(),
             )
         ]
diff --git a/apps/project/tests/mutation_test.py b/apps/project/tests/mutation_test.py
index 8009cebd..0a24aa37 100644
--- a/apps/project/tests/mutation_test.py
+++ b/apps/project/tests/mutation_test.py
@@ -1393,7 +1393,7 @@ class TaskGroupType(typing.TypedDict):
             {
                 "firebase_id": "g101",
                 "number_of_tasks": 18,
-                "required_count": 18 * 10,
+                "required_count": 10,
                 "total_area": 210.10735845202447,
                 "project_type_specifics": {
                     "x_max": 24152,
@@ -1405,7 +1405,7 @@ class TaskGroupType(typing.TypedDict):
             {
                 "firebase_id": "g102",
                 "number_of_tasks": 24,
-                "required_count": 24 * 10,
+                "required_count": 10,
                 "total_area": 280.2915392364502,
                 "project_type_specifics": {
                     "x_max": 24153,
@@ -1417,7 +1417,7 @@ class TaskGroupType(typing.TypedDict):
             {
                 "firebase_id": "g103",
                 "number_of_tasks": 24,
-                "required_count": 24 * 10,
+                "required_count": 10,
                 "total_area": 280.4398676951218,
                 "project_type_specifics": {
                     "x_max": 24153,
@@ -1429,7 +1429,7 @@ class TaskGroupType(typing.TypedDict):
             {
                 "firebase_id": "g104",
                 "number_of_tasks": 6,
-                "required_count": 6 * 10,
+                "required_count": 10,
                 "total_area": 70.14703242812156,
                 "project_type_specifics": {
                     "x_max": 24150,
@@ -1445,6 +1445,8 @@ class TaskGroupType(typing.TypedDict):
                 "project_type_specifics": {
                     "tile_x": 24147,
                     "tile_y": 13753,
+                    "url": "https://hi-there/24147/13753/15",
+                    "url_b": "https://services.digitalglobe.com/earthservice/tmsaccess/tms/1.0.0/DigitalGlobe%3AImageryTileService@EPSG%3A3857@jpg/15/24147/13753.jpg?connectId=dummy-maxar-standard",
                 },
             },
             {
@@ -1452,6 +1454,8 @@ class TaskGroupType(typing.TypedDict):
                 "project_type_specifics": {
                     "tile_x": 24147,
                     "tile_y": 13754,
+                    "url": "https://hi-there/24147/13754/15",
+                    "url_b": "https://services.digitalglobe.com/earthservice/tmsaccess/tms/1.0.0/DigitalGlobe%3AImageryTileService@EPSG%3A3857@jpg/15/24147/13754.jpg?connectId=dummy-maxar-standard",
                 },
             },
             {
@@ -1459,6 +1463,8 @@ class TaskGroupType(typing.TypedDict):
                 "project_type_specifics": {
                     "tile_x": 24147,
                     "tile_y": 13755,
+                    "url": "https://hi-there/24147/13755/15",
+                    "url_b": "https://services.digitalglobe.com/earthservice/tmsaccess/tms/1.0.0/DigitalGlobe%3AImageryTileService@EPSG%3A3857@jpg/15/24147/13755.jpg?connectId=dummy-maxar-standard",
                 },
             },
             {
@@ -1466,6 +1472,8 @@ class TaskGroupType(typing.TypedDict):
                 "project_type_specifics": {
                     "tile_x": 24148,
                     "tile_y": 13753,
+                    "url": "https://hi-there/24148/13753/15",
+                    "url_b": "https://services.digitalglobe.com/earthservice/tmsaccess/tms/1.0.0/DigitalGlobe%3AImageryTileService@EPSG%3A3857@jpg/15/24148/13753.jpg?connectId=dummy-maxar-standard",
                 },
             },
             {
@@ -1473,6 +1481,8 @@ class TaskGroupType(typing.TypedDict):
                 "project_type_specifics": {
                     "tile_x": 24148,
                     "tile_y": 13754,
+                    "url": "https://hi-there/24148/13754/15",
+                    "url_b": "https://services.digitalglobe.com/earthservice/tmsaccess/tms/1.0.0/DigitalGlobe%3AImageryTileService@EPSG%3A3857@jpg/15/24148/13754.jpg?connectId=dummy-maxar-standard",
                 },
             },
         ]
@@ -1482,7 +1492,7 @@ class TaskGroupType(typing.TypedDict):
         project_task_qs = ProjectTask.objects.filter(task_group__project=latest_project)
 
         assert {
-            "required_results": sum(task_group["required_count"] for task_group in expected_task_groups),
"required_results": (18 + 24 + 24 + 6) * 10, "tasks_groups_count": project_task_group_qs.count(), "tasks_groups": list( project_task_group_qs.order_by("id").values( diff --git a/assets b/assets index 69dae411..6771f901 160000 --- a/assets +++ b/assets @@ -1 +1 @@ -Subproject commit 69dae411dde7df1b20df2d1ec0e35fc8ccee877b +Subproject commit 6771f9018fc67d39c11277924a0e2aec14149db8 diff --git a/bulk_ignore_pyright_warnings.py b/bulk_ignore_pyright_warnings.py index e1340229..53ad7970 100644 --- a/bulk_ignore_pyright_warnings.py +++ b/bulk_ignore_pyright_warnings.py @@ -19,6 +19,8 @@ # - We need to use the same version of python as specified on precommit # - We look for pyproject.toml on the current working directory +SEVERITY = "warning" + def run_pyright(path: str) -> dict: # type: ignore[reportMissingTypeArgument] """Run pyright and return JSON output.""" @@ -46,7 +48,7 @@ def apply_specific_ignores(pyright_output: dict): # type: ignore[reportMissingT if "file" not in diag or "range" not in diag or "rule" not in diag or "severity" not in diag: print("Error: diagnostics should define file, range and rule", diag) continue - if diag["severity"] != "warning": + if diag["severity"] != SEVERITY: continue file_path = diag["file"] diff --git a/project_types/base/project.py b/project_types/base/project.py index 27f91d44..d5369065 100644 --- a/project_types/base/project.py +++ b/project_types/base/project.py @@ -142,13 +142,13 @@ def analyze_groups(self): ) # NOTE: After number_of_tasks is calculated project_task_groups_qs.update( - required_count=models.F("number_of_tasks") * self.project.verification_number, + required_count=self.project.verification_number, time_spent_max_allowed=(models.F("number_of_tasks") * self.get_max_time_spend_percentile()), ) self.project.required_results = ( ProjectTaskGroup.objects.filter(project_id=self.project.pk).aggregate( - required_results=models.Sum("required_count"), + required_results=models.Sum("number_of_tasks") * self.project.verification_number, ) )["required_results"] or 0 diff --git a/project_types/tile_map_service/base/project.py b/project_types/tile_map_service/base/project.py index 4335d92c..b0aea5eb 100644 --- a/project_types/tile_map_service/base/project.py +++ b/project_types/tile_map_service/base/project.py @@ -51,6 +51,8 @@ class TileMapServiceProjectTaskGroupProperty(base_project.BaseProjectTaskGroupPr class TileMapServiceProjectTaskProperty(base_project.BaseProjectTaskProperty): tile_x: int tile_y: int + # NOTE: We added URL as it's used directly when creating exports + url: str class TileMapServiceBaseProject[ @@ -132,6 +134,17 @@ def get_feature(task: ProjectTask): self.project.project_type_specific_output_asset = asset self.project.save(update_fields=("project_type_specific_output_asset",)) + def get_task_specifics_for_db(self, tile_x: int, tile_y: int) -> TileMapServiceProjectTaskProperty: + return self.project_task_property_class( + tile_x=tile_x, + tile_y=tile_y, + url=self.project_type_specifics.tile_server_property.generate_url( + tile_x, + tile_y, + self.project_type_specifics.zoom_level, + ), + ) + @typing.override def create_tasks( self, @@ -155,10 +168,7 @@ def create_tasks( task_group_id=group.pk, geometry=geometry, project_type_specifics=clean_up_none_keys( - self.project_task_property_class( - tile_x=tile_x, - tile_y=tile_y, - ).model_dump(), + self.get_task_specifics_for_db(tile_x, tile_y).model_dump(), ), ), ) diff --git a/project_types/tile_map_service/compare/project.py b/project_types/tile_map_service/compare/project.py 
diff --git a/project_types/tile_map_service/compare/project.py b/project_types/tile_map_service/compare/project.py
index 208f0a13..2ba959ed 100644
--- a/project_types/tile_map_service/compare/project.py
+++ b/project_types/tile_map_service/compare/project.py
@@ -14,7 +14,9 @@ class CompareProjectProperty(base_project.TileMapServiceProjectProperty):
 class CompareProjectTaskGroupProperty(base_project.TileMapServiceProjectTaskGroupProperty): ...
 
 
-class CompareProjectTaskProperty(base_project.TileMapServiceProjectTaskProperty): ...
+class CompareProjectTaskProperty(base_project.TileMapServiceProjectTaskProperty):
+    # NOTE: We added url_b as it's used directly when creating exports
+    url_b: str
 
 
 class CompareProject(
@@ -37,6 +39,23 @@ def __init__(self, project: Project):
     def get_max_time_spend_percentile(self) -> float:
         return 11.2
 
+    @typing.override
+    def get_task_specifics_for_db(self, tile_x: int, tile_y: int) -> CompareProjectTaskProperty:
+        return self.project_task_property_class(
+            tile_x=tile_x,
+            tile_y=tile_y,
+            url=self.project_type_specifics.tile_server_property.generate_url(
+                tile_x,
+                tile_y,
+                self.project_type_specifics.zoom_level,
+            ),
+            url_b=self.project_type_specifics.tile_server_b_property.generate_url(
+                tile_x,
+                tile_y,
+                self.project_type_specifics.zoom_level,
+            ),
+        )
+
     # FIREBASE
 
     @typing.override
@@ -46,24 +65,14 @@ def skip_tasks_on_firebase(self) -> bool:
     @typing.override
     def get_task_specifics_for_firebase(self, task: ProjectTask) -> firebase_models.FbMappingTaskCompareCreateOnlyInput:
         task_specifics = self.project_task_property_class.model_validate(task.project_type_specifics)
-        tsp = self.project_type_specifics.tile_server_property
-        tsp_b = self.project_type_specifics.tile_server_b_property
 
         return firebase_models.FbMappingTaskCompareCreateOnlyInput(
             groupId=str(task.task_group.firebase_id),
             taskId=task.firebase_id,
             taskX=task_specifics.tile_x,
             taskY=task_specifics.tile_y,
-            url=tsp.generate_url(
-                task_specifics.tile_x,
-                task_specifics.tile_y,
-                self.project_type_specifics.zoom_level,
-            ),
-            urlB=tsp_b.generate_url(
-                task_specifics.tile_x,
-                task_specifics.tile_y,
-                self.project_type_specifics.zoom_level,
-            ),
+            url=task_specifics.url,
+            urlB=task_specifics.url_b,
         )
 
     @typing.override
diff --git a/utils/geo/tile_functions.py b/utils/geo/tile_functions.py
index 86db6052..14fdaa40 100644
--- a/utils/geo/tile_functions.py
+++ b/utils/geo/tile_functions.py
@@ -91,7 +91,7 @@ def quad_key_to_bing_url(quad_key: str, api_key: str):
 
 
 # FIXME(tnagorra): Add typings for osgeo
-def geometry_from_tile_coords(tile_x: float, tile_y: float, zoom: int) -> str:
+def geometry_from_tile_coords(tile_x: float, tile_y: float, zoom: int, *, skip_flatten: bool = False) -> str:
     """Compute the polygon geometry of a tile map service tile."""
     # Calculate lat, lon of upper left corner of tile
     pixel_x = tile_x * 256
@@ -113,7 +113,7 @@
     poly = ogr.Geometry(ogr.wkbPolygon)
     poly.AddGeometry(ring)
 
-    if poly.GetCoordinateDimension() == 3:
+    if not skip_flatten and poly.GetCoordinateDimension() == 3:
         poly.FlattenTo2D()
 
     return poly.ExportToWkt()
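Review note on `skip_flatten`: `ogr` rings built with `AddPoint(x, y)` default the z coordinate to 0 and report a coordinate dimension of 3, which is why `geometry_from_tile_coords` flattened them before. The new flag lets `tasking_manager_geometries.py` keep the three-component WKT that earlier exports contained. A minimal reproduction of the behavior being bypassed (coordinates are hypothetical):

```python
# What FlattenTo2D does to a ring built with AddPoint (z defaults to 0).
from osgeo import ogr

ring = ogr.Geometry(ogr.wkbLinearRing)
for x, y in [(0, 0), (1, 0), (1, 1), (0, 0)]:
    ring.AddPoint(x, y)  # promotes the geometry to 3D with z=0

poly = ogr.Geometry(ogr.wkbPolygon)
poly.AddGeometry(ring)

print(poly.GetCoordinateDimension())  # 3
print(poly.ExportToWkt())  # POLYGON ((0 0 0,1 0 0,1 1 0,0 0 0)) -- z retained

poly.FlattenTo2D()  # this is the step skipped when skip_flatten=True
print(poly.ExportToWkt())  # POLYGON ((0 0,1 0,1 1,0 0))
```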