diff --git a/apps/mapping/firebase/utils.py b/apps/mapping/firebase/utils.py index 86718c57..ae504520 100644 --- a/apps/mapping/firebase/utils.py +++ b/apps/mapping/firebase/utils.py @@ -248,14 +248,16 @@ def results_complete( {fd_name(MappingSessionResult.session)}, {fd_name(MappingSessionResult.project_task)}, -- Value - {fd_name(MappingSessionResult.result)} + {fd_name(MappingSessionResult.result)}, + {fd_name(MappingSessionResult.reference)} ) ( SELECT -- Ref MS.{fd_name(MappingSession.id)}, -- mapping_session_id RT.{fd_name(MappingSessionResultTemp.task_id)}, -- task_id -- Value - RT.{fd_name(MappingSessionResultTemp.result)} -- result [TODO: ST_Transform(ST_SetSRID(ST_GeomFromGeoJSON(RT.result), 3857), 4326)] + RT.{fd_name(MappingSessionResultTemp.result)}, -- result [TODO: ST_Transform(ST_SetSRID(ST_GeomFromGeoJSON(RT.result), 3857), 4326)] + RT.{fd_name(MappingSessionResultTemp.reference)} FROM {tb_name(MappingSessionResultTemp)} RT LEFT JOIN {tb_name(MappingSession)} MS ON MS.{fd_name(MappingSession.project_task_group)} = RT.{fd_name(MappingSessionResultTemp.group_id)} AND @@ -489,6 +491,8 @@ def results_to_temp_table( continue # Collect results for each tasks + reference_map = mapping_session_data.get("reference", {}) + for task_firebase_id, result in session_results_iterator: if result is None: # TODO: Do we treat it as 0? 
@@ -499,6 +503,8 @@ def results_to_temp_table( # if result_type == "geometry": # result = geojson.dumps(geojson.GeometryCollection(result)) + reference_for_task = reference_map.get(task_firebase_id) + bulk_create_manager.add( MappingSessionResultTemp( project_firebase_id=project.firebase_id, @@ -508,6 +514,7 @@ def results_to_temp_table( start_time=start_time, end_time=end_time, result=result, + reference=reference_for_task, app_version=app_version, client_type=client_type, ), diff --git a/apps/mapping/migrations/0004_mappingsessionresult_reference_and_more.py b/apps/mapping/migrations/0004_mappingsessionresult_reference_and_more.py new file mode 100644 index 00000000..48632690 --- /dev/null +++ b/apps/mapping/migrations/0004_mappingsessionresult_reference_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.5 on 2025-12-08 14:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mapping', '0003_alter_mappingsession_app_version'), + ] + + operations = [ + migrations.AddField( + model_name='mappingsessionresult', + name='reference', + field=models.JSONField(blank=True, null=True), + ), + migrations.AddField( + model_name='mappingsessionresulttemp', + name='reference', + field=models.JSONField(blank=True, null=True), + ), + ] diff --git a/apps/mapping/models.py b/apps/mapping/models.py index 0f60ca85..5e10decb 100644 --- a/apps/mapping/models.py +++ b/apps/mapping/models.py @@ -104,6 +104,7 @@ class MappingSessionResult(models.Model): session = models.ForeignKey[MappingSession, MappingSession](MappingSession, on_delete=models.PROTECT) project_task = models.ForeignKey[ProjectTask, ProjectTask](ProjectTask, on_delete=models.PROTECT) result = models.PositiveSmallIntegerField[int, int]() + reference = models.JSONField(null=True, blank=True) # TODO(thenav56): Add constraint to make sure we have non-duplicate row with task_id, .session.user_id @@ -155,6 +156,7 @@ class MappingSessionResultTemp(models.Model): 
start_time = models.DateTimeField[datetime.datetime, datetime.datetime]() end_time = models.DateTimeField[datetime.datetime, datetime.datetime]() result = models.PositiveSmallIntegerField[int, int]() + reference = models.JSONField(null=True, blank=True) app_version = models.CharField[str, str](max_length=255) client_type: int = IntegerChoicesField(choices_enum=MappingSessionClientTypeEnum) # type: ignore[reportAssignmentType] diff --git a/apps/project/custom_options.py b/apps/project/custom_options.py index 050176fd..da0f8600 100644 --- a/apps/project/custom_options.py +++ b/apps/project/custom_options.py @@ -118,11 +118,12 @@ def get_fallback_custom_options_for_export(project_type: ProjectTypeEnum) -> lis project_type == ProjectTypeEnum.FIND or project_type == ProjectTypeEnum.COMPARE or project_type == ProjectTypeEnum.COMPLETENESS + or project_type == ProjectTypeEnum.CONFLATION ): return [ - 0, # No - 1, # Yes - 2, # Maybe - 3, # Bad Imagery + 0, # No / Conflation: No | OSM feature is more accurate + 1, # Yes / Conflation: Yes | Other feature is more accurate + 2, # Maybe / Conflation: Not sure | Neither is accurate + 3, # Bad Imagery / Conflation: Skipped because of multiple OSM features intersecting ] typing.assert_never(project_type) diff --git a/apps/project/exports/exports.py b/apps/project/exports/exports.py index 351ae639..d126a17b 100644 --- a/apps/project/exports/exports.py +++ b/apps/project/exports/exports.py @@ -15,6 +15,7 @@ from apps.user.models import User from main.config import Config from main.logging import log_extra +from project_types.conflation.project import ConflationProjectProperty from project_types.store import get_project_type_handler from project_types.tile_map_service.compare.project import CompareProjectProperty from project_types.tile_map_service.completeness.project import CompletenessProjectProperty @@ -87,11 +88,11 @@ def _export_project_data(project: Project, tmp_directory: Path): custom_options_raw = [] - # NOTE: We do not have 
custom options for Compare, Completeness and Find projects + # NOTE: We do not have custom options for Compare, Completeness, Conflation and Find projects if not isinstance( project_type_handler.project_type_specifics, # NOTE: Using negate test to throw type error if new project type is added - (CompareProjectProperty | CompletenessProjectProperty | FindProjectProperty), + (CompareProjectProperty | CompletenessProjectProperty | FindProjectProperty | ConflationProjectProperty), ): custom_options_raw = [ {"value": custom_option.value} diff --git a/apps/project/exports/mapping_results.py b/apps/project/exports/mapping_results.py index 1919cd4c..d1c8e618 100644 --- a/apps/project/exports/mapping_results.py +++ b/apps/project/exports/mapping_results.py @@ -42,6 +42,7 @@ def generate_mapping_results(*, destination_filename: Path, project: Project) -> {MappingSessionClientTypeEnum.get_client_type_label_sql(f"MS.{fd_name(MappingSession.client_type)}")} ) as client_type, MSR.{fd_name(MappingSessionResult.result)} as result, + MSR.{fd_name(MappingSessionResult.reference)}::text as reference, -- the username for users which login to MapSwipe with their -- OSM account is not defined or ''. -- We capture this here as it will cause problems diff --git a/apps/project/exports/mapping_results_aggregate/task.py b/apps/project/exports/mapping_results_aggregate/task.py index f085dc75..02d39455 100644 --- a/apps/project/exports/mapping_results_aggregate/task.py +++ b/apps/project/exports/mapping_results_aggregate/task.py @@ -1,3 +1,4 @@ +import json import typing from pathlib import Path @@ -87,6 +88,30 @@ def _get_custom_options(custom_options: CustomOptionType): } +def _add_reference_to_agg_results( + results_df: pd.DataFrame, + agg_results_df: pd.DataFrame, +) -> pd.DataFrame: + """Adds a 'reference' column to agg_results_df if it exists in results_df. + For each task_id, all unique non-empty refs are collected into a list. 
+ If no refs exist for a task, the corresponding value is an empty string. + If results_df has no 'reference' column, agg_results_df is returned unchanged. + """ + if "reference" not in results_df.columns: + return agg_results_df + + refs_per_task = ( + results_df.groupby("task_id")["reference"] + .apply(lambda x: list({r for r in x if pd.notna(r) and r not in ({}, "")})) + .apply(lambda lst: json.dumps([json.loads(r) for r in lst]) if lst else "") + ) + + if refs_per_task.apply(lambda x: len(x) > 0).any(): + agg_results_df["reference"] = agg_results_df["task_id"].map(refs_per_task).fillna("") + + return agg_results_df + + def generate_mapping_results_aggregate_by_task( *, destination_filename: Path, @@ -153,6 +178,8 @@ def generate_mapping_results_aggregate_by_task( right_on="task_id", ) + agg_results_df = _add_reference_to_agg_results(results_df, agg_results_df) + agg_results_df.to_csv(destination_filename, index_label="idx") return agg_results_df diff --git a/apps/project/graphql/inputs/inputs.py b/apps/project/graphql/inputs/inputs.py index 7b99982d..6a77a1d1 100644 --- a/apps/project/graphql/inputs/inputs.py +++ b/apps/project/graphql/inputs/inputs.py @@ -17,6 +17,7 @@ from .project_types.compare import CompareProjectPropertyInput from .project_types.completeness import CompletenessProjectPropertyInput +from .project_types.conflation import ConflationProjectPropertyInput from .project_types.find import FindProjectPropertyInput from .project_types.street import StreetProjectPropertyInput from .project_types.validate import ValidateProjectPropertyInput @@ -53,6 +54,7 @@ class ProjectTypeSpecificInput: validate: ValidateProjectPropertyInput | None = strawberry.UNSET validate_image: ValidateImageProjectPropertyInput | None = strawberry.UNSET street: StreetProjectPropertyInput | None = strawberry.UNSET + conflation: ConflationProjectPropertyInput | None = strawberry.UNSET # NOTE: Make sure this matches with the serializers ../serializers.py diff --git 
a/apps/project/graphql/inputs/project_types/conflation.py b/apps/project/graphql/inputs/project_types/conflation.py new file mode 100644 index 00000000..2c1505f0 --- /dev/null +++ b/apps/project/graphql/inputs/project_types/conflation.py @@ -0,0 +1,11 @@ +import strawberry + +from project_types.conflation import project as conflation_project + + +@strawberry.experimental.pydantic.input(model=conflation_project.ConflationObjectSourceConfig, all_fields=True) +class ConflationObjectSourceConfigInput: ... + + +@strawberry.experimental.pydantic.input(model=conflation_project.ConflationProjectProperty, all_fields=True) +class ConflationProjectPropertyInput: ... diff --git a/apps/project/graphql/types/project_types/conflation.py b/apps/project/graphql/types/project_types/conflation.py new file mode 100644 index 00000000..1fed896a --- /dev/null +++ b/apps/project/graphql/types/project_types/conflation.py @@ -0,0 +1,14 @@ +import strawberry + +from project_types.conflation import project as conflation_project + + +@strawberry.experimental.pydantic.type(model=conflation_project.ConflationObjectSourceConfig, all_fields=True) +class ConflationObjectSourceConfig: ... + + +@strawberry.experimental.pydantic.type(model=conflation_project.ConflationProjectProperty, all_fields=True) +class ConflationProjectPropertyType: ... + + +DEFAULT_TEST_RESPONSE_ERROR_MESSAGE: str = "Something unexpected has occurred. Please contact an admin to fix this issue." 
diff --git a/apps/project/graphql/types/types.py b/apps/project/graphql/types/types.py index 315ab2da..87743921 100644 --- a/apps/project/graphql/types/types.py +++ b/apps/project/graphql/types/types.py @@ -14,6 +14,7 @@ from apps.tutorial.graphql.types.types import TutorialType from main.config import Config from main.graphql.context import Info +from project_types.conflation import project as conflation_project from project_types.street import project as street_project from project_types.tile_map_service.compare import project as compare_project from project_types.tile_map_service.completeness import project as completeness_project @@ -31,6 +32,7 @@ from .project_types.compare import CompareProjectPropertyType from .project_types.completeness import CompletenessProjectPropertyType +from .project_types.conflation import ConflationProjectPropertyType from .project_types.find import FindProjectPropertyType from .project_types.street import StreetProjectPropertyType from .project_types.validate import ValidateProjectPropertyType @@ -219,6 +221,7 @@ async def project_type_specifics( | ValidateImageProjectPropertyType | CompletenessProjectPropertyType | StreetProjectPropertyType + | ConflationProjectPropertyType | None ): data = project.project_type_specifics @@ -245,4 +248,9 @@ async def project_type_specifics( "StreetProjectPropertyType", street_project.StreetProjectProperty.model_validate(data), ) + if project.project_type_enum == Project.Type.CONFLATION: + return typing.cast( + "ConflationProjectPropertyType", + conflation_project.ConflationProjectProperty.model_validate(data), + ) typing.assert_never(project.project_type_enum) diff --git a/apps/project/migrations/0011_alter_project_project_type.py b/apps/project/migrations/0011_alter_project_project_type.py new file mode 100644 index 00000000..9df3148a --- /dev/null +++ b/apps/project/migrations/0011_alter_project_project_type.py @@ -0,0 +1,19 @@ +# Generated by Django 5.2.5 on 2025-12-08 14:47 + +import 
django_choices_field.fields +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('project', '0010_alter_projecttask_unique_together'), + ] + + operations = [ + migrations.AlterField( + model_name='project', + name='project_type', + field=django_choices_field.fields.IntegerChoicesField(choices=[(1, 'Find Features'), (2, 'Validate Footprints'), (10, 'Assess Images'), (3, 'Compare Dates'), (4, 'Check Completeness'), (7, 'View Streets'), (8, 'Conflate Features')]), + ), + ] diff --git a/apps/project/models.py b/apps/project/models.py index 44aa399c..c7363a13 100644 --- a/apps/project/models.py +++ b/apps/project/models.py @@ -114,6 +114,9 @@ class ProjectTypeEnum(models.IntegerChoices): STREET = 7, "View Streets" """ Street project type. """ + CONFLATION = 8, "Conflate Features" + """ Conflation project type. """ + # TODO(thenav56): Confirm if we have more/less @classmethod @@ -136,6 +139,8 @@ def to_firebase(self) -> firebase_models.FbEnumProjectType: return firebase_models.FbEnumProjectType.VALIDATE_IMAGE case ProjectTypeEnum.STREET: return firebase_models.FbEnumProjectType.STREET + case ProjectTypeEnum.CONFLATION: + return firebase_models.FbEnumProjectType.CONFLATION # TODO(tnagorra): Reset the values later diff --git a/apps/project/serializers.py b/apps/project/serializers.py index 03fc146d..7e868ee7 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -125,6 +125,8 @@ def _validate_group_size(self, attrs: dict[str, typing.Any]): group_size = 80 case Project.Type.STREET: group_size = 25 + case Project.Type.CONFLATION: + group_size = 25 attrs["group_size"] = group_size diff --git a/apps/project/slack_messages.py b/apps/project/slack_messages.py index 7196cf47..9021de32 100644 --- a/apps/project/slack_messages.py +++ b/apps/project/slack_messages.py @@ -64,6 +64,8 @@ def format_project_type(project_type: ProjectTypeEnum): Project.Type.STREET: ":street:", Project.Type.COMPLETENESS: 
":completeness:", Project.Type.VALIDATE_IMAGE: ":validate_image:", + # TODO: add custom :conflation: icon in slack + Project.Type.CONFLATION: ":construction:", } # FIXME: better way to concatenate this return f"{label} {type_to_icon.get(project_type, '')}".strip() diff --git a/apps/project/tests/e2e_create_conflation_project_test.py b/apps/project/tests/e2e_create_conflation_project_test.py new file mode 100644 index 00000000..9d698ecb --- /dev/null +++ b/apps/project/tests/e2e_create_conflation_project_test.py @@ -0,0 +1,785 @@ +import logging +import operator +import typing + +# import unittest +from contextlib import contextmanager +from datetime import datetime +from pathlib import Path + +import json5 +import pytest +from django.db.models.signals import pre_save + +from apps.common.models import AssetTypeEnum +from apps.common.utils import decode_tasks, remove_object_keys +from apps.contributor.factories import ContributorUserFactory +from apps.contributor.models import ContributorUserGroup +from apps.mapping.firebase.pull import pull_results_from_firebase +from apps.mapping.models import ( + MappingSession, + MappingSessionResult, + MappingSessionResultTemp, + MappingSessionUserGroup, + MappingSessionUserGroupTemp, +) +from apps.project.models import Organization, Project, ProjectAsset, ProjectAssetExportTypeEnum +from apps.project.tests.e2e_create_project_tile_map_service_test import read_csv, read_json +from apps.tutorial.models import Tutorial +from apps.user.factories import UserFactory +from main.config import Config +from main.tests import TestCase + +logging.getLogger("vcr").setLevel(logging.WARNING) + + +@contextmanager +def create_override(): + def pre_save_override(sender: typing.Any, instance: typing.Any, **kwargs): # type: ignore[reportMissingParameterType] + if sender == Tutorial: + instance.firebase_id = f"tutorial_{instance.client_id}" + elif sender in {Project, Organization, ContributorUserGroup}: + instance.firebase_id = instance.client_id + 
+ pre_save.connect(pre_save_override) + try: + yield True + finally: + pre_save.disconnect(pre_save_override) + + +class TestConflationProjectE2E(TestCase): + class Mutation: + CREATE_PROJECT = """ + mutation CreateProject($data: ProjectCreateInput!) { + createProject(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + UPDATE_PROJECT = """ + mutation UpdateProject($pk: ID!, $data: ProjectUpdateInput!) { + updateProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPLOAD_PROJECT_ASSET = """ + mutation CreateProjectAsset($data: ProjectAssetCreateInput!) { + createProjectAsset(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectAssetTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROCESSED_PROJECT = """ + mutation UpdateProcessedProject($pk: ID!, $data: ProcessedProjectUpdateInput!) { + updateProcessedProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROJECT_STATUS = """ + mutation UpdateProjectStatus($pk: ID!, $data: ProjectStatusUpdateInput!) { + updateProjectStatus(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + CREATE_ORGANIZATION = """ + mutation CreateOrganization($data: OrganizationCreateInput!) { + createOrganization(data: $data) { + ... 
on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on OrganizationTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + CREATE_TUTORIAL = """ + mutation CreateTutorial($data: TutorialCreateInput!) { + createTutorial(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + clientId + projectId + firebaseId + } + } + } + } + """ + + UPDATE_TUTORIAL = """ + mutation UpdateTutorial($data: TutorialUpdateInput!, $pk: ID!) { + updateTutorial(data: $data, pk: $pk) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + UPDATE_TUTORIAL_STATUS = """ + mutation UpdateTutorialStatus($data: TutorialStatusUpdateInput!, $pk: ID!) { + updateTutorialStatus(data: $data, pk: $pk) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + CREATE_CONTRIBUTOR_USER_GROUP = """ + mutation CreateContributorUserGroup($data: ContributorUserGroupCreateInput!) { + createContributorUserGroup(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... 
on ContributorUserGroupTypeMutationResponseType { + errors + ok + result { + id + name + description + clientId + isArchived + firebaseId + } + } + } + } + """ + + @pytest.mark.vcr("assets/tests/projects/conflation/cassette") + def test_conflation_project_e2e(self): + with create_override(): + self._test_project( + "assets/tests/projects/conflation/project_data.json5", + ) + + # Generic functions + + def _test_project(self, filename: str): + # Load test data file + full_path = Path(Config.BASE_DIR, filename) + with full_path.open("r", encoding="utf-8") as f: + test_data = json5.load(f) + + # Create contributor user and login + contributor_user = ContributorUserFactory.create( + username="Ram Bahadur", + firebase_id=test_data["contributor_user_firebase_id"], + ) + user = UserFactory.create( + contributor_user=contributor_user, + ) + + self.force_login(user) + + # Define full path for image + image_filename = Path(Config.BASE_DIR) / test_data["assets"]["image"] + + # Create an organization + create_organization_data = test_data["create_organization"] + with self.captureOnCommitCallbacks(execute=True): + organization_content = self.query_check( + self.Mutation.CREATE_ORGANIZATION, + variables={"data": create_organization_data}, + ) + + organization_response = organization_content["data"]["createOrganization"] + assert organization_response is not None, "Organization create response is None" + assert organization_response["ok"] + + organization_id = organization_response["result"]["id"] + organization_fb_id = organization_response["result"]["firebaseId"] + + organization_fb_ref = self.firebase_helper.ref(f"/v2/organisations/{organization_fb_id}") + organization_fb_data = organization_fb_ref.get() + assert organization_fb_data is not None, "Organization in firebase is None" + + # Create project + create_project_data = test_data["create_project"] + create_project_data["requestingOrganization"] = organization_id + + with self.captureOnCommitCallbacks(execute=True): + 
project_content = self.query_check( + self.Mutation.CREATE_PROJECT, + variables={"data": create_project_data}, + ) + + project_response = project_content["data"]["createProject"] + assert project_response is not None, "Project create response is None" + assert project_response["ok"], project_response["errors"] + + project_id = project_response["result"]["id"] + project_fb_id = project_response["result"]["firebaseId"] + project_client_id = create_project_data["clientId"] + + # Create Image Asset for cover image + image_asset_data = { + "clientId": project_client_id, + "inputType": "COVER_IMAGE", + "project": project_id, + } + with image_filename.open("rb") as img_file: + image_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": image_asset_data}, + files={"imageFile": img_file}, + map={"imageFile": ["variables.data.file"]}, + ) + image_response = image_content["data"]["createProjectAsset"] + assert image_response is not None, "Image create response is None" + assert image_response["ok"] + image_id = image_response["result"]["id"] + + # Update project + update_project_data = test_data["update_project"] + update_project_data["image"] = image_id + update_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + update_content = self.query_check( + self.Mutation.UPDATE_PROJECT, + variables={"pk": project_id, "data": update_project_data}, + ) + update_response = update_content["data"]["updateProject"] + assert update_response["ok"], update_response["errors"] + assert update_response is not None, "Project update response is None" + + # Process project + process_project_data = { + "clientId": project_client_id, + "status": "READY_TO_PROCESS", + } + with self.captureOnCommitCallbacks(execute=True): + process_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": process_project_data}, + ) + process_project_response = 
process_project_content["data"]["updateProjectStatus"] + assert process_project_response is not None, "Project ready to process response is None" + assert process_project_response["ok"], process_project_response["errors"] + assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be ready to process" + + # Create tutorial from above project + create_tutorial_data = test_data["create_tutorial"] + create_tutorial_data["project"] = project_id + with self.captureOnCommitCallbacks(execute=True): + tutorial_content = self.query_check( + self.Mutation.CREATE_TUTORIAL, + variables={"data": create_tutorial_data}, + ) + + tutorial_response = tutorial_content["data"]["createTutorial"] + assert tutorial_response is not None, "Tutorial create response is None" + assert tutorial_response["ok"] + + tutorial_id = tutorial_response["result"]["id"] + tutorial_fb_id = tutorial_response["result"]["firebaseId"] + tutorial_client_id = create_tutorial_data["clientId"] + + # Update Tutorial + with self.captureOnCommitCallbacks(execute=True): + update_tutorial_content = self.query_check( + query=self.Mutation.UPDATE_TUTORIAL, + variables={ + "data": test_data["update_tutorial"], + "pk": tutorial_id, + }, + ) + update_tutorial_response = update_tutorial_content["data"]["updateTutorial"] + assert update_tutorial_response is not None, "Tutorial update response is None" + assert update_tutorial_response["ok"], update_tutorial_response["errors"] + assert update_tutorial_response is not None, "Tutorial update response is None" + + # Publish tutorial + publish_tutorial_data = { + "clientId": tutorial_client_id, + "status": "READY_TO_PUBLISH", + } + with self.captureOnCommitCallbacks(execute=True): + publish_tutorial_content = self.query_check( + self.Mutation.UPDATE_TUTORIAL_STATUS, + variables={"pk": tutorial_id, "data": publish_tutorial_data}, + ) + publish_tutorial_response = publish_tutorial_content["data"]["updateTutorialStatus"] + assert 
publish_tutorial_response["ok"], publish_tutorial_response["errors"] + assert publish_tutorial_response is not None, "Processed tutorial publish response is None" + assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" + + tutorial_fb_ref = self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}") + tutorial_fb_data = tutorial_fb_ref.get() + + # Check tutorial in firebase + assert tutorial_fb_data is not None, "Tutorial in firebase is None" + assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary" + + filtered_tutorial_actual = tutorial_fb_data + filtered_tutorial_expected = test_data["expected_tutorial_data"] + assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase." + + # Check tutorial groups in firebase + tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/") + tutorial_groups_fb_data = tutorial_groups_fb_ref.get() + + filtered_group_actual = tutorial_groups_fb_data + filtered_group_expected = test_data["expected_tutorial_groups_data"] + assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase." + + # Check tutorial tasks in firebase + tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") + tutorial_task_fb_data: dict[str, typing.Any] = tutorial_tasks_ref.get() # type: ignore[reportArgumentType] + + # NOTE: We want to decode the tasks before comparison + for key, value in tutorial_task_fb_data.items(): + tutorial_task_fb_data[key] = decode_tasks(value) + + sanitized_tasks_actual = tutorial_task_fb_data + sanitized_tasks_expected = test_data["expected_tutorial_tasks_data"] + + assert sanitized_tasks_actual == sanitized_tasks_expected, ( + "Differences found between expected and actual tasks on tutorial in firebase." 
+ ) + + # Update processed project: attach tutorial, organization + update_processed_project_data = test_data["update_processed_project"] + update_processed_project_data["tutorial"] = tutorial_id + update_processed_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + update_processed_project_content = self.query_check( + self.Mutation.UPDATE_PROCESSED_PROJECT, + variables={"pk": project_id, "data": update_processed_project_data}, + ) + update_processed_response = update_processed_project_content["data"]["updateProcessedProject"] + assert update_processed_response["ok"], update_processed_response["errors"] + assert update_processed_response is not None, "Processed project update response is None" + + # Publish project + publish_project_data = { + "clientId": project_client_id, + "status": "READY_TO_PUBLISH", + } + with self.captureOnCommitCallbacks(execute=True): + publish_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": publish_project_data}, + ) + publish_project_response = publish_project_content["data"]["updateProjectStatus"] + assert publish_project_response["ok"], publish_project_response["errors"] + assert publish_project_response is not None, "Processed project publish response is None" + assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to publish" + + project_fb_ref = self.firebase_helper.ref(f"/v2/projects/{project_fb_id}") + project_fb_data = project_fb_ref.get() + + # Check project in firebase + assert project_fb_data is not None, "Project in firebase is None" + assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" + assert project_fb_data["created"] is not None, "Field 'created' should be defined" + assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp" + + ignored_project_keys = {"created"} + 
filtered_project_actual = remove_object_keys(project_fb_data, ignored_project_keys) + filtered_project_expected = remove_object_keys(test_data["expected_project_data"], ignored_project_keys) + assert filtered_project_actual == filtered_project_expected, "Difference found for project data in firebase." + + # Check project groups in firebase + groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/") + groups_fb_data = groups_fb_ref.get() + + filtered_group_actual = groups_fb_data + filtered_group_expected = test_data["expected_project_groups_data"] + assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase." + + # Check project tasks in firebase + project_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{project_fb_id}/") + project_tasks_fb_data: dict[str, typing.Any] = project_tasks_ref.get() # type: ignore[reportArgumentType] + + # NOTE: We want to decode the tasks before comparison + for key, value in project_tasks_fb_data.items(): + project_tasks_fb_data[key] = decode_tasks(value) + + sanitized_tasks_actual = project_tasks_fb_data + sanitized_tasks_expected = test_data["expected_project_tasks_data"] + + assert sanitized_tasks_actual == sanitized_tasks_expected, ( + "Differences found between expected and actual tasks on project in firebase." 
+ ) + + # Create contributor user group + old_contributor_user_group_data = test_data["create_contributor_user_group"] + for input_data in old_contributor_user_group_data: + usergroup_content = self.query_check( + self.Mutation.CREATE_CONTRIBUTOR_USER_GROUP, + variables={ + "data": input_data, + }, + ) + usergroup_response = usergroup_content["data"]["createContributorUserGroup"] + assert usergroup_response is not None, "usergroup create response is None" + assert usergroup_response["ok"] + + # Pull results from firebase + input_data = test_data["create_results"] + ref_results = self.firebase_helper.ref(f"/v2/results/{project_fb_id}") + ref_results.set(input_data) + + fb_results_data = ref_results.get() + assert fb_results_data is not None + + assert [ + MappingSession.objects.count(), + MappingSessionResult.objects.count(), + MappingSessionUserGroup.objects.count(), + MappingSessionUserGroupTemp.objects.count(), + MappingSessionResultTemp.objects.count(), + ] == [0, 0, 0, 0, 0], "Mapping session data should be empty before pull from firebase" + + project = Project.objects.get(id=project_id) + assert project.progress == 0 + + with self.captureOnCommitCallbacks(execute=True): + pull_results_from_firebase() + + assert [ + MappingSession.objects.count(), + MappingSessionResult.objects.count(), + MappingSessionUserGroup.objects.count(), + MappingSessionUserGroupTemp.objects.count(), + MappingSessionResultTemp.objects.count(), + ] == [ + test_data["expected_pulled_results_data"]["mapping_session_count"], + test_data["expected_pulled_results_data"]["mapping_session_results_count"], + test_data["expected_pulled_results_data"]["mapping_session_user_groups_count"], + 0, + 0, + ], "Difference found for pulled results data." 
+ + project.refresh_from_db() + assert project.progress == test_data["expected_pulled_results_data"]["progress"] + + # Check if progress and contributorCount synced to firebase + project_fb_data = project_fb_ref.get() + assert project_fb_data is not None, "Project in firebase is None" + assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" + assert project_fb_data["progress"] == project.progress, "Progress should be synced with firebase" + assert project_fb_data["contributorCount"] == 1, "Contributor count should be synced with firebase" + + if not test_data.get("expected_project_exports_data"): + return + + # Check groups export + groups_project_asset = ProjectAsset.objects.filter( + project=project, + type=AssetTypeEnum.EXPORT, + export_type=ProjectAssetExportTypeEnum.GROUPS, + ).first() + assert groups_project_asset is not None, "Groups project asset not found" + + expected_groups = read_csv( + Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["groups"]), + ignore_columns={ + "total_area", # NOTE: previously empty, now real value + "time_spent_max_allowed", # NOTE: previously empty, now real value + }, + ) + actual_groups = read_csv( + groups_project_asset.file, + compressed=True, + ignore_columns={ + "total_area", # NOTE: previously empty, now real value + "time_spent_max_allowed", # NOTE: previously empty, now real value + "project_internal_id", # NOTE: added for referencing + "group_internal_id", # NOTE: added for referencing + }, + ) + assert expected_groups == actual_groups, "Difference found for groups export file." 
+ + # Check tasks export + tasks_project_asset = ProjectAsset.objects.filter( + project=project, + type=AssetTypeEnum.EXPORT, + export_type=ProjectAssetExportTypeEnum.TASKS, + ).first() + assert tasks_project_asset is not None, "Tasks project asset not found" + + expected_tasks = read_csv( + Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["tasks"]), + sort_column=operator.itemgetter("task_id"), + ignore_columns={ + "", # NOTE: dataframe index + }, + ) + actual_tasks = read_csv( + tasks_project_asset.file, + compressed=True, + sort_column=operator.itemgetter("task_id"), + ignore_columns={ + "", # NOTE: dataframe index + "project_internal_id", # NOTE: added for referencing + "group_internal_id", # NOTE: added for referencing + "task_internal_id", # NOTE: added for referencing + }, + ) + assert expected_tasks == actual_tasks, "Difference found for tasks export file." + + # Check results export + results_project_asset = ProjectAsset.objects.filter( + project=project, + type=AssetTypeEnum.EXPORT, + export_type=ProjectAssetExportTypeEnum.RESULTS, + ).first() + assert results_project_asset is not None, "Results project asset not found" + + expected_results = read_csv( + Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["results"]), + sort_column=operator.itemgetter("task_id"), + ignore_columns={ + "", # NOTE: dataframe index + }, + ) + actual_results = read_csv( + results_project_asset.file, + sort_column=operator.itemgetter("task_id"), + ignore_columns={ + "", # NOTE: dataframe index + "task_internal_id", # NOTE: added for referencing + "user_internal_id", # NOTE: added for referencing + "group_internal_id", # NOTE: added for referencing + "project_internal_id", # NOTE: added for referencing + }, + compressed=True, + ) + assert expected_results == actual_results, "Difference found for results export file." 
+ + # Check aggregated results export + aggregated_results_project_asset = ProjectAsset.objects.filter( + project=project, + type=AssetTypeEnum.EXPORT, + export_type=ProjectAssetExportTypeEnum.AGGREGATED_RESULTS, + ).first() + assert aggregated_results_project_asset is not None, "Aggregated results project asset not found" + + expected_aggregated_results = read_csv( + Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["aggregated_results"]), + ) + actual_aggregated_results = read_csv( + aggregated_results_project_asset.file, + compressed=True, + ignore_columns={ + "project_internal_id", # NOTE: added for referencing + "group_internal_id", # NOTE: added for referencing + "task_internal_id", # NOTE: added for referencing + }, + ) + + assert expected_aggregated_results == actual_aggregated_results, ( + "Difference found for aggregated results export file." + ) + + # Check aggregated results with geometry export + aggregated_results_with_geometry_project_asset = ProjectAsset.objects.filter( + project=project, + type=AssetTypeEnum.EXPORT, + export_type=ProjectAssetExportTypeEnum.AGGREGATED_RESULTS_WITH_GEOMETRY, + ).first() + assert aggregated_results_with_geometry_project_asset is not None, ( + "Aggregated results with geometry project asset not found" + ) + + expected_aggregated_results_with_geometry = read_json( + Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["aggregated_results_with_geometry"]), + ignore_fields={ + "name", # NOTE: Previously "tmp", now "tmp" + random_str + }, + ) + actual_aggregated_results_with_geometry = read_json( + aggregated_results_with_geometry_project_asset.file, + compressed=True, + ignore_fields={ + "name", # NOTE: Previously "tmp", now "tmp" + random_str + "project_internal_id", # NOTE: added for referencing + "group_internal_id", # NOTE: added for referencing + "task_internal_id", # NOTE: added for referencing + }, + ) + assert expected_aggregated_results_with_geometry == 
actual_aggregated_results_with_geometry, ( + "Difference found for aggregated results with geometry export file." + ) + + # Check history export + history_project_asset = ProjectAsset.objects.filter( + project=project, + type=AssetTypeEnum.EXPORT, + export_type=ProjectAssetExportTypeEnum.HISTORY, + ).first() + assert history_project_asset is not None, "History project asset not found" + + expected_history = read_csv( + Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["history"]), + ) + actual_history = read_csv( + history_project_asset.file, + ) + assert expected_history == actual_history, "Difference found for history export file." + + # Check users export + users_project_asset = ProjectAsset.objects.filter( + project=project, + type=AssetTypeEnum.EXPORT, + export_type=ProjectAssetExportTypeEnum.USERS, + ).first() + assert users_project_asset is not None, "Users project asset not found" + + expected_users = read_csv( + Path(Config.BASE_DIR, test_data["expected_project_exports_data"]["users"]), + ) + actual_users = read_csv( + users_project_asset.file, + compressed=True, + ) + assert expected_users == actual_users, "Difference found for users export file." 
diff --git a/apps/project/tests/e2e_create_project_tile_map_service_test.py b/apps/project/tests/e2e_create_project_tile_map_service_test.py index 6b7de570..274d0182 100644 --- a/apps/project/tests/e2e_create_project_tile_map_service_test.py +++ b/apps/project/tests/e2e_create_project_tile_map_service_test.py @@ -771,6 +771,7 @@ def _test_project(self, projectKey: str, filename: str): "user_internal_id", # NOTE: added for referencing "group_internal_id", # NOTE: added for referencing "project_internal_id", # NOTE: added for referencing + "reference", }, compressed=True, ) diff --git a/apps/project/tests/e2e_create_street_project_test.py b/apps/project/tests/e2e_create_street_project_test.py index 65d93957..17997689 100644 --- a/apps/project/tests/e2e_create_street_project_test.py +++ b/apps/project/tests/e2e_create_street_project_test.py @@ -714,6 +714,7 @@ def _test_project(self, filename: str): "user_internal_id", # NOTE: added for referencing "group_internal_id", # NOTE: added for referencing "project_internal_id", # NOTE: added for referencing + "reference", }, compressed=True, ) diff --git a/apps/project/tests/e2e_create_validate_image_project_test.py b/apps/project/tests/e2e_create_validate_image_project_test.py index bb4aa269..541d1958 100644 --- a/apps/project/tests/e2e_create_validate_image_project_test.py +++ b/apps/project/tests/e2e_create_validate_image_project_test.py @@ -709,6 +709,7 @@ def _test_project(self, filename: str): "user_internal_id", # NOTE: added for referencing "group_internal_id", # NOTE: added for referencing "project_internal_id", # NOTE: added for referencing + "reference", }, compressed=True, ) diff --git a/apps/project/tests/e2e_create_validate_project_test.py b/apps/project/tests/e2e_create_validate_project_test.py index d3d0fb02..7283ea2e 100644 --- a/apps/project/tests/e2e_create_validate_project_test.py +++ b/apps/project/tests/e2e_create_validate_project_test.py @@ -750,6 +750,7 @@ def _test_project(self, filename: str): 
"user_internal_id", # NOTE: added for referencing "group_internal_id", # NOTE: added for referencing "project_internal_id", # NOTE: added for referencing + "reference", }, compressed=True, ) diff --git a/apps/tutorial/graphql/inputs/inputs.py b/apps/tutorial/graphql/inputs/inputs.py index 2d0c9b39..a701cad4 100644 --- a/apps/tutorial/graphql/inputs/inputs.py +++ b/apps/tutorial/graphql/inputs/inputs.py @@ -22,6 +22,7 @@ from .project_types.compare import CompareTutorialTaskPropertyInput from .project_types.completeness import CompletenessTutorialTaskPropertyInput +from .project_types.conflation import ConflationTutorialTaskPropertyInput from .project_types.find import FindTutorialTaskPropertyInput from .project_types.street import StreetTutorialTaskPropertyInput from .project_types.validate import ValidateTutorialTaskPropertyInput @@ -36,6 +37,7 @@ class TutorialTaskProjectTypeSpecificInput: validate_image: ValidateImageTutorialTaskPropertyInput | None = strawberry.UNSET completeness: CompletenessTutorialTaskPropertyInput | None = strawberry.UNSET street: StreetTutorialTaskPropertyInput | None = strawberry.UNSET + conflation: ConflationTutorialTaskPropertyInput | None = strawberry.UNSET @strawberry_django.input(TutorialTask) diff --git a/apps/tutorial/graphql/inputs/project_types/conflation.py b/apps/tutorial/graphql/inputs/project_types/conflation.py new file mode 100644 index 00000000..cf0bad76 --- /dev/null +++ b/apps/tutorial/graphql/inputs/project_types/conflation.py @@ -0,0 +1,7 @@ +import strawberry + +from project_types.conflation import tutorial as conflation_tutorial + + +@strawberry.experimental.pydantic.input(model=conflation_tutorial.ConflationTutorialTaskProperty, all_fields=True) +class ConflationTutorialTaskPropertyInput: ... 
diff --git a/apps/tutorial/graphql/types/project_types/conflation.py b/apps/tutorial/graphql/types/project_types/conflation.py new file mode 100644 index 00000000..0917ed84 --- /dev/null +++ b/apps/tutorial/graphql/types/project_types/conflation.py @@ -0,0 +1,7 @@ +import strawberry + +from project_types.conflation import tutorial as conflation_tutorial + + +@strawberry.experimental.pydantic.type(model=conflation_tutorial.ConflationTutorialTaskProperty, all_fields=True) +class ConflationTutorialTaskPropertyType: ... diff --git a/apps/tutorial/graphql/types/types.py b/apps/tutorial/graphql/types/types.py index a0ad4615..8f79dca8 100644 --- a/apps/tutorial/graphql/types/types.py +++ b/apps/tutorial/graphql/types/types.py @@ -17,6 +17,7 @@ import apps.project.graphql.types.asset_types # noqa: F401 # isort: skip # type: ignore[reportUnusedImport] +from project_types.conflation import tutorial as conflation_tutorial from project_types.street import tutorial as street_tutorial from project_types.tile_map_service.compare import tutorial as compare_tutorial from project_types.tile_map_service.completeness import tutorial as completeness_tutorial @@ -26,6 +27,7 @@ from .project_types.compare import CompareTutorialTaskPropertyType from .project_types.completeness import CompletenessTutorialTaskPropertyType +from .project_types.conflation import ConflationTutorialTaskPropertyType from .project_types.find import FindTutorialTaskPropertyType from .project_types.street import StreetTutorialTaskPropertyType from .project_types.validate import ValidateTutorialTaskPropertyType @@ -65,6 +67,7 @@ async def project_type_specifics( | ValidateImageTutorialTaskPropertyType | CompletenessTutorialTaskPropertyType | StreetTutorialTaskPropertyType + | ConflationTutorialTaskPropertyType | None ): data = task.project_type_specifics @@ -99,6 +102,11 @@ async def project_type_specifics( "StreetTutorialTaskPropertyType", street_tutorial.StreetTutorialTaskProperty.model_validate(data), ) + if 
project_type_enum == Project.Type.CONFLATION: + return typing.cast( + "ConflationTutorialTaskPropertyType", + conflation_tutorial.ConflationTutorialTaskProperty.model_validate(data), + ) typing.assert_never(project_type_enum) diff --git a/assets b/assets index 432bbd5a..b3e56bf3 160000 --- a/assets +++ b/assets @@ -1 +1 @@ -Subproject commit 432bbd5a38ac5bc13bb3f8e2f30e36ccfc3cc41c +Subproject commit b3e56bf31d7c7f474c411c82d1c194f4f828bf9c diff --git a/firebase b/firebase index 8b0d6833..b86181ad 160000 --- a/firebase +++ b/firebase @@ -1 +1 @@ -Subproject commit 8b0d6833579d182dab7d7ea1689e6b5c83d124b7 +Subproject commit b86181adb3bc8af3c65b3325ec7033c00aac3628 diff --git a/project_types/conflation/__init__.py b/project_types/conflation/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/project_types/conflation/project.py b/project_types/conflation/project.py new file mode 100644 index 00000000..75f21e8e --- /dev/null +++ b/project_types/conflation/project.py @@ -0,0 +1,310 @@ +import json +import logging +import typing +from typing import Any + +import requests +from django.contrib.gis.geos import GEOSGeometry +from django.core.files.base import ContentFile +from pydantic import BaseModel, model_validator +from pyfirebase_mapswipe import models as firebase_models +from ulid import ULID + +from apps.common.models import ( + AssetMimetypeEnum, + AssetTypeEnum, +) +from apps.project.models import ( + Geometry, + Project, + ProjectAsset, + ProjectTask, + ProjectTaskGroup, +) +from main.bulk_managers import BulkCreateManager +from project_types.base import project as base_project +from project_types.tile_map_service.base.project import create_json_dump +from utils import fields as custom_fields +from utils.common import Grouping, clean_up_none_keys, to_groups +from utils.custom_options.models import CustomOption +from utils.geo.raster_tile_server.models import RasterTileServerConfig +from utils.geo.transform import ( + AoiFeature, + 
convert_feature_to_wkt, + convert_json_dict_to_geometry_collection, + get_area_of_geometry, + get_polygon_of_extent, +) + +logger = logging.getLogger(__name__) + + +class ConflationObjectSourceConfig(BaseModel): + object_geojson_url: custom_fields.PydanticUrl | None = None + + @model_validator(mode="after") + def check_validate_data(self) -> typing.Self: + if self.object_geojson_url is None: + raise ValueError("Object GeoJSON URL is required") + return self + + +class ConflationProjectProperty(base_project.BaseProjectProperty): + tile_server_property: RasterTileServerConfig + object_source: ConflationObjectSourceConfig + custom_options: list[CustomOption] | None = None + + +class ConflationProjectTaskGroupProperty(base_project.BaseProjectTaskGroupProperty): ... + + +class ConflationProjectTaskProperty(base_project.BaseProjectTaskProperty): + # TODO(tnagorra): We might need to rename this to ohsome_feature_id + task_id: str + # TODO(tnagorra): We need to define the type for properties + properties: dict[str, Any] + # NOTE: We need to send geometry to firebase + # geometry: str + + +class ConflationProject( + base_project.BaseProject[ + ConflationProjectProperty, + ConflationProjectTaskGroupProperty, + ConflationProjectTaskProperty, + list[AoiFeature], + Grouping[AoiFeature], + ], +): + project_property_class = ConflationProjectProperty + project_task_group_property_class = ConflationProjectTaskGroupProperty + project_task_property_class = ConflationProjectTaskProperty + + def __init__(self, project: Project): + super().__init__(project) + + # TODO: Is this used? + @staticmethod + def validate_object_count(count: int | None) -> int: + if count is None or count <= 0: + raise base_project.ValidationException( + "AOI does not contain objects from selected filter.", + ) + + allowed_count = 100000 + + if count > allowed_count: + raise base_project.ValidationException( + f"AOI contains more than 100,000 objects. 
-> {count}", + ) + + return count + + def _validate_object_geojson_url(self): + url = self.project_type_specifics.object_source.object_geojson_url + if url is None: + raise base_project.ValidationException("Object Geojson URL is missing") + + logger.info("Fetching object geojson from %s", url) + + # FIXME(frozenhelium): use predefined timeout duration + # FIXME(tnagorra): handle timeout error + response = requests.get(url, timeout=500) + if response.status_code != 200: + raise base_project.ValidationException( + f"Failed to fetch object geojson from {url}", + ) + + logger.info("Successfully fetched object geojson from %s", url) + try: + geojson = response.json() + except Exception as e: + raise base_project.ValidationException("GeoJSON URL did not respond with valid JSON") from e + + try: + features, geometry_collection = convert_json_dict_to_geometry_collection(geojson) + except Exception as e: + raise base_project.ValidationException( + "GeoJSON URL did not respond with a valid feature collection of polygon or multi-polygon", + ) from e + + # TODO(tnagorra): Also store intermediate geometries? + # TODO(tnagorra): Also create a input geometry? 
+ hull = geometry_collection.convex_hull + hull_extent = hull.extent + hull_bbox = get_polygon_of_extent(hull_extent) + hull_center = hull.centroid + area_km2 = get_area_of_geometry(geometry_collection) + + proj_aoi_geometry = self.project.aoi_geometry + if not proj_aoi_geometry: + aoi_geometry = Geometry( + bbox=hull_bbox, + centroid=hull_center, + geometry=hull, + total_area=area_km2, + ) + aoi_geometry.save() + self.project.aoi_geometry = aoi_geometry + else: + proj_aoi_geometry.bbox = hull_bbox + proj_aoi_geometry.centroid = hull_center + proj_aoi_geometry.geometry = hull + proj_aoi_geometry.total_area = area_km2 + proj_aoi_geometry.save() + self.project.total_area = area_km2 + self.project.bbox = hull_bbox + self.project.centroid = hull_center + self.project.save(update_fields=["aoi_geometry", "total_area", "bbox", "centroid"]) + + # FIXME(frozenhelium): add validation for object count? + + return features + + @typing.override + def validate(self) -> list[AoiFeature]: + """Validate project before creating groups.""" + self.project.update_processing_status(Project.ProcessingStatus.VALIDATING_GEOMETRY, True) + return self._validate_object_geojson_url() + + @typing.override + def create_tasks(self, group: ProjectTaskGroup, raw_group: Grouping[AoiFeature]) -> int: + """Create tasks for a group.""" + bulk_mgr = BulkCreateManager(chunk_size=1000) + + tasks_count = 0 + features = raw_group["features"] + f_ids = raw_group["feature_ids"] + + for i, f_id in enumerate(f_ids): + feature = features[i] + geometry_str = convert_feature_to_wkt(feature) + + if geometry_str is not None: + bulk_mgr.add( + ProjectTask( + firebase_id=f"t{f_id}", + task_group_id=group.pk, + geometry=geometry_str, + project_type_specifics=clean_up_none_keys( + self.project_task_property_class( + task_id=f"t{f_id}", + properties=feature.properties or {}, + ).model_dump(), + ), + ), + ) + tasks_count += 1 + + bulk_mgr.done() + return tasks_count + + @typing.override + def create_groups(self, resp: 
list[AoiFeature]): + self.project.update_processing_status(Project.ProcessingStatus.GENERATING_GROUPS_AND_TASKS, True) + raw_groups = to_groups(resp, self.project.group_size) + + for group_key, raw_group in raw_groups.items(): + new_group = ProjectTaskGroup.objects.create( + firebase_id=group_key, + project_id=self.project.pk, + number_of_tasks=0, + progress=0, + finished_count=0, + required_count=0, + project_type_specifics=clean_up_none_keys(self.project_task_group_property_class().model_dump()), + ) + + # Create new tasks for this group + total_tasks = self.create_tasks( + group=new_group, + raw_group=raw_group, + ) + + # FIXME(tnagorra): This is not correct + logger.info("Created %s tasks for group: %s", total_tasks, new_group.pk) + + @typing.override + def post_create_groups(self): + # NOTE: Create a geojson from the tasks (useful for tutorial creation) + self.project.update_processing_status(Project.ProcessingStatus.GENERATING_TASKS_GEOJSON, True) + + tasks_qs = ProjectTask.objects.filter(task_group__project_id=self.project.pk) + + def get_feature(task: ProjectTask): + geom = GEOSGeometry(task.geometry, srid=4326) + geojson = json.loads(geom.geojson) + + return { + "type": "Feature", + "geometry": geojson, + "properties": { + # FIXME(tnagorra): revisit this, should we use firebase_id + "group_id": task.task_group_id, + "task_id": task.pk, + }, + } + + feature_collection = { + "type": "FeatureCollection", + "metadata": { + "project_id": self.project.pk, + }, + "features": [get_feature(task) for task in tasks_qs], + } + file = ContentFile( + create_json_dump(feature_collection), + "processed_geometry.geojson", + ) + + asset = ProjectAsset.objects.create( + client_id=str(ULID()), + project=self.project, + file=file, + file_size=file.size, + type=AssetTypeEnum.OUTPUT, + mimetype=AssetMimetypeEnum.GEOJSON, + # FIXME(tnagorra): Maybe create a internal user like mapswipe-bot + created_by=self.project.modified_by, + modified_by=self.project.modified_by, + ) + 
self.project.project_type_specific_output_asset = asset + self.project.save(update_fields=("project_type_specific_output_asset",)) + + @typing.override + def get_max_time_spend_percentile(self) -> float: + return 6.1 + + # FIREBASE + + @typing.override + def compress_tasks_on_firebase(self) -> bool: + return True + + @typing.override + def get_task_specifics_for_firebase(self, task: ProjectTask): + assert task.geometry is not None, "Task geometry must not be None" + return firebase_models.FbMappingTaskConflationCreateOnlyInput( + taskId=task.firebase_id, + geojson=json.loads(task.geometry.geojson), + ) + + @typing.override + def get_group_specifics_for_firebase(self, group: ProjectTaskGroup): + return firebase_models.FbMappingGroupConflationCreateOnlyInput( + groupId=group.firebase_id, + ) + + @typing.override + def get_project_specifics_for_firebase(self): + tsp = self.project_type_specifics.tile_server_property + return firebase_models.FbProjectConflationCreateOnlyInput( + # Conflation does not allow custom options + tileServer=firebase_models.FbObjRasterTileServer( + name=tsp.name.to_firebase(), + credits=tsp.get_config()["credits"], + url=tsp.get_config()["raw_url"], + apiKey=tsp.get_config()["api_key"], + wmtsLayerName=None, + ), + ) diff --git a/project_types/conflation/tutorial.py b/project_types/conflation/tutorial.py new file mode 100644 index 00000000..77570d73 --- /dev/null +++ b/project_types/conflation/tutorial.py @@ -0,0 +1,84 @@ +import json +import logging +import typing + +from pyfirebase_mapswipe import extended_models as firebase_ext_models +from pyfirebase_mapswipe import models as firebase_models + +from apps.project.models import ProjectTypeEnum +from apps.tutorial.models import Tutorial, TutorialTask +from project_types.base import tutorial as base_tutorial +from project_types.base.tutorial import BaseTutorialTaskProperty +from utils.geo.transform import convert_json_str_to_wkt + +from .project import ConflationProjectProperty + +logger = 
logging.getLogger(__name__) + + +class ConflationTutorialTaskProperty(BaseTutorialTaskProperty): + # FIXME(tnagorra): add positive integer + identifier: int + # FIXME(tnagorra): Use geometry from TutorialTask + object_geometry: str + + +class ConflationTutorial( + base_tutorial.BaseTutorial[ + ConflationProjectProperty, + ConflationTutorialTaskProperty, + ], +): + project_property_class = ConflationProjectProperty + tutorial_task_property_class = ConflationTutorialTaskProperty + + def __init__(self, tutorial: Tutorial): + super().__init__(tutorial) + + @typing.override + def compress_tasks_on_firebase(self) -> bool: + return True + + @typing.override + def get_task_specifics_for_firebase(self, task: TutorialTask, index: int, screen: int): + task_specifics = self.tutorial_task_property_class.model_validate(task.project_type_specifics) + + geojson = json.loads(task_specifics.object_geometry) + geometry_wkt = convert_json_str_to_wkt(task_specifics.object_geometry) + + return firebase_models.FbConflationTutorialTask( + taskId=f"t{index}", + geojson=geojson, + properties=firebase_models.FbConflationTutorialTaskProperties( + id=task_specifics.identifier, + reference=task.reference, + screen=screen, + ), + geometry=geometry_wkt, + ) + + @typing.override + def get_group_specifics_for_firebase(self): + return firebase_ext_models.FbEmptyModel() + + @typing.override + def get_tutorial_specifics_for_firebase(self): + tsp = self.project_type_specifics.tile_server_property + + projectType = ProjectTypeEnum.CONFLATION.value + assert projectType == 8, "Project Conflation should be 8" + + return firebase_models.FbConflationTutorial( + # FIXME(tnagorra): This is the path to local storage. 
+ inputGeometries="", + # FIXME(tnagorra): Check if this is always 18, app is calculating zoomLevel using geometry + zoomLevel=18, + projectType=projectType, + tileServer=firebase_models.FbObjRasterTileServer( + name=tsp.name.to_firebase(), + credits=tsp.get_config()["credits"], + url=tsp.get_config()["raw_url"], + apiKey=tsp.get_config()["api_key"], + wmtsLayerName=None, + ), + ) diff --git a/project_types/store.py b/project_types/store.py index f380cbe8..79500d43 100644 --- a/project_types/store.py +++ b/project_types/store.py @@ -4,6 +4,8 @@ from project_types.street.project import StreetProject, StreetProjectProperty from project_types.street.tutorial import StreetTutorial, StreetTutorialTaskProperty +from .conflation.project import ConflationProject, ConflationProjectProperty +from .conflation.tutorial import ConflationTutorial, ConflationTutorialTaskProperty from .tile_map_service.compare.project import CompareProject, CompareProjectProperty from .tile_map_service.compare.tutorial import CompareTutorial, CompareTutorialTaskProperty from .tile_map_service.completeness.project import CompletenessProject, CompletenessProjectProperty @@ -32,6 +34,8 @@ def get_tutorial_task_property(project_type: ProjectTypeEnum | None): return ("completeness", CompletenessTutorialTaskProperty) if project_type == ProjectTypeEnum.STREET: return ("street", StreetTutorialTaskProperty) + if project_type == ProjectTypeEnum.CONFLATION: + return ("conflation", ConflationTutorialTaskProperty) typing.assert_never(project_type) @@ -51,11 +55,19 @@ def get_project_property(project_type: ProjectTypeEnum | None): return ("completeness", CompletenessProjectProperty) if project_type == ProjectTypeEnum.STREET: return ("street", StreetProjectProperty) + if project_type == ProjectTypeEnum.CONFLATION: + return ("conflation", ConflationProjectProperty) typing.assert_never(project_type) type ProjectTypeHandlers = type[ - CompareProject | ValidateProject | ValidateImageProject | FindProject | 
CompletenessProject | StreetProject + CompareProject + | ValidateProject + | ValidateImageProject + | FindProject + | CompletenessProject + | StreetProject + | ConflationProject ] @@ -95,6 +107,12 @@ def get_project_type_handler( ) -> type[StreetProject]: ... +@typing.overload +def get_project_type_handler( + project_type: typing.Literal[ProjectTypeEnum.CONFLATION], +) -> type[ConflationProject]: ... + + def get_project_type_handler(project_type: ProjectTypeEnum) -> ProjectTypeHandlers: match project_type: case ProjectTypeEnum.FIND: @@ -109,10 +127,18 @@ def get_project_type_handler(project_type: ProjectTypeEnum) -> ProjectTypeHandle return ValidateImageProject case ProjectTypeEnum.STREET: return StreetProject + case ProjectTypeEnum.CONFLATION: + return ConflationProject type TutorialTypeHandlers = type[ - CompareTutorial | ValidateTutorial | FindTutorial | CompletenessTutorial | ValidateImageTutorial | StreetTutorial + CompareTutorial + | ValidateTutorial + | FindTutorial + | CompletenessTutorial + | ValidateImageTutorial + | StreetTutorial + | ConflationTutorial ] @@ -153,6 +179,12 @@ def get_tutorial_type_handler( ) -> type[typing.Any]: ... +@typing.overload +def get_tutorial_type_handler( + tutorial_type: typing.Literal[ProjectTypeEnum.CONFLATION], +) -> type[ConflationTutorial]: ... + + def get_tutorial_type_handler(tutorial_type: ProjectTypeEnum) -> TutorialTypeHandlers: match tutorial_type: case ProjectTypeEnum.FIND: @@ -167,3 +199,5 @@ def get_tutorial_type_handler(tutorial_type: ProjectTypeEnum) -> TutorialTypeHan return ValidateImageTutorial case ProjectTypeEnum.STREET: return StreetTutorial + case ProjectTypeEnum.CONFLATION: + return ConflationTutorial diff --git a/schema.graphql b/schema.graphql index c66dc63d..b7ff320c 100644 --- a/schema.graphql +++ b/schema.graphql @@ -318,7 +318,7 @@ type CompareProjectPropertyType { zoomLevel: Int! 
} -union CompareProjectPropertyTypeFindProjectPropertyTypeValidateProjectPropertyTypeValidateImageProjectPropertyTypeCompletenessProjectPropertyTypeStreetProjectPropertyType = CompareProjectPropertyType | CompletenessProjectPropertyType | FindProjectPropertyType | StreetProjectPropertyType | ValidateImageProjectPropertyType | ValidateProjectPropertyType +union CompareProjectPropertyTypeFindProjectPropertyTypeValidateProjectPropertyTypeValidateImageProjectPropertyTypeCompletenessProjectPropertyTypeStreetProjectPropertyTypeConflationProjectPropertyType = CompareProjectPropertyType | CompletenessProjectPropertyType | ConflationProjectPropertyType | FindProjectPropertyType | StreetProjectPropertyType | ValidateImageProjectPropertyType | ValidateProjectPropertyType input CompareTutorialTaskPropertyInput { tileX: Int! @@ -332,7 +332,7 @@ type CompareTutorialTaskPropertyType { tileZ: Int! } -union CompareTutorialTaskPropertyTypeFindTutorialTaskPropertyTypeValidateTutorialTaskPropertyTypeValidateImageTutorialTaskPropertyTypeCompletenessTutorialTaskPropertyTypeStreetTutorialTaskPropertyType = CompareTutorialTaskPropertyType | CompletenessTutorialTaskPropertyType | FindTutorialTaskPropertyType | StreetTutorialTaskPropertyType | ValidateImageTutorialTaskPropertyType | ValidateTutorialTaskPropertyType +union CompareTutorialTaskPropertyTypeFindTutorialTaskPropertyTypeValidateTutorialTaskPropertyTypeValidateImageTutorialTaskPropertyTypeCompletenessTutorialTaskPropertyTypeStreetTutorialTaskPropertyTypeConflationTutorialTaskPropertyType = CompareTutorialTaskPropertyType | CompletenessTutorialTaskPropertyType | ConflationTutorialTaskPropertyType | FindTutorialTaskPropertyType | StreetTutorialTaskPropertyType | ValidateImageTutorialTaskPropertyType | ValidateTutorialTaskPropertyType input CompletenessProjectPropertyInput { """Numeric value as string""" @@ -366,6 +366,36 @@ type CompletenessTutorialTaskPropertyType { tileZ: Int! 
} +type ConflationObjectSourceConfig { + objectGeojsonUrl: String +} + +input ConflationObjectSourceConfigInput { + objectGeojsonUrl: String = null +} + +input ConflationProjectPropertyInput { + customOptions: [CustomOptionInput!] = null + objectSource: ConflationObjectSourceConfigInput! + tileServerProperty: ProjectRasterTileServerConfigInput! +} + +type ConflationProjectPropertyType { + customOptions: [ProjectCustomOption!] + objectSource: ConflationObjectSourceConfig! + tileServerProperty: ProjectRasterTileServerConfig! +} + +input ConflationTutorialTaskPropertyInput { + identifier: Int! + objectGeometry: String! +} + +type ConflationTutorialTaskPropertyType { + identifier: Int! + objectGeometry: String! +} + type ContributorSwipeStatType { taskDate: Date! totalSwipes: Int! @@ -1826,7 +1856,7 @@ type ProjectType implements UserResourceTypeMixin & ProjectExportAssetTypeMixin projectNumber: Int! projectType: ProjectTypeEnum! projectTypeSpecificOutputAsset: ProjectAssetType - projectTypeSpecifics: CompareProjectPropertyTypeFindProjectPropertyTypeValidateProjectPropertyTypeValidateImageProjectPropertyTypeCompletenessProjectPropertyTypeStreetProjectPropertyType + projectTypeSpecifics: CompareProjectPropertyTypeFindProjectPropertyTypeValidateProjectPropertyTypeValidateImageProjectPropertyTypeCompletenessProjectPropertyTypeStreetProjectPropertyTypeConflationProjectPropertyType region: String! 
"""Which group, institution or community is requesting this project?""" @@ -1863,6 +1893,7 @@ type ProjectTypeAreaStatsType { enum ProjectTypeEnum { COMPARE COMPLETENESS + CONFLATION FIND STREET VALIDATE @@ -1936,6 +1967,7 @@ type ProjectTypeOffsetPaginated { input ProjectTypeSpecificInput @oneOf { compare: CompareProjectPropertyInput completeness: CompletenessProjectPropertyInput + conflation: ConflationProjectPropertyInput find: FindProjectPropertyInput street: StreetProjectPropertyInput validate: ValidateProjectPropertyInput @@ -2545,6 +2577,7 @@ input TutorialTaskInput { input TutorialTaskProjectTypeSpecificInput @oneOf { compare: CompareTutorialTaskPropertyInput completeness: CompletenessTutorialTaskPropertyInput + conflation: ConflationTutorialTaskPropertyInput find: FindTutorialTaskPropertyInput street: StreetTutorialTaskPropertyInput validate: ValidateTutorialTaskPropertyInput @@ -2559,7 +2592,7 @@ type TutorialTaskType implements UserResourceTypeMixin { id: ID! modifiedAt: DateTime! modifiedBy: UserType! - projectTypeSpecifics: CompareTutorialTaskPropertyTypeFindTutorialTaskPropertyTypeValidateTutorialTaskPropertyTypeValidateImageTutorialTaskPropertyTypeCompletenessTutorialTaskPropertyTypeStreetTutorialTaskPropertyType + projectTypeSpecifics: CompareTutorialTaskPropertyTypeFindTutorialTaskPropertyTypeValidateTutorialTaskPropertyTypeValidateImageTutorialTaskPropertyTypeCompletenessTutorialTaskPropertyTypeStreetTutorialTaskPropertyTypeConflationTutorialTaskPropertyType reference: Int! scenarioId: ID! }