diff --git a/API/hdx.py b/API/hdx.py
index 7169fe78..f117e0b0 100644
--- a/API/hdx.py
+++ b/API/hdx.py
@@ -9,13 +9,13 @@
 from .api_worker import process_hdx_request
 from .auth import AuthUser, UserRole, staff_required
 
-router = APIRouter(prefix="/hdx", tags=["HDX"])
+router = APIRouter(prefix="/custom", tags=["Custom Exports"])
 
 
-@router.post("/submit/")
+@router.post("/snapshot/")
 @limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute")
 @version(1)
-async def process_hdx_requests(
+async def process_custom_requests(
     request: Request,
     user: AuthUser = Depends(staff_required),
     params: DynamicCategoriesModel = Body(
@@ -124,6 +124,92 @@ async def process_hdx_requests(
                     ],
                 },
             },
+            "normal_polygon_TM": {
+                "summary": "Example: Tasking Manager Mapping type extraction for a Project",
+                "description": "Example Query to extract building,roads,waterways and landuse in sample TM Project , Pokhara, Nepal",
+                "value": {
+                    "geometry": {
+                        "type": "Polygon",
+                        "coordinates": [
+                            [
+                                [83.96919250488281, 28.194446860487773],
+                                [83.99751663208006, 28.194446860487773],
+                                [83.99751663208006, 28.214869548073377],
+                                [83.96919250488281, 28.214869548073377],
+                                [83.96919250488281, 28.194446860487773],
+                            ]
+                        ],
+                    },
+                    "queue": "raw_default",
+                    "dataset": {
+                        "dataset_prefix": "hotosm_project_1",
+                        "dataset_folder": "TM",
+                        "dataset_title": "Tasking Manger Project 1",
+                    },
+                    "categories": [
+                        {
+                            "Buildings": {
+                                "types": ["polygons"],
+                                "select": [
+                                    "name",
+                                    "building",
+                                    "building:levels",
+                                    "building:materials",
+                                    "addr:full",
+                                    "addr:housenumber",
+                                    "addr:street",
+                                    "addr:city",
+                                    "office",
+                                    "source",
+                                ],
+                                "where": "tags['building'] IS NOT NULL",
+                                "formats": ["geojson", "shp", "kml"],
+                            },
+                            "Roads": {
+                                "types": ["lines"],
+                                "select": [
+                                    "name",
+                                    "highway",
+                                    "surface",
+                                    "smoothness",
+                                    "width",
+                                    "lanes",
+                                    "oneway",
+                                    "bridge",
+                                    "layer",
+                                    "source",
+                                ],
+                                "where": "tags['highway'] IS NOT NULL",
+                                "formats": ["geojson", "shp", "kml"],
+                            },
+                            "Waterways": {
+                                "types": ["lines", "polygons"],
+                                "select": [
+                                    "name",
+                                    "waterway",
+                                    "covered",
+                                    "width",
+                                    "depth",
+                                    "layer",
+                                    "blockage",
+                                    "tunnel",
+                                    "natural",
+                                    "water",
+                                    "source",
+                                ],
+                                "where": "tags['waterway'] IS NOT NULL OR tags['water'] IS NOT NULL OR tags['natural'] IN ('water','wetland','bay')",
+                                "formats": ["geojson", "shp", "kml"],
+                            },
+                            "Landuse": {
+                                "types": ["points", "polygons"],
+                                "select": ["name", "amenity", "landuse", "leisure"],
+                                "where": "tags['landuse'] IS NOT NULL",
+                                "formats": ["geojson", "shp", "kml"],
+                            },
+                        }
+                    ],
+                },
+            },
             "fullset": {
                 "summary": "Full HDX Dataset default",
                 "description": "Full yaml conversion for dataset with iso3 example",
diff --git a/src/app.py b/src/app.py
index de2e29b4..3d9c4c2f 100644
--- a/src/app.py
+++ b/src/app.py
@@ -1533,6 +1533,7 @@ def resource_to_hdx(self, uploaded_resources, dataset_config, category):
                 else None,
             },
         )
+        logging.info("Initiating HDX Upload")
         uploader.init_dataset()
         non_hdx_resources = []
         for resource in uploaded_resources:
diff --git a/src/validation/models.py b/src/validation/models.py
index 642b37b1..ac4bfe42 100644
--- a/src/validation/models.py
+++ b/src/validation/models.py
@@ -334,7 +334,7 @@ class HDXModel(BaseModel):
     """
 
     tags: List[str] = Field(
-        ...,
+        default=["geodata"],
         description="List of tags for the HDX model.",
         example=["roads", "transportation", "geodata"],
     )
@@ -362,11 +362,12 @@ def validate_tags(cls, value):
         Returns:
             _type_: _description_
         """
-        for item in value:
-            if item.strip() not in ALLOWED_HDX_TAGS:
-                raise ValueError(
-                    f"Invalid tag {item.strip()} , Should be within {ALLOWED_HDX_TAGS}"
-                )
+        if value:
+            for item in value:
+                if item.strip() not in ALLOWED_HDX_TAGS:
+                    raise ValueError(
+                        f"Invalid tag {item.strip()} , Should be within {ALLOWED_HDX_TAGS}"
+                    )
         return value
 
 
@@ -382,7 +383,9 @@ class CategoryModel(BaseModel):
     - formats (List[str]): List of Export Formats (suffixes).
     """
 
-    hdx: HDXModel
+    hdx: Optional[HDXModel] = Field(
+        default=None, description="HDX Specific configurations"
+    )
     types: List[str] = Field(
         ...,
         description="List of feature types (points, lines, polygons).",
@@ -513,7 +516,7 @@ class DatasetConfig(BaseModel):
         description="Dataset prefix to be appended before category name, Will be ignored if iso3 is supplied",
         example="hotosm_npl",
     )
-    dataset_locations: List[str] = Field(
+    dataset_locations: List[str] | None = Field(
         default=None,
         description="Valid dataset locations iso3",
         example="['npl']",
@@ -562,6 +565,10 @@ class DynamicCategoriesModel(BaseModel, GeometryValidatorMixin):
         max_length=3,
         example="USA",
     )
+    hdx_upload: bool = Field(
+        default=False,
+        description="Enable/Disable uploading dataset to hdx, False by default",
+    )
     dataset: Optional[DatasetConfig] = Field(
         default=None, description="Dataset Configurations for HDX Upload"
     )
@@ -573,11 +580,6 @@ class DynamicCategoriesModel(BaseModel, GeometryValidatorMixin):
         default=False,
         description="Dumps Meta db in parquet format & hdx config json to s3",
     )
-    hdx_upload: bool = Field(
-        default=False,
-        description="Enable/Disable uploading dataset to hdx, False by default",
-    )
-
     categories: List[Dict[str, CategoryModel]] = Field(
         ...,
         description="List of dynamic categories.",
@@ -622,11 +624,17 @@ def set_geometry_or_iso3(cls, value, values):
         if value is None and values.get("iso3") is None:
             raise ValueError("Either geometry or iso3 should be supplied.")
         if value is not None:
-            dataset = values.get("dataset").dict()
-            if dataset is None:
+            dataset = values.get("dataset")
+            if values.get("hdx_upload"):
+                for category in values.get("categories"):
+                    category_name, category_data = list(category.items())[0]
+                    if category_data.hdx is None:
+                        raise ValueError(f"HDX is missing for category {category}")
+
+            if dataset is None and values.get("hdx_upload"):
                 raise ValueError("Dataset config should be supplied for custom polygon")
-
-            for item in dataset.keys():
-                if dataset.get(item) is None:
-                    raise ValueError(f"Missing, Dataset config : {item}")
+            if values.get("hdx_upload"):
+                for item in dataset:
+                    if item is None:
+                        raise ValueError(f"Missing, Dataset config : {item}")
         return value
diff --git a/tests/test_API.py b/tests/test_API.py
index 416712ac..a4e43863 100644
--- a/tests/test_API.py
+++ b/tests/test_API.py
@@ -713,7 +713,7 @@ def test_hdx_submit_normal_iso3():
         ],
     }
 
-    response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
+    response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)
 
     assert response.status_code == 200
     res = response.json()
@@ -759,7 +759,7 @@ def test_hdx_submit_normal_iso3_multiple_format():
         ],
     }
 
-    response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
+    response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)
 
     assert response.status_code == 200
     res = response.json()
@@ -822,7 +822,117 @@ def test_hdx_submit_normal_custom_polygon():
         ],
     }
 
-    response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
+    response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)
+
+    assert response.status_code == 200
+    res = response.json()
+    track_link = res["track_link"]
+    max_attempts = 6
+    interval_seconds = 10
+    for attempt in range(1, max_attempts + 1):
+        time.sleep(interval_seconds)  # wait for worker to complete task
+
+        response = client.get(f"/v1{track_link}")
+        assert response.status_code == 200
+        res = response.json()
+        check_status = res["status"]
+
+        if check_status == "SUCCESS":
+            break  # exit the loop if the status is SUCCESS
+
+        if attempt == max_attempts:
+            # If max_attempts reached and status is not SUCCESS, raise an AssertionError
+            assert (
+                False
+            ), f"Task did not complete successfully after {max_attempts} attempts"
+
+
+def test_custom_submit_normal_custom_polygon_TM_project():
+    headers = {"access-token": access_token}
+    payload = {
+        "geometry": {
+            "type": "Polygon",
+            "coordinates": [
+                [
+                    [83.96919250488281, 28.194446860487773],
+                    [83.99751663208006, 28.194446860487773],
+                    [83.99751663208006, 28.214869548073377],
+                    [83.96919250488281, 28.214869548073377],
+                    [83.96919250488281, 28.194446860487773],
+                ]
+            ],
+        },
+        "queue": "raw_default",
+        "dataset": {
+            "dataset_prefix": "hotosm_project_1",
+            "dataset_folder": "TM",
+            "dataset_title": "Tasking Manger Project 1",
+        },
+        "categories": [
+            {
+                "Buildings": {
+                    "types": ["polygons"],
+                    "select": [
+                        "name",
+                        "building",
+                        "building:levels",
+                        "building:materials",
+                        "addr:full",
+                        "addr:housenumber",
+                        "addr:street",
+                        "addr:city",
+                        "office",
+                        "source",
+                    ],
+                    "where": "tags['building'] IS NOT NULL",
+                    "formats": ["geojson", "shp", "kml"],
+                },
+                "Roads": {
+                    "types": ["lines"],
+                    "select": [
+                        "name",
+                        "highway",
+                        "surface",
+                        "smoothness",
+                        "width",
+                        "lanes",
+                        "oneway",
+                        "bridge",
+                        "layer",
+                        "source",
+                    ],
+                    "where": "tags['highway'] IS NOT NULL",
+                    "formats": ["geojson", "shp", "kml"],
+                },
+                "Waterways": {
+                    "types": ["lines", "polygons"],
+                    "select": [
+                        "name",
+                        "waterway",
+                        "covered",
+                        "width",
+                        "depth",
+                        "layer",
+                        "blockage",
+                        "tunnel",
+                        "natural",
+                        "water",
+                        "source",
+                    ],
+                    "where": "tags['waterway'] IS NOT NULL OR tags['water'] IS NOT NULL OR tags['natural'] IN ('water','wetland','bay')",
+                    "formats": ["geojson", "shp", "kml"],
+                },
+                "Landuse": {
+                    "types": ["points", "polygons"],
+                    "select": ["name", "amenity", "landuse", "leisure"],
+                    "where": "tags['landuse'] IS NOT NULL",
+                    "formats": ["geojson", "shp", "kml"],
+                },
+            }
+        ],
+    }
+
+    response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)
 
     assert response.status_code == 200
     res = response.json()
@@ -886,7 +996,7 @@ def test_hdx_submit_normal_custom_polygon_upload():
         ],
     }
 
-    response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
+    response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)
 
     assert response.status_code == 200
     res = response.json()
@@ -1188,7 +1298,7 @@ def test_full_hdx_set_iso():
         ],
     }
 
-    response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
+    response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)
 
     assert response.status_code == 200
     res = response.json()
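Taken together, these changes rename the HDX-specific endpoint to the generic /v1/custom/snapshot/ route, make the per-category hdx block and HDX tags optional, and gate the dataset/HDX validation behind the new hdx_upload flag, which now defaults to False. A minimal request sketch against the renamed endpoint follows; the base URL, token value, and use of the requests client are illustrative assumptions and not part of this changeset.

# Sketch only: call the renamed endpoint with hdx_upload left at its new
# default (False), so neither a per-category "hdx" block nor a "dataset"
# config is required. API_BASE and ACCESS_TOKEN are assumptions.
import requests

API_BASE = "http://localhost:8000"  # assumption: local dev instance
ACCESS_TOKEN = "<staff-access-token>"  # assumption: obtained via the auth flow

payload = {
    "geometry": {
        "type": "Polygon",
        "coordinates": [
            [
                [83.96919250488281, 28.194446860487773],
                [83.99751663208006, 28.194446860487773],
                [83.99751663208006, 28.214869548073377],
                [83.96919250488281, 28.214869548073377],
                [83.96919250488281, 28.194446860487773],
            ]
        ],
    },
    "queue": "raw_default",
    "categories": [
        {
            "Buildings": {
                "types": ["polygons"],
                "select": ["name", "building"],
                "where": "tags['building'] IS NOT NULL",
                "formats": ["geojson"],
            }
        }
    ],
}

response = requests.post(
    f"{API_BASE}/v1/custom/snapshot/",
    json=payload,
    headers={"access-token": ACCESS_TOKEN},
    timeout=30,
)
response.raise_for_status()
track_link = response.json()["track_link"]
print(f"Poll {API_BASE}/v1{track_link} until status is SUCCESS")

As in the tests above, the response carries a track_link that can be polled until the worker reports SUCCESS.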