Merge pull request #192 from hotosm/enhance/custom_with_TM
Enhance: Custom Exports -> TM Projects
kshitijrajsharma authored Dec 28, 2023
2 parents 15d66ee + 6c101c1 commit 3244b0d
Showing 4 changed files with 232 additions and 27 deletions.
92 changes: 89 additions & 3 deletions API/hdx.py
@@ -9,13 +9,13 @@
from .api_worker import process_hdx_request
from .auth import AuthUser, UserRole, staff_required

router = APIRouter(prefix="/hdx", tags=["HDX"])
router = APIRouter(prefix="/custom", tags=["Custom Exports"])


@router.post("/submit/")
@router.post("/snapshot/")
@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute")
@version(1)
-async def process_hdx_requests(
+async def process_custom_requests(
request: Request,
user: AuthUser = Depends(staff_required),
params: DynamicCategoriesModel = Body(
@@ -124,6 +124,92 @@ async def process_hdx_requests(
],
},
},
"normal_polygon_TM": {
"summary": "Example: Tasking Manager Mapping type extraction for a Project",
"description": "Example Query to extract building,roads,waterways and landuse in sample TM Project , Pokhara, Nepal",
"value": {
"geometry": {
"type": "Polygon",
"coordinates": [
[
[83.96919250488281, 28.194446860487773],
[83.99751663208006, 28.194446860487773],
[83.99751663208006, 28.214869548073377],
[83.96919250488281, 28.214869548073377],
[83.96919250488281, 28.194446860487773],
]
],
},
"queue": "raw_default",
"dataset": {
"dataset_prefix": "hotosm_project_1",
"dataset_folder": "TM",
"dataset_title": "Tasking Manger Project 1",
},
"categories": [
{
"Buildings": {
"types": ["polygons"],
"select": [
"name",
"building",
"building:levels",
"building:materials",
"addr:full",
"addr:housenumber",
"addr:street",
"addr:city",
"office",
"source",
],
"where": "tags['building'] IS NOT NULL",
"formats": ["geojson", "shp", "kml"],
},
"Roads": {
"types": ["lines"],
"select": [
"name",
"highway",
"surface",
"smoothness",
"width",
"lanes",
"oneway",
"bridge",
"layer",
"source",
],
"where": "tags['highway'] IS NOT NULL",
"formats": ["geojson", "shp", "kml"],
},
"Waterways": {
"types": ["lines", "polygons"],
"select": [
"name",
"waterway",
"covered",
"width",
"depth",
"layer",
"blockage",
"tunnel",
"natural",
"water",
"source",
],
"where": "tags['waterway'] IS NOT NULL OR tags['water'] IS NOT NULL OR tags['natural'] IN ('water','wetland','bay')",
"formats": ["geojson", "shp", "kml"],
},
"Landuse": {
"types": ["points", "polygons"],
"select": ["name", "amenity", "landuse", "leisure"],
"where": "tags['landuse'] IS NOT NULL",
"formats": ["geojson", "shp", "kml"],
},
}
],
},
},
"fullset": {
"summary": "Full HDX Dataset default",
"description": "Full yaml conversion for dataset with iso3 example",
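With the route renamed from /hdx/submit/ to /custom/snapshot/, existing clients need to change the path they call. A minimal client-side sketch of the new call, assuming the API is served at http://localhost:8000 and that ACCESS_TOKEN is a valid staff token (both placeholders, not part of this change):

import requests

BASE_URL = "http://localhost:8000"  # assumption: local dev instance
ACCESS_TOKEN = "<staff-access-token>"  # assumption: obtained via the auth flow

payload = {
    "iso3": "NPL",
    "hdx_upload": False,  # HDX upload is now optional and off by default
    "categories": [
        {
            "Buildings": {
                "types": ["polygons"],
                "select": ["name", "building"],
                "where": "tags['building'] IS NOT NULL",
                "formats": ["geojson"],
            }
        }
    ],
}

# v1 of the API now exposes the snapshot endpoint under /custom/
response = requests.post(
    f"{BASE_URL}/v1/custom/snapshot/",
    json=payload,
    headers={"access-token": ACCESS_TOKEN},
)
response.raise_for_status()
print(response.json()["track_link"])  # task tracking link returned by the API

The payload shape mirrors the request bodies used in the examples and tests below; only the endpoint path changes for existing callers.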
1 change: 1 addition & 0 deletions src/app.py
@@ -1533,6 +1533,7 @@ def resource_to_hdx(self, uploaded_resources, dataset_config, category):
else None,
},
)
+logging.info("Initiating HDX Upload")
uploader.init_dataset()
non_hdx_resources = []
for resource in uploaded_resources:
46 changes: 27 additions & 19 deletions src/validation/models.py
@@ -334,7 +334,7 @@ class HDXModel(BaseModel):
"""

tags: List[str] = Field(
-...,
+default=["geodata"],
description="List of tags for the HDX model.",
example=["roads", "transportation", "geodata"],
)
@@ -362,11 +362,12 @@ def validate_tags(cls, value):
Returns:
_type_: _description_
"""
-for item in value:
-    if item.strip() not in ALLOWED_HDX_TAGS:
-        raise ValueError(
-            f"Invalid tag {item.strip()} , Should be within {ALLOWED_HDX_TAGS}"
-        )
+if value:
+    for item in value:
+        if item.strip() not in ALLOWED_HDX_TAGS:
+            raise ValueError(
+                f"Invalid tag {item.strip()} , Should be within {ALLOWED_HDX_TAGS}"
+            )
return value


@@ -382,7 +383,9 @@ class CategoryModel(BaseModel):
- formats (List[str]): List of Export Formats (suffixes).
"""

-hdx: HDXModel
+hdx: Optional[HDXModel] = Field(
+    default=None, description="HDX Specific configurations"
+)
types: List[str] = Field(
...,
description="List of feature types (points, lines, polygons).",
@@ -513,7 +516,7 @@ class DatasetConfig(BaseModel):
description="Dataset prefix to be appended before category name, Will be ignored if iso3 is supplied",
example="hotosm_npl",
)
-dataset_locations: List[str] = Field(
+dataset_locations: List[str] | None = Field(
default=None,
description="Valid dataset locations iso3",
example="['npl']",
@@ -562,6 +565,10 @@ class DynamicCategoriesModel(BaseModel, GeometryValidatorMixin):
max_length=3,
example="USA",
)
+hdx_upload: bool = Field(
+    default=False,
+    description="Enable/Disable uploading dataset to hdx, False by default",
+)
dataset: Optional[DatasetConfig] = Field(
default=None, description="Dataset Configurations for HDX Upload"
)
@@ -573,11 +580,6 @@ class DynamicCategoriesModel(BaseModel, GeometryValidatorMixin):
default=False,
description="Dumps Meta db in parquet format & hdx config json to s3",
)
-hdx_upload: bool = Field(
-    default=False,
-    description="Enable/Disable uploading dataset to hdx, False by default",
-)

categories: List[Dict[str, CategoryModel]] = Field(
...,
description="List of dynamic categories.",
Expand Down Expand Up @@ -622,11 +624,17 @@ def set_geometry_or_iso3(cls, value, values):
if value is None and values.get("iso3") is None:
raise ValueError("Either geometry or iso3 should be supplied.")
if value is not None:
dataset = values.get("dataset").dict()
if dataset is None:
dataset = values.get("dataset")
if values.get("hdx_upload"):
for category in values.get("categories"):
category_name, category_data = list(category.items())[0]
if category_data.hdx is None:
raise ValueError(f"HDX is missing for category {category}")

if dataset is None and values.get("hdx_upload"):
raise ValueError("Dataset config should be supplied for custom polygon")

for item in dataset.keys():
if dataset.get(item) is None:
raise ValueError(f"Missing, Dataset config : {item}")
if values.get("hdx_upload"):
for item in dataset:
if item is None:
raise ValueError(f"Missing, Dataset config : {item}")
return value
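The net effect of these model changes is that HDX-specific configuration is only enforced when hdx_upload is set to true. A rough sketch of the two paths, assuming the models are importable as src.validation.models (the import path depends on how the repository is installed); pydantic surfaces the ValueError raised in the validator as a ValidationError:

from pydantic import ValidationError

from src.validation.models import DynamicCategoriesModel  # assumption: package layout

geometry = {
    "type": "Polygon",
    "coordinates": [
        [[83.97, 28.19], [83.99, 28.19], [83.99, 28.21], [83.97, 28.21], [83.97, 28.19]]
    ],
}
categories = [
    {
        "Buildings": {
            "types": ["polygons"],
            "select": ["name", "building"],
            "where": "tags['building'] IS NOT NULL",
            "formats": ["geojson"],
            # no "hdx" block: allowed now that hdx is Optional
        }
    }
]

# Accepted: hdx_upload defaults to False, so neither a dataset config nor
# per-category hdx settings are required for a plain custom export.
DynamicCategoriesModel(geometry=geometry, categories=categories)

# Rejected: with hdx_upload=True the validator now demands an hdx block per
# category and a dataset config, so this raises during validation.
try:
    DynamicCategoriesModel(geometry=geometry, hdx_upload=True, categories=categories)
except ValidationError as err:
    print(err)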
120 changes: 115 additions & 5 deletions tests/test_API.py
@@ -713,7 +713,7 @@ def test_hdx_submit_normal_iso3():
],
}

response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)

assert response.status_code == 200
res = response.json()
@@ -759,7 +759,7 @@ def test_hdx_submit_normal_iso3_multiple_format():
],
}

response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)

assert response.status_code == 200
res = response.json()
@@ -822,7 +822,117 @@ def test_hdx_submit_normal_custom_polygon():
],
}

response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)

assert response.status_code == 200
res = response.json()
track_link = res["track_link"]
max_attempts = 6
interval_seconds = 10
for attempt in range(1, max_attempts + 1):
time.sleep(interval_seconds) # wait for worker to complete task

response = client.get(f"/v1{track_link}")
assert response.status_code == 200
res = response.json()
check_status = res["status"]

if check_status == "SUCCESS":
break # exit the loop if the status is SUCCESS

if attempt == max_attempts:
# If max_attempts reached and status is not SUCCESS, raise an AssertionError
assert (
False
), f"Task did not complete successfully after {max_attempts} attempts"


def test_custom_submit_normal_custom_polygon_TM_project():
headers = {"access-token": access_token}
payload = {
"geometry": {
"type": "Polygon",
"coordinates": [
[
[83.96919250488281, 28.194446860487773],
[83.99751663208006, 28.194446860487773],
[83.99751663208006, 28.214869548073377],
[83.96919250488281, 28.214869548073377],
[83.96919250488281, 28.194446860487773],
]
],
},
"queue": "raw_default",
"dataset": {
"dataset_prefix": "hotosm_project_1",
"dataset_folder": "TM",
"dataset_title": "Tasking Manger Project 1",
},
"categories": [
{
"Buildings": {
"types": ["polygons"],
"select": [
"name",
"building",
"building:levels",
"building:materials",
"addr:full",
"addr:housenumber",
"addr:street",
"addr:city",
"office",
"source",
],
"where": "tags['building'] IS NOT NULL",
"formats": ["geojson", "shp", "kml"],
},
"Roads": {
"types": ["lines"],
"select": [
"name",
"highway",
"surface",
"smoothness",
"width",
"lanes",
"oneway",
"bridge",
"layer",
"source",
],
"where": "tags['highway'] IS NOT NULL",
"formats": ["geojson", "shp", "kml"],
},
"Waterways": {
"types": ["lines", "polygons"],
"select": [
"name",
"waterway",
"covered",
"width",
"depth",
"layer",
"blockage",
"tunnel",
"natural",
"water",
"source",
],
"where": "tags['waterway'] IS NOT NULL OR tags['water'] IS NOT NULL OR tags['natural'] IN ('water','wetland','bay')",
"formats": ["geojson", "shp", "kml"],
},
"Landuse": {
"types": ["points", "polygons"],
"select": ["name", "amenity", "landuse", "leisure"],
"where": "tags['landuse'] IS NOT NULL",
"formats": ["geojson", "shp", "kml"],
},
}
],
}

response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)

assert response.status_code == 200
res = response.json()
@@ -886,7 +996,7 @@ def test_hdx_submit_normal_custom_polygon_upload():
],
}

response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)

assert response.status_code == 200
res = response.json()
@@ -1188,7 +1298,7 @@ def test_full_hdx_set_iso():
],
}

response = client.post("/v1/hdx/submit/", json=payload, headers=headers)
response = client.post("/v1/custom/snapshot/", json=payload, headers=headers)

assert response.status_code == 200
res = response.json()
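The new and updated tests all follow the same pattern: submit a snapshot request, then poll the returned track_link until the background worker reports SUCCESS. The same polling can be done against the HTTP API directly; a small sketch, with the base URL as a placeholder:

import time

import requests

BASE_URL = "http://localhost:8000"  # assumption: local dev instance


def wait_for_task(track_link, max_attempts=6, interval_seconds=10):
    """Poll the task status endpoint until it reports SUCCESS or attempts run out."""
    for _ in range(max_attempts):
        time.sleep(interval_seconds)  # give the worker time to pick up the task
        response = requests.get(f"{BASE_URL}/v1{track_link}")
        response.raise_for_status()
        result = response.json()
        if result["status"] == "SUCCESS":
            return result
    raise TimeoutError(f"Task did not complete after {max_attempts} attempts")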
