fix: multi polygon task in custom data extract using area as task for splitting algorithm
sujanadh committed Sep 14, 2023
1 parent 0b338b4 commit fa14cab
Showing 1 changed file with 155 additions and 136 deletions.
291 changes: 155 additions & 136 deletions src/backend/app/projects/project_crud.py
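
For orientation: the fix reduces a MultiPolygon geometry in a custom data extract to its first polygon before converting it to WKB, and skips features that fall outside the project area. A minimal standalone sketch of the MultiPolygon handling (illustrative only; the sample feature and coordinates below are made up and are not taken from the commit):

# Illustrative sketch, not part of the commit: reduce a GeoJSON MultiPolygon
# feature to its first polygon's exterior ring before storing it as WKB,
# mirroring the approach taken in the diff below.
from geoalchemy2.shape import from_shape
from shapely.geometry import MultiPolygon, Polygon, shape

feature = {
    "geometry": {
        "type": "MultiPolygon",
        # One polygon with a single exterior ring (hypothetical sample data).
        "coordinates": [[[[85.31, 27.70], [85.32, 27.70], [85.32, 27.71], [85.31, 27.70]]]],
    },
    "properties": {"building": "yes"},
}

feature_shape = shape(feature["geometry"])
if isinstance(feature_shape, MultiPolygon):
    # Keep only the first polygon's exterior ring, as the fix does.
    wkb_element = from_shape(Polygon(feature["geometry"]["coordinates"][0][0]), srid=4326)
else:
    wkb_element = from_shape(feature_shape, srid=4326)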
@@ -1156,27 +1156,46 @@ def upload_custom_data_extracts(

    for feature in features_data["features"]:
        feature_shape = shape(feature["geometry"])

+        if not (shape(project_geojson).contains(feature_shape)):
+            continue
+        if isinstance(feature_shape, MultiPolygon):
+            wkb_element = from_shape(Polygon(feature["geometry"]["coordinates"][0][0]), srid=4326)
+        else:
+            wkb_element = from_shape(feature_shape, srid=4326)
+
        # If the osm extracts contents do not have a title, provide an empty text for that.
        feature["properties"]["title"] = ""
+        properties = flatten_dict(feature["properties"])

-        feature_shape = shape(feature["geometry"])
-
-        wkb_element = from_shape(feature_shape, srid=4326)
-        feature_mapping = {
-            "project_id": project_id,
-            "geometry": wkb_element,
-            "properties": feature["properties"],
-        }
-        featuree = db_models.DbFeatures(**feature_mapping)
-        db.add(featuree)
-        db.commit()
+        db_feature = db_models.DbFeatures(
+            project_id=project_id,
+            geometry = wkb_element,
+            properties=properties
+        )
+        db.add(db_feature)
+        db.commit()

    return True

+def flatten_dict(d, parent_key='', sep='_'):
+    """
+    Recursively flattens a nested dictionary into a single-level dictionary.
+    Args:
+        d (dict): The input dictionary.
+        parent_key (str): The parent key (used for recursion).
+        sep (str): The separator character to use in flattened keys.
+    Returns:
+        dict: The flattened dictionary.
+    """
+    items = {}
+    for k, v in d.items():
+        new_key = f"{parent_key}{sep}{k}" if parent_key else k
+        if isinstance(v, dict):
+            items.update(flatten_dict(v, new_key, sep=sep))
+        else:
+            items[new_key] = v
+    return items


def generate_task_files(
    db: Session,
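
For reference, a quick usage sketch of the flatten_dict helper added above (illustrative only; the sample properties dict is hypothetical):

# Nested GeoJSON properties collapse into single-level, separator-joined keys.
tags = {"osm_id": 123, "tags": {"building": "yes", "addr": {"city": "Kathmandu"}}}
print(flatten_dict(tags))
# {'osm_id': 123, 'tags_building': 'yes', 'tags_addr_city': 'Kathmandu'}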
@@ -1337,153 +1356,153 @@ def generate_appuser_files(
        - form_type: weather the form is xls, xlsx or xml
        - background_task_id: the task_id of the background task running this function.
    """
-    try:
-        project_log = log.bind(task="create_project", project_id=project_id)
-
-        project_log.info(f"Starting generate_appuser_files for project {project_id}")
-
-        # Get the project table contents.
-        project = table(
-            "projects",
-            column("project_name_prefix"),
-            column("xform_title"),
-            column("id"),
-            column("odk_central_url"),
-            column("odk_central_user"),
-            column("odk_central_password"),
-            column("outline"),
-        )
+    # try:
+    project_log = log.bind(task="create_project", project_id=project_id)
+
+    project_log.info(f"Starting generate_appuser_files for project {project_id}")
+
-        where = f"id={project_id}"
-        sql = select(
-            project.c.project_name_prefix,
-            project.c.xform_title,
-            project.c.id,
-            project.c.odk_central_url,
-            project.c.odk_central_user,
-            project.c.odk_central_password,
-            geoalchemy2.functions.ST_AsGeoJSON(project.c.outline).label("outline"),
-        ).where(text(where))
-        result = db.execute(sql)
+    # Get the project table contents.
+    project = table(
+        "projects",
+        column("project_name_prefix"),
+        column("xform_title"),
+        column("id"),
+        column("odk_central_url"),
+        column("odk_central_user"),
+        column("odk_central_password"),
+        column("outline"),
+    )

-        # There should only be one match
-        if result.rowcount != 1:
-            log.warning(str(sql))
-            if result.rowcount < 1:
-                raise HTTPException(status_code=400, detail="Project not found")
-            else:
-                raise HTTPException(status_code=400, detail="Multiple projects found")
+    where = f"id={project_id}"
+    sql = select(
+        project.c.project_name_prefix,
+        project.c.xform_title,
+        project.c.id,
+        project.c.odk_central_url,
+        project.c.odk_central_user,
+        project.c.odk_central_password,
+        geoalchemy2.functions.ST_AsGeoJSON(project.c.outline).label("outline"),
+    ).where(text(where))
+    result = db.execute(sql)

+    # There should only be one match
+    if result.rowcount != 1:
+        log.warning(str(sql))
+        if result.rowcount < 1:
+            raise HTTPException(status_code=400, detail="Project not found")
+        else:
+            raise HTTPException(status_code=400, detail="Multiple projects found")

-        one = result.first()
+    one = result.first()

-        if one:
-            prefix = one.project_name_prefix
+    if one:
+        prefix = one.project_name_prefix

-            # Get odk credentials from project.
-            odk_credentials = {
-                "odk_central_url": one.odk_central_url,
-                "odk_central_user": one.odk_central_user,
-                "odk_central_password": one.odk_central_password,
-            }
+        # Get odk credentials from project.
+        odk_credentials = {
+            "odk_central_url": one.odk_central_url,
+            "odk_central_user": one.odk_central_user,
+            "odk_central_password": one.odk_central_password,
+        }

-            odk_credentials = project_schemas.ODKCentral(**odk_credentials)
+        odk_credentials = project_schemas.ODKCentral(**odk_credentials)

-            xform_title = one.xform_title if one.xform_title else None
+        xform_title = one.xform_title if one.xform_title else None

-            if upload:
-                xlsform = f"/tmp/custom_form.{form_type}"
-                contents = upload
-                with open(xlsform, "wb") as f:
-                    f.write(contents)
-            else:
-                xlsform = f"{xlsforms_path}/{xform_title}.xls"
+        if upload:
+            xlsform = f"/tmp/custom_form.{form_type}"
+            contents = upload
+            with open(xlsform, "wb") as f:
+                f.write(contents)
+        else:
+            xlsform = f"{xlsforms_path}/{xform_title}.xls"

-            category = xform_title
+        category = xform_title

-            # Data Extracts
-            if extracts_contents is not None:
-                project_log.info(f"Uploading data extracts")
-                upload_custom_data_extracts(db, project_id, extracts_contents)
+        # Data Extracts
+        if extracts_contents is not None:
+            project_log.info(f"Uploading data extracts")
+            upload_custom_data_extracts(db, project_id, extracts_contents)

-            else:
-                project_log.info(f"Extracting Data from OSM")
-
-                # OSM Extracts for whole project
-                pg = PostgresClient(settings.UNDERPASS_API_URL, "underpass")
-                # This file will store osm extracts
-                outfile = f"/tmp/{prefix}_{xform_title}.geojson"
-
-                outline = json.loads(one.outline)
-                outline_geojson = pg.getFeatures(
-                    boundary=outline,
-                    filespec=outfile,
-                    polygon=extract_polygon,
-                    xlsfile=f"{category}.xls",
-                    category=category,
-                )
+        else:
+            project_log.info(f"Extracting Data from OSM")
+
+            # OSM Extracts for whole project
+            pg = PostgresClient(settings.UNDERPASS_API_URL, "underpass")
+            # This file will store osm extracts
+            outfile = f"/tmp/{prefix}_{xform_title}.geojson"
+
+            outline = json.loads(one.outline)
+            outline_geojson = pg.getFeatures(
+                boundary=outline,
+                filespec=outfile,
+                polygon=extract_polygon,
+                xlsfile=f"{category}.xls",
+                category=category,
+            )

-                updated_outline_geojson = {"type": "FeatureCollection", "features": []}
+            updated_outline_geojson = {"type": "FeatureCollection", "features": []}

-                # Collect feature mappings for bulk insert
-                feature_mappings = []
+            # Collect feature mappings for bulk insert
+            feature_mappings = []

-                for feature in outline_geojson["features"]:
-                    # If the osm extracts contents do not have a title, provide an empty text for that.
-                    feature["properties"]["title"] = ""
+            for feature in outline_geojson["features"]:
+                # If the osm extracts contents do not have a title, provide an empty text for that.
+                feature["properties"]["title"] = ""

-                    feature_shape = shape(feature["geometry"])
+                feature_shape = shape(feature["geometry"])

-                    # If the centroid of the Polygon is not inside the outline, skip the feature.
-                    if extract_polygon and (
-                        not shape(outline).contains(shape(feature_shape.centroid))
-                    ):
-                        continue
+                # If the centroid of the Polygon is not inside the outline, skip the feature.
+                if extract_polygon and (
+                    not shape(outline).contains(shape(feature_shape.centroid))
+                ):
+                    continue

-                    wkb_element = from_shape(feature_shape, srid=4326)
-                    feature_mapping = {
-                        "project_id": project_id,
-                        "category_title": category,
-                        "geometry": wkb_element,
-                        "properties": feature["properties"],
-                    }
-                    updated_outline_geojson["features"].append(feature)
-                    feature_mappings.append(feature_mapping)
+                wkb_element = from_shape(feature_shape, srid=4326)
+                feature_mapping = {
+                    "project_id": project_id,
+                    "category_title": category,
+                    "geometry": wkb_element,
+                    "properties": feature["properties"],
+                }
+                updated_outline_geojson["features"].append(feature)
+                feature_mappings.append(feature_mapping)

-                # Bulk insert the osm extracts into the db.
-                db.bulk_insert_mappings(db_models.DbFeatures, feature_mappings)
+            # Bulk insert the osm extracts into the db.
+            db.bulk_insert_mappings(db_models.DbFeatures, feature_mappings)

-            # Generating QR Code, XForm and uploading OSM Extracts to the form.
-            # Creating app users and updating the role of that user.
-            tasks_list = tasks_crud.get_task_lists(db, project_id)
+        # Generating QR Code, XForm and uploading OSM Extracts to the form.
+        # Creating app users and updating the role of that user.
+        tasks_list = tasks_crud.get_task_lists(db, project_id)

-            # info = get_cpu_info()
-            # cores = info["count"]
-            # with concurrent.futures.ThreadPoolExecutor(max_workers=cores) as executor:
-            # futures = {executor.submit(generate_task_files_wrapper, project_id, task, xlsform, form_type, odk_credentials): task for task in tasks_list}
+        # info = get_cpu_info()
+        # cores = info["count"]
+        # with concurrent.futures.ThreadPoolExecutor(max_workers=cores) as executor:
+        # futures = {executor.submit(generate_task_files_wrapper, project_id, task, xlsform, form_type, odk_credentials): task for task in tasks_list}

-            # for future in concurrent.futures.as_completed(futures):
-            # log.debug(f"Waiting for thread to complete..")
+        # for future in concurrent.futures.as_completed(futures):
+        # log.debug(f"Waiting for thread to complete..")

-            for task in tasks_list:
-                try:
-                    generate_task_files(
-                        db, project_id, task, xlsform, form_type, odk_credentials,
-                    )
-                except Exception as e:
-                    log.warning(str(e))
-                    continue
-            # # Update background task status to COMPLETED
-            update_background_task_status_in_database(
-                db, background_task_id, 4
-            ) # 4 is COMPLETED
+        for task in tasks_list:
+            try:
+                generate_task_files(
+                    db, project_id, task, xlsform, form_type, odk_credentials,
+                )
+            except Exception as e:
+                log.warning(str(e))
+                continue
+        # # Update background task status to COMPLETED
+        update_background_task_status_in_database(
+            db, background_task_id, 4
+        ) # 4 is COMPLETED

-    except Exception as e:
-        log.warning(str(e))
+    # except Exception as e:
+    # log.warning(str(e))

-        # Update background task status to FAILED
-        update_background_task_status_in_database(
-            db, background_task_id, 2, str(e)
-        ) # 2 is FAILED
+    # update_background_task_status_in_database(
+    # db, background_task_id, 2, str(e)
+    # ) # 2 is FAILED


def create_qrcode(
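For reference, a standalone sketch of the centroid-based filter used in generate_appuser_files above when clipping OSM extracts to the project outline (simplified and illustrative only; the outline and feature coordinates are made up):

from shapely.geometry import shape

# Hypothetical project outline and candidate feature (GeoJSON-style dicts).
outline = {"type": "Polygon", "coordinates": [[[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]]]}
feature_geom = {"type": "Polygon", "coordinates": [[[12, 12], [13, 12], [13, 13], [12, 12]]]}

outline_shape = shape(outline)
feature_shape = shape(feature_geom)

# Keep the feature only if its centroid falls inside the project outline.
if not outline_shape.contains(feature_shape.centroid):
    print("skip feature")  # this sample feature lies outside, so it is skipped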
