IA-2986: Have "sub org unit types to display" by default #1346

Merged
merged 7 commits on Jun 13, 2024
27 changes: 13 additions & 14 deletions docker-compose.yml
@@ -11,6 +11,7 @@ services:
- ./manage.py:/opt/app/manage.py
- ./hat:/opt/app/hat
- ./iaso:/opt/app/iaso
- ./setuper:/opt/app/setuper
- ./beanstalk_worker:/opt/app/beanstalk_worker
- ./django_sql_dashboard_export:/opt/app/django_sql_dashboard_export
- ./media:/opt/app/media
@@ -39,16 +40,16 @@ services:
RDS_DB_NAME:
THEME_PRIMARY_COLOR:
THEME_SECONDARY_COLOR:
THEME_PRIMARY_BACKGROUND_COLOR:
#OpenHexa API token
OPENHEXA_TOKEN:
#OpenHexa API url
OPENHEXA_URL:
#OpenHexa pipeline ID
LQAS_PIPELINE:
#Optional: the version of the pipeline to run
LQAS_PIPELINE_VERSION:
# "prod", "staging" or "custom". Use "custom" for local testing
THEME_PRIMARY_BACKGROUND_COLOR: #OpenHexa API token

OPENHEXA_TOKEN: #OpenHexa API url

OPENHEXA_URL: #OpenHexa pipeline ID

LQAS_PIPELINE: #Optional: the version of the pipeline to run

LQAS_PIPELINE_VERSION: # "prod", "staging" or "custom". Use "custom" for local testing

OH_PIPELINE_TARGET:

FAVICON_PATH:
@@ -77,17 +78,15 @@ services:
WFP_AUTH_ACCOUNT:
WFP_EMAIL_RECIPIENTS_NEW_ACCOUNT:
DISABLE_PASSWORD_LOGINS:
SERVER_URL:
# Limit logging in dev to not overflow terminal
SERVER_URL: # Limit logging in dev to not overflow terminal
logging: &iaso_logging
driver: "json-file"
options:
max-size: "5k"
command: start_dev

db:
image:
iaso/postgis
image: iaso/postgis
# Workaround until there is a stable Postgis image for Apple Silicon
build: docker/db
logging: *iaso_logging
31 changes: 31 additions & 0 deletions iaso/tests/gpkg/test_import_with_sub_org_unit_type.py
@@ -0,0 +1,31 @@
from iaso.gpkg.import_gpkg import import_gpkg_file
from iaso.models import Account, Project
from iaso.test import APITestCase
from setuper.pyramid import update_org_unit_sub_type


class OrgUnitImportFromGPKG(APITestCase):
@classmethod
def setUpTestData(cls):
cls.account = Account.objects.create(name="a")
cls.user_test = cls.create_user_with_profile(username="test", account=cls.account)
cls.project = Project.objects.create(name="Project 1", account=cls.account, app_id="test_app_id")

def test_minimal_import_with_sub_org_unit_type(self):
import_gpkg_file(
"./iaso/tests/fixtures/gpkg/minimal.gpkg",
project_id=self.project.id,
source_name="test",
version_number=1,
validation_status="new",
description="",
)
self.client.force_authenticate(self.user_test)
response = self.client.get("/api/v2/orgunittypes/")
self.assertJSONResponse(response, 200)
response_data = response.json()

updated_with_sub_types = update_org_unit_sub_type(self.client, self.project.id, response_data["orgUnitTypes"])

for org_unit_type_with in updated_with_sub_types:
self.assertJSONResponse(org_unit_type_with, 200)
Member
Do we know the expected new sub types here?
If so, we should test that too, in addition to testing the returned HTTP code.

Member Author
For testing the expected new sub types, I am wondering if, from line 27, we could filter on the expected new sub types with:

expected_new_sub_types = []
for org_unit_type in response_data["orgUnitTypes"]:
    org_unit_type_level = org_unit_type["depth"]
    sub_unit_type_ids = [
        org_unit_type["id"]
        for org_unit_type in response_data["orgUnitTypes"]
        if org_unit_type["depth"] == (org_unit_type_level + 1)
    ]
    expected_new_sub_types.append(sub_unit_type_ids)

But that looks like a re-implementation of the logic of update_org_unit_sub_type.

Then, from line 29:

from functools import reduce

updated_sub_types = []
for org_unit_type_with in updated_with_sub_types:
    self.assertJSONResponse(org_unit_type_with["updated_org_unit_types"], 200)
    updated_sub_types.append(org_unit_type_with["sub_unit_type_ids"][0])

# Flatten the per-type lists of expected sub type ids before comparing
sub_types_ids = reduce(lambda a, b: a + b, expected_new_sub_types)
self.assertEqual(updated_sub_types, sub_types_ids)

Member
TBH I don't know if it's doable?

My point is that, in general, we know what we are expecting from a test:

expected_result = "foo"

result = test_my_function()

assert result == expected_result

Here, the only thing that you are testing is that the API call returns 200, but we don't know if the data was modified in the expected way.

We can discuss this tomorrow; it may be easier to talk it through.
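
One way to get an expected-result assertion without fully re-deriving update_org_unit_sub_type: re-fetch the org unit types after the update and check the depth + 1 invariant on the returned data. A rough sketch, assuming the v2 serializer exposes the linked types under a sub_unit_types key containing their ids (that field name and shape are guesses, not something confirmed by this PR):

# Sketch only: each org unit type should now point at the types exactly one level deeper.
# "depth" and "sub_unit_types" are assumed serializer fields; adjust to the real payload.
def assert_sub_types_follow_depth(org_unit_types):
    ids_by_depth = {}
    for org_unit_type in org_unit_types:
        ids_by_depth.setdefault(org_unit_type["depth"], []).append(org_unit_type["id"])

    for org_unit_type in org_unit_types:
        expected = sorted(ids_by_depth.get(org_unit_type["depth"] + 1, []))
        actual = sorted(sub["id"] for sub in org_unit_type.get("sub_unit_types", []))
        assert actual == expected, (org_unit_type["name"], actual, expected)

The test would call this on response.json()["orgUnitTypes"] from a second GET issued after update_org_unit_sub_type, so it asserts the stored data rather than only the PATCH status codes.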

Member
Are we really doing tests on the setuper? I'm thinking this might be overkill.

Empty file added: setuper/__init__.py
31 changes: 29 additions & 2 deletions setuper/pyramid.py
@@ -1,4 +1,4 @@
def setup_orgunits(account_name, iaso_client):
def setup_orgunits(iaso_client):
project_id = iaso_client.get("/api/projects/")["projects"][0]["id"]
sources = iaso_client.get("/api/datasources/")["sources"]
data_source_id = sources[0]["id"]
@@ -15,6 +15,7 @@ def setup_orgunits(account_name, iaso_client):
test_file = "data/small_sample.gpkg"
geopackage_file = {"file": (test_file, open(test_file, "rb"), "application/octet-stream")}
task = iaso_client.post("/api/tasks/create/importgpkg/", files=geopackage_file, data=data)

print("-- Importing org units")

iaso_client.wait_task_completion(task)
@@ -26,5 +27,31 @@ def setup_orgunits(account_name, iaso_client):
"searches": [{"validation_status": "all", "color": "f4511e", "source": data_source_id}],
}
task = iaso_client.post("/api/tasks/create/orgunitsbulkupdate/", json=data)

org_unit_types = iaso_client.get("/api/v2/orgunittypes/")["orgUnitTypes"]
update_org_unit_sub_type(iaso_client, project_id, org_unit_types)
iaso_client.wait_task_completion(task)


def update_org_unit_sub_type(iaso_client, project_id, org_unit_types):
print("-- Updating org unit sub type")
updated_with_sub_types = []
for org_unit_type in org_unit_types:
org_unit_type_level = org_unit_type["depth"]
org_unit_type_id = org_unit_type["id"]
sub_unit_type_ids = [
org_unit_type["id"]
for org_unit_type in org_unit_types
if org_unit_type["depth"] == (org_unit_type_level + 1)
]
if len(sub_unit_type_ids) > 0:
current_type = {
"name": org_unit_type["name"],
"short_name": org_unit_type["short_name"],
"project_ids": [project_id],
"sub_unit_type_ids": sub_unit_type_ids,
}
# Updating default sub type
updated_with_sub_types.append(
iaso_client.patch(f"/api/v2/orgunittypes/{org_unit_type_id}/", json=current_type)
)
return updated_with_sub_types
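
For readers skimming the diff: update_org_unit_sub_type pairs every org unit type with all the types sitting exactly one level deeper in the pyramid and only issues a PATCH when that list is non-empty. A tiny standalone illustration of the pairing (the type names and depths below are made up for the example):

# Hypothetical pyramid; real types come from /api/v2/orgunittypes/.
org_unit_types = [
    {"id": 1, "name": "Country", "depth": 1},
    {"id": 2, "name": "Region", "depth": 2},
    {"id": 3, "name": "District", "depth": 3},
    {"id": 4, "name": "Health area", "depth": 3},
]

for org_unit_type in org_unit_types:
    sub_unit_type_ids = [
        other["id"] for other in org_unit_types if other["depth"] == org_unit_type["depth"] + 1
    ]
    print(org_unit_type["name"], "->", sub_unit_type_ids)

# Prints:
# Country -> [2]
# Region -> [3, 4]
# District -> []       (deepest level, so no PATCH is sent)
# Health area -> []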
18 changes: 10 additions & 8 deletions setuper/review_change_proposal.py
@@ -50,6 +50,7 @@ def setup_review_change_proposal(account_name, iaso_client):
"status": status,
}
validation = None
proposal_review = None
if len(groups) > 0:
data["new_groups"] = groups
approved_fields.append("new_groups")
@@ -64,11 +65,12 @@ def setup_review_change_proposal(account_name, iaso_client):
if len(new_reference_instances) > 0:
data["new_reference_instances"] = new_reference_instances
approved_fields.append("new_reference_instances")
proposal_review = iaso_client.post("/api/orgunits/changes/", json=data)
if status == "approved" or status == "rejected":
validation = {
"approved_fields": approved_fields,
"status": status,
"rejection_comment": status,
}
iaso_client.patch(f"/api/orgunits/changes/{proposal_review['id']}/", json=validation)
if len(approved_fields) > 0:
proposal_review = iaso_client.post("/api/orgunits/changes/", json=data)
if status == "approved" or status == "rejected":
validation = {
"approved_fields": approved_fields,
"status": status,
"rejection_comment": status,
}
iaso_client.patch(f"/api/orgunits/changes/{proposal_review['id']}/", json=validation)
2 changes: 1 addition & 1 deletion setuper/setuper.py
@@ -54,7 +54,7 @@ def setup_account(account_name):
account_name = "".join(random.choices(string.ascii_lowercase, k=7))
print("Creating account:", account_name)
iaso_client = setup_account(account_name)
setup_orgunits(account_name, iaso_client=iaso_client)
setup_orgunits(iaso_client=iaso_client)

if seed_default_health_facility_form:
setup_health_facility_level_default_form(account_name, iaso_client=iaso_client)