WC2-523 Add storage logs to bulk update #1857

Open · wants to merge 1 commit into base: main
iaso/api/storage.py — 106 changes: 54 additions & 52 deletions

@@ -356,61 +356,63 @@ def create(self, _, request):

         This will also create a new StorageDevice if the storage_id / storage_type / account combination is not found
         """
-        user = request.user
+        import_storage_logs(request.data, request.user)
 
-        for log_data in request.data:
-            # We receive an array of logs, we'll process them one by one
-            log_id = log_data["id"]
+        return Response("", status=status.HTTP_201_CREATED)
 
-            try:
-                StorageLogEntry.objects.get(id=log_id)
-                # That log entry already exists, skip it
-            except StorageLogEntry.DoesNotExist:
-                # New log entry, we continue
-                storage_id = log_data["storage_id"]
-                storage_type = log_data["storage_type"]
-                operation_type = log_data["operation_type"]
-
-                if storage_type not in [c[1] for c in StorageDevice.STORAGE_TYPE_CHOICES]:
-                    raise ValueError(f"Invalid storage type: {storage_type}")
-
-                if operation_type not in [c[1] for c in StorageLogEntry.OPERATION_TYPE_CHOICES]:
-                    raise ValueError(f"Invalid operation type: {operation_type}")
-
-                performed_at = timestamp_to_utc_datetime(int(log_data["performed_at"]))
-
-                concerned_instances = Instance.objects.none()
-                if "instances" in log_data:
-                    concerned_instances = Instance.objects.filter(uuid__in=log_data["instances"])
-
-                concerned_orgunit = None
-                if "org_unit_id" in log_data and log_data["org_unit_id"] is not None:
-                    concerned_orgunit = OrgUnit.objects.get(id=log_data["org_unit_id"])
-
-                concerned_entity = None
-                entity_id = log_data.get("entity_id") or log_data.get("entity_uuid")
-                if entity_id:
-                    concerned_entity = Entity.objects.get(uuid=entity_id)
-
-                account = user.iaso_profile.account
-
-                # 1. Create the storage device, if needed
-                device, _ = StorageDevice.objects.get_or_create(
-                    account=account, customer_chosen_id=storage_id, type=storage_type
-                )
-
-                StorageLogEntry.objects.create_and_update_device(
-                    log_id=log_id,
-                    device=device,
-                    operation_type=operation_type,
-                    performed_at=performed_at,
-                    user=user,
-                    concerned_orgunit=concerned_orgunit,
-                    concerned_entity=concerned_entity,
-                    concerned_instances=concerned_instances,
-                )
 
-        return Response("", status=status.HTTP_201_CREATED)
+def import_storage_logs(data, user):
+    for log_data in data:
+        # We receive an array of logs, we'll process them one by one
+        log_id = log_data["id"]
+
+        try:
+            StorageLogEntry.objects.get(id=log_id)
+            # That log entry already exists, skip it
+        except StorageLogEntry.DoesNotExist:
+            # New log entry, we continue
+            storage_id = log_data["storage_id"]
+            storage_type = log_data["storage_type"]
+            operation_type = log_data["operation_type"]
+
+            if storage_type not in [c[1] for c in StorageDevice.STORAGE_TYPE_CHOICES]:
+                raise ValueError(f"Invalid storage type: {storage_type}")
+
+            if operation_type not in [c[1] for c in StorageLogEntry.OPERATION_TYPE_CHOICES]:
+                raise ValueError(f"Invalid operation type: {operation_type}")
+
+            performed_at = timestamp_to_utc_datetime(int(log_data["performed_at"]))
+
+            concerned_instances = Instance.objects.none()
+            if "instances" in log_data:
+                concerned_instances = Instance.objects.filter(uuid__in=log_data["instances"])
+
+            concerned_orgunit = None
+            if "org_unit_id" in log_data and log_data["org_unit_id"] is not None:
+                concerned_orgunit = OrgUnit.objects.get(id=log_data["org_unit_id"])
+
+            concerned_entity = None
+            entity_id = log_data.get("entity_id") or log_data.get("entity_uuid")
+            if entity_id:
+                concerned_entity = Entity.objects.get(uuid=entity_id)
+
+            account = user.iaso_profile.account
+
+            # 1. Create the storage device, if needed
+            device, _ = StorageDevice.objects.get_or_create(
+                account=account, customer_chosen_id=storage_id, type=storage_type
+            )
+
+            StorageLogEntry.objects.create_and_update_device(
+                log_id=log_id,
+                device=device,
+                operation_type=operation_type,
+                performed_at=performed_at,
+                user=user,
+                concerned_orgunit=concerned_orgunit,
+                concerned_entity=concerned_entity,
+                concerned_instances=concerned_instances,
+            )
 
 
 def logs_for_device_generate_export(
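With the loop extracted, any caller holding a parsed list of log dicts can reuse it outside the DRF view. A minimal usage sketch, assuming a request_user with an iaso_profile (the payload shape mirrors the storageLogs.json fixture added below):

# Hedged usage sketch for the extracted helper; "request_user" stands in
# for any User whose iaso_profile resolves to the right account.
logs = [
    {
        "id": "ff023d13-25fa-43c5-9717-5f427b929cb6",
        "storage_id": "BGJGcuxqgA==",
        "storage_type": "NFC",
        "operation_type": "RESET",
        "instances": [],
        "org_unit_id": "1",
        "performed_at": 1733226445.771,
    }
]
import_storage_logs(logs, request_user)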
iaso/tasks/process_mobile_bulk_upload.py — 7 changes: 7 additions & 0 deletions

@@ -30,11 +30,13 @@
 from hat.sync.views import create_instance_file, process_instance_file
 from iaso.api.instances import import_data as import_instances
 from iaso.api.mobile.org_units import import_data as import_org_units
+from iaso.api.storage import import_storage_logs
 from iaso.models import Project, Instance
 from iaso.utils.s3_client import download_file
 
 INSTANCES_JSON = "instances.json"
 ORG_UNITS_JSON = "orgUnits.json"
+STORAGE_LOGS_JSON = "storageLogs.json"
 
 logger = logging.getLogger(__name__)
@@ -88,6 +90,11 @@ def process_mobile_bulk_upload(api_import_id, project_id, task=None):
             duplicated_count = duplicate_instance_files(new_instance_files)
             stats["new_instance_files"] = len(new_instance_files) + duplicated_count
 
+            if STORAGE_LOGS_JSON in zip_ref.namelist():
+                logger.info("Processing storage logs")
+                storage_logs_data = read_json_file_from_zip(zip_ref, STORAGE_LOGS_JSON)
+                import_storage_logs(storage_logs_data, user)
+
     except Exception as e:
         logger.exception("Exception! Rolling back import: " + str(e))
         api_import.has_problem = True
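read_json_file_from_zip already exists in this module; the new block only relies on it decoding one archive member as JSON. A minimal sketch of that assumed behavior (not the verbatim iaso implementation):

import json
import zipfile

def read_json_file_from_zip(zip_ref: zipfile.ZipFile, filename: str):
    # Assumed behavior: open the named member and parse its bytes as JSON.
    with zip_ref.open(filename) as f:
        return json.load(f)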
New test fixture: XML submission (15 additions)

@@ -0,0 +1,15 @@
<?xml version='1.0'?>
<data id="test_profile" version="2024052202" xmlns:h="http://www.w3.org/1999/xhtml"
xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:jr="http://openrosa.org/javarosa"
xmlns:ev="http://www.w3.org/2001/xml-events" xmlns:orx="http://openrosa.org/xforms"
xmlns:odk="http://www.opendatakit.org/xforms">
<Form>
<last_name>test</last_name>
<first_name>teszt</first_name>
<gender>m</gender>
</Form>
<previous_value />
<meta>
<instanceID>uuid:38e7cfff-4489-46ba-87e5-e436c7e8d815</instanceID>
</meta>
</data>
New test fixture: instances.json (13 additions)

@@ -0,0 +1,13 @@
[
{
"id": "5475bfcf-5a3f-4170-9d88-245d89352362",
"created_at": 1.733226371386e9,
"updated_at": 1.733226371386e9,
"file": "/storage/emulated/0/Android/data/com.bluesquarehub.iaso/files/Documents/iaso/instances/3_2_2024-12-03_12-46-06/3_2_809f9a76-3f3f-4033-aefb-98f47fb2ccaf_2024-12-03_12-46-06.xml",
"name": "Test profile",
"formId": "1",
"orgUnitId": "1",
"entityUuid": "5475bfcf-5a3f-4170-9d88-245d89352362",
"entityTypeId": "1"
}
]
New test fixture: storageLogs.json (21 additions)

@@ -0,0 +1,21 @@
[
{
"id": "ff023d13-25fa-43c5-9717-5f427b929cb6",
"storage_id": "BGJGcuxqgA==",
"storage_type": "NFC",
"operation_type": "RESET",
"instances": [],
"org_unit_id": "1",
"performed_at": 1.733226445771e9
},
{
"id": "da14ef24-74a1-4f7b-9621-6e33e88773b8",
"storage_id": "BGJGcuxqgA==",
"storage_type": "NFC",
"entity_id": "5475bfcf-5a3f-4170-9d88-245d89352362",
"instances": ["5475bfcf-5a3f-4170-9d88-245d89352362"],
"org_unit_id": "1",
"performed_at": 1729839290.867,
"operation_type": "WRITE_PROFILE"
}
]
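Note that performed_at values are UNIX epoch seconds as floats (sometimes serialized in scientific notation); import_storage_logs truncates them with int() before calling timestamp_to_utc_datetime. A sketch of that conversion, assuming the helper is the usual epoch-to-aware-UTC wrapper:

from datetime import datetime, timezone

def timestamp_to_utc_datetime(ts: int) -> datetime:
    # Assumption: iaso's helper wraps the standard epoch-to-UTC conversion.
    return datetime.fromtimestamp(ts, tz=timezone.utc)

timestamp_to_utc_datetime(int(1729839290.867))  # 2024-10-25 06:54:50+00:00 (the WRITE_PROFILE log)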
iaso/tests/tasks/test_process_mobile_bulk_upload.py — 56 changes: 56 additions & 0 deletions

@@ -616,3 +616,59 @@ def test_duplicate_uuids_multiple_active(self, mock_logger, mock_download_file):
         self.assertEqual(ent1.instances.count() + ent2.instances.count(), 3)
         err_msg = f"Multiple non-deleted entities for UUID {ent1.uuid}, entity_type_id {self.default_entity_type.id}"
         mock_logger.exception.assert_called_once_with(err_msg)
+
+    def test_storage_logs(self, mock_download_file):
+        entity_uuid = "5475bfcf-5a3f-4170-9d88-245d89352362"
+        files_for_zip = [
+            "instances.json",
+            "storageLogs.json",
+            entity_uuid,  # the folder with XML submission
+        ]
+        with zipfile.ZipFile(f"/tmp/{entity_uuid}.zip", "w", zipfile.ZIP_DEFLATED) as zipf:
+            add_to_zip(zipf, zip_fixture_dir("storage_logs_and_change_requests"), files_for_zip)
+
+        mock_download_file.return_value = f"/tmp/{entity_uuid}.zip"
+
+        self.assertEqual(m.Entity.objects.count(), 0)
+        self.assertEqual(m.Instance.objects.count(), 0)
+        self.assertEqual(m.StorageDevice.objects.count(), 0)
+        self.assertEqual(m.StorageLogEntry.objects.count(), 0)
+
+        process_mobile_bulk_upload(
+            api_import_id=self.api_import.id,
+            project_id=self.project.id,
+            task=self.task,
+            _immediate=True,
+        )
+
+        mock_download_file.assert_called_once()
+
+        # check Task status and result
+        self.task.refresh_from_db()
+        self.assertEqual(self.task.status, m.SUCCESS)
+        self.api_import.refresh_from_db()
+        self.assertEqual(self.api_import.import_type, "bulk")
+        self.assertFalse(self.api_import.has_problem)
+
+        # Instances (Submissions) + Entity were created
+        self.assertEqual(m.Entity.objects.count(), 1)
+        entity = m.Entity.objects.get(uuid=entity_uuid)
+        self.assertEqual(m.Instance.objects.count(), 1)
+        instance = m.Instance.objects.get(uuid=entity_uuid)
+
+        # Storage logs
+        self.assertEqual(m.StorageDevice.objects.count(), 1)
+        self.assertEqual(m.StorageLogEntry.objects.count(), 2)
+        storage_device = m.StorageDevice.objects.first()
+        self.assertEqual(storage_device.type, "NFC")
+        self.assertEqual(storage_device.org_unit_id, 1)
+        self.assertEqual(storage_device.entity, entity)
+
+        reset_log = m.StorageLogEntry.objects.get(operation_type="RESET")
+        self.assertEqual(reset_log.org_unit_id, 1)
+        self.assertIsNone(reset_log.entity)
+
+        write_log = m.StorageLogEntry.objects.get(operation_type="WRITE_PROFILE")
+        self.assertEqual(write_log.org_unit_id, 1)
+        self.assertEqual(write_log.entity, entity)
+        self.assertEqual(list(write_log.instances.all()), [instance])
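zip_fixture_dir (which resolves a named fixture directory) and add_to_zip are existing helpers in this test suite; a minimal sketch of the behavior the test assumes from add_to_zip, not its actual implementation:

import os
import zipfile

def add_to_zip(zipf: zipfile.ZipFile, fixture_dir: str, names: list[str]) -> None:
    # Assumed behavior: copy each listed file, or directory tree (e.g. the
    # per-entity folder holding the XML submission), into the archive.
    for name in names:
        path = os.path.join(fixture_dir, name)
        if os.path.isdir(path):
            for root, _dirs, files in os.walk(path):
                for fname in files:
                    full = os.path.join(root, fname)
                    zipf.write(full, arcname=os.path.relpath(full, fixture_dir))
        else:
            zipf.write(path, arcname=name)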