From a6a0d3a5e096766e5f613487a30de99e4f948881 Mon Sep 17 00:00:00 2001
From: Bram Jans
Date: Tue, 10 Dec 2024 13:17:51 +0100
Subject: [PATCH] WC2-523 Add storage logs to bulk upload

---
 iaso/api/storage.py                           | 106 +++++++++---------
 iaso/tasks/process_mobile_bulk_upload.py      |   7 ++
 ...-aefb-98f47fb2ccaf_2024-12-03_12-46-06.xml |  15 +++
 .../instances.json                            |  13 +++
 .../storageLogs.json                          |  21 ++++
 .../tasks/test_process_mobile_bulk_upload.py  |  56 +++++++++
 6 files changed, 166 insertions(+), 52 deletions(-)
 create mode 100644 iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/5475bfcf-5a3f-4170-9d88-245d89352362/3_2_809f9a76-3f3f-4033-aefb-98f47fb2ccaf_2024-12-03_12-46-06.xml
 create mode 100644 iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/instances.json
 create mode 100644 iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/storageLogs.json

diff --git a/iaso/api/storage.py b/iaso/api/storage.py
index 79913db660..99a4a4f7bb 100644
--- a/iaso/api/storage.py
+++ b/iaso/api/storage.py
@@ -356,61 +356,63 @@ def create(self, _, request):
         This will also create a new StorageDevice if the storage_id / storage_type / account combination is not found
         """
-        user = request.user
+        import_storage_logs(request.data, request.user)

-        for log_data in request.data:
-            # We receive an array of logs, we'll process them one by one
-            log_id = log_data["id"]
+        return Response("", status=status.HTTP_201_CREATED)

-            try:
-                StorageLogEntry.objects.get(id=log_id)
-                # That log entry already exists, skip it
-            except StorageLogEntry.DoesNotExist:
-                # New log entry, we continue
-                storage_id = log_data["storage_id"]
-                storage_type = log_data["storage_type"]
-                operation_type = log_data["operation_type"]
-
-                if storage_type not in [c[1] for c in StorageDevice.STORAGE_TYPE_CHOICES]:
-                    raise ValueError(f"Invalid storage type: {storage_type}")
-
-                if operation_type not in [c[1] for c in StorageLogEntry.OPERATION_TYPE_CHOICES]:
-                    raise ValueError(f"Invalid operation type: {operation_type}")
-
-                performed_at = timestamp_to_utc_datetime(int(log_data["performed_at"]))
-
-                concerned_instances = Instance.objects.none()
-                if "instances" in log_data:
-                    concerned_instances = Instance.objects.filter(uuid__in=log_data["instances"])
-
-                concerned_orgunit = None
-                if "org_unit_id" in log_data and log_data["org_unit_id"] is not None:
-                    concerned_orgunit = OrgUnit.objects.get(id=log_data["org_unit_id"])
-
-                concerned_entity = None
-                entity_id = log_data.get("entity_id") or log_data.get("entity_uuid")
-                if entity_id:
-                    concerned_entity = Entity.objects.get(uuid=entity_id)
-
-                account = user.iaso_profile.account
-
-                # 1. Create the storage device, if needed
-                device, _ = StorageDevice.objects.get_or_create(
-                    account=account, customer_chosen_id=storage_id, type=storage_type
-                )
-
-                StorageLogEntry.objects.create_and_update_device(
-                    log_id=log_id,
-                    device=device,
-                    operation_type=operation_type,
-                    performed_at=performed_at,
-                    user=user,
-                    concerned_orgunit=concerned_orgunit,
-                    concerned_entity=concerned_entity,
-                    concerned_instances=concerned_instances,
-                )
-        return Response("", status=status.HTTP_201_CREATED)
+
+def import_storage_logs(data, user):
+    for log_data in data:
+        # We receive an array of logs; process them one by one
+        log_id = log_data["id"]
+
+        try:
+            StorageLogEntry.objects.get(id=log_id)
+            # That log entry already exists, skip it
+        except StorageLogEntry.DoesNotExist:
+            # New log entry, we continue
+            storage_id = log_data["storage_id"]
+            storage_type = log_data["storage_type"]
+            operation_type = log_data["operation_type"]
+
+            if storage_type not in [c[1] for c in StorageDevice.STORAGE_TYPE_CHOICES]:
+                raise ValueError(f"Invalid storage type: {storage_type}")
+
+            if operation_type not in [c[1] for c in StorageLogEntry.OPERATION_TYPE_CHOICES]:
+                raise ValueError(f"Invalid operation type: {operation_type}")
+
+            performed_at = timestamp_to_utc_datetime(int(log_data["performed_at"]))
+
+            concerned_instances = Instance.objects.none()
+            if "instances" in log_data:
+                concerned_instances = Instance.objects.filter(uuid__in=log_data["instances"])
+
+            concerned_orgunit = None
+            if "org_unit_id" in log_data and log_data["org_unit_id"] is not None:
+                concerned_orgunit = OrgUnit.objects.get(id=log_data["org_unit_id"])
+
+            concerned_entity = None
+            entity_id = log_data.get("entity_id") or log_data.get("entity_uuid")
+            if entity_id:
+                concerned_entity = Entity.objects.get(uuid=entity_id)
+
+            account = user.iaso_profile.account
+
+            # 1. Create the storage device, if needed
+            device, _ = StorageDevice.objects.get_or_create(
+                account=account, customer_chosen_id=storage_id, type=storage_type
+            )
+
+            StorageLogEntry.objects.create_and_update_device(
+                log_id=log_id,
+                device=device,
+                operation_type=operation_type,
+                performed_at=performed_at,
+                user=user,
+                concerned_orgunit=concerned_orgunit,
+                concerned_entity=concerned_entity,
+                concerned_instances=concerned_instances,
+            )


 def logs_for_device_generate_export(
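Reviewer note: the extracted helper keeps the exact payload contract the mobile app already sends to the storage log endpoint above. As a hedged illustration (field names are taken from the reads in import_storage_logs; every value below is invented), one well-formed entry looks like:

    # Hypothetical example payload for import_storage_logs(data, user).
    # Field names mirror the lookups above; the values are made up.
    example_log = {
        "id": "00000000-0000-0000-0000-000000000001",  # client-side log UUID; duplicates are skipped
        "storage_id": "ABC123==",                      # becomes StorageDevice.customer_chosen_id
        "storage_type": "NFC",                         # must be a valid STORAGE_TYPE_CHOICES label
        "operation_type": "WRITE_PROFILE",             # must be a valid OPERATION_TYPE_CHOICES label
        "performed_at": 1733226445.771,                # epoch seconds; cast to int, then to UTC datetime
        "instances": [],                               # submission UUIDs, resolved via uuid__in
        "org_unit_id": 1,                              # optional
        "entity_id": "11111111-1111-1111-1111-111111111111",  # optional; "entity_uuid" is also accepted
    }
    # import_storage_logs([example_log], user)  # user needs an iaso_profile with an account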
diff --git a/iaso/tasks/process_mobile_bulk_upload.py b/iaso/tasks/process_mobile_bulk_upload.py
index 4001432178..831fcca708 100644
--- a/iaso/tasks/process_mobile_bulk_upload.py
+++ b/iaso/tasks/process_mobile_bulk_upload.py
@@ -30,11 +30,13 @@
 from hat.sync.views import create_instance_file, process_instance_file
 from iaso.api.instances import import_data as import_instances
 from iaso.api.mobile.org_units import import_data as import_org_units
+from iaso.api.storage import import_storage_logs
 from iaso.models import Project, Instance
 from iaso.utils.s3_client import download_file

 INSTANCES_JSON = "instances.json"
 ORG_UNITS_JSON = "orgUnits.json"
+STORAGE_LOGS_JSON = "storageLogs.json"

 logger = logging.getLogger(__name__)

@@ -88,6 +90,11 @@ def process_mobile_bulk_upload(api_import_id, project_id, task=None):
             duplicated_count = duplicate_instance_files(new_instance_files)
             stats["new_instance_files"] = len(new_instance_files) + duplicated_count

+            if STORAGE_LOGS_JSON in zip_ref.namelist():
+                logger.info("Processing storage logs")
+                storage_logs_data = read_json_file_from_zip(zip_ref, STORAGE_LOGS_JSON)
+                import_storage_logs(storage_logs_data, user)
+
     except Exception as e:
         logger.exception("Exception! Rolling back import: " + str(e))
         api_import.has_problem = True
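Reviewer note: read_json_file_from_zip is the same existing helper already used for INSTANCES_JSON and ORG_UNITS_JSON. For context, reading a JSON member out of an already-open archive amounts to roughly this standard-library sketch (not the helper's actual implementation; the archive path is made up):

    import json
    import zipfile

    def read_json_member(zip_ref: zipfile.ZipFile, name: str):
        # zip_ref.open() yields a binary file object; json.load() accepts it directly.
        with zip_ref.open(name) as f:
            return json.load(f)

    with zipfile.ZipFile("/tmp/upload.zip") as zip_ref:  # hypothetical path
        if "storageLogs.json" in zip_ref.namelist():
            storage_logs_data = read_json_member(zip_ref, "storageLogs.json")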
diff --git a/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/5475bfcf-5a3f-4170-9d88-245d89352362/3_2_809f9a76-3f3f-4033-aefb-98f47fb2ccaf_2024-12-03_12-46-06.xml b/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/5475bfcf-5a3f-4170-9d88-245d89352362/3_2_809f9a76-3f3f-4033-aefb-98f47fb2ccaf_2024-12-03_12-46-06.xml
new file mode 100644
index 0000000000..c6c1f9db23
--- /dev/null
+++ b/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/5475bfcf-5a3f-4170-9d88-245d89352362/3_2_809f9a76-3f3f-4033-aefb-98f47fb2ccaf_2024-12-03_12-46-06.xml
@@ -0,0 +1,15 @@
[15-line ODK-style XML submission fixture; the element tags were lost in extraction. Recoverable content: field values "test", "teszt" and "m", plus the meta instanceID "uuid:38e7cfff-4489-46ba-87e5-e436c7e8d815".]
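Reviewer note: the fixture above is a standard ODK-style submission, so its stable anchor is the instanceID inside the meta block. Pulling that ID out with nothing but the standard library looks roughly like this (a standalone sketch, not Iaso's actual parsing code):

    import xml.etree.ElementTree as ET

    # Minimal stand-in for the fixture's structure; only meta/instanceID is shown.
    xml_doc = "<data><meta><instanceID>uuid:38e7cfff-4489-46ba-87e5-e436c7e8d815</instanceID></meta></data>"
    root = ET.fromstring(xml_doc)
    print(root.findtext("./meta/instanceID"))  # uuid:38e7cfff-4489-46ba-87e5-e436c7e8d815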
diff --git a/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/instances.json b/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/instances.json
new file mode 100644
index 0000000000..b2980d4261
--- /dev/null
+++ b/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/instances.json
@@ -0,0 +1,13 @@
+[
+    {
+        "id": "5475bfcf-5a3f-4170-9d88-245d89352362",
+        "created_at": 1.733226371386e9,
+        "updated_at": 1.733226371386e9,
+        "file": "/storage/emulated/0/Android/data/com.bluesquarehub.iaso/files/Documents/iaso/instances/3_2_2024-12-03_12-46-06/3_2_809f9a76-3f3f-4033-aefb-98f47fb2ccaf_2024-12-03_12-46-06.xml",
+        "name": "Test profile",
+        "formId": "1",
+        "orgUnitId": "1",
+        "entityUuid": "5475bfcf-5a3f-4170-9d88-245d89352362",
+        "entityTypeId": "1"
+    }
+]
diff --git a/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/storageLogs.json b/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/storageLogs.json
new file mode 100644
index 0000000000..8272c4bb16
--- /dev/null
+++ b/iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests/storageLogs.json
@@ -0,0 +1,21 @@
+[
+    {
+        "id": "ff023d13-25fa-43c5-9717-5f427b929cb6",
+        "storage_id": "BGJGcuxqgA==",
+        "storage_type": "NFC",
+        "operation_type": "RESET",
+        "instances": [],
+        "org_unit_id": "1",
+        "performed_at": 1.733226445771e9
+    },
+    {
+        "id": "da14ef24-74a1-4f7b-9621-6e33e88773b8",
+        "storage_id": "BGJGcuxqgA==",
+        "storage_type": "NFC",
+        "entity_id": "5475bfcf-5a3f-4170-9d88-245d89352362",
+        "instances": ["5475bfcf-5a3f-4170-9d88-245d89352362"],
+        "org_unit_id": "1",
+        "performed_at": 1729839290.867,
+        "operation_type": "WRITE_PROFILE"
+    }
+]
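Reviewer note: the performed_at values are epoch seconds serialized as floats; 1.733226445771e9, for example, is 2024-12-03 11:47:25 UTC, which matches the local-time 12-46-06 stamp in the XML filename to within a minute. Assuming timestamp_to_utc_datetime is equivalent to the standard fromtimestamp conversion, the import applies:

    from datetime import datetime, timezone

    performed_at = 1.733226445771e9  # value from the RESET log above
    # The endpoint truncates to whole seconds with int() before converting.
    dt = datetime.fromtimestamp(int(performed_at), tz=timezone.utc)
    print(dt.isoformat())  # 2024-12-03T11:47:25+00:00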
diff --git a/iaso/tests/tasks/test_process_mobile_bulk_upload.py b/iaso/tests/tasks/test_process_mobile_bulk_upload.py
index a1b06cb5aa..7cf77092cf 100644
--- a/iaso/tests/tasks/test_process_mobile_bulk_upload.py
+++ b/iaso/tests/tasks/test_process_mobile_bulk_upload.py
@@ -616,3 +616,59 @@ def test_duplicate_uuids_multiple_active(self, mock_logger, mock_download_file):
         self.assertEqual(ent1.instances.count() + ent2.instances.count(), 3)
         err_msg = f"Multiple non-deleted entities for UUID {ent1.uuid}, entity_type_id {self.default_entity_type.id}"
         mock_logger.exception.assert_called_once_with(err_msg)
+
+    def test_storage_logs(self, mock_download_file):
+        entity_uuid = "5475bfcf-5a3f-4170-9d88-245d89352362"
+        files_for_zip = [
+            "instances.json",
+            "storageLogs.json",
+            entity_uuid,  # the folder with the XML submission
+        ]
+        with zipfile.ZipFile(f"/tmp/{entity_uuid}.zip", "w", zipfile.ZIP_DEFLATED) as zipf:
+            add_to_zip(zipf, zip_fixture_dir("storage_logs_and_change_requests"), files_for_zip)
+
+        mock_download_file.return_value = f"/tmp/{entity_uuid}.zip"
+
+        self.assertEqual(m.Entity.objects.count(), 0)
+        self.assertEqual(m.Instance.objects.count(), 0)
+        self.assertEqual(m.StorageDevice.objects.count(), 0)
+        self.assertEqual(m.StorageLogEntry.objects.count(), 0)
+
+        process_mobile_bulk_upload(
+            api_import_id=self.api_import.id,
+            project_id=self.project.id,
+            task=self.task,
+            _immediate=True,
+        )
+
+        mock_download_file.assert_called_once()
+
+        # Check Task status and result
+        self.task.refresh_from_db()
+        self.assertEqual(self.task.status, m.SUCCESS)
+        self.api_import.refresh_from_db()
+        self.assertEqual(self.api_import.import_type, "bulk")
+        self.assertFalse(self.api_import.has_problem)
+
+        # Instances (submissions) and the Entity were created
+        self.assertEqual(m.Entity.objects.count(), 1)
+        entity = m.Entity.objects.get(uuid=entity_uuid)
+        self.assertEqual(m.Instance.objects.count(), 1)
+        instance = m.Instance.objects.get(uuid=entity_uuid)
+
+        # Storage logs
+        self.assertEqual(m.StorageDevice.objects.count(), 1)
+        self.assertEqual(m.StorageLogEntry.objects.count(), 2)
+        storage_device = m.StorageDevice.objects.first()
+        self.assertEqual(storage_device.type, "NFC")
+        self.assertEqual(storage_device.org_unit_id, 1)
+        self.assertEqual(storage_device.entity, entity)
+
+        reset_log = m.StorageLogEntry.objects.get(operation_type="RESET")
+        self.assertEqual(reset_log.org_unit_id, 1)
+        self.assertIsNone(reset_log.entity)
+
+        write_log = m.StorageLogEntry.objects.get(operation_type="WRITE_PROFILE")
+        self.assertEqual(write_log.org_unit_id, 1)
+        self.assertEqual(write_log.entity, entity)
+        self.assertEqual(list(write_log.instances.all()), [instance])
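Reviewer note: add_to_zip and zip_fixture_dir are the suite's existing helpers. For anyone rebuilding the fixture archive by hand, an equivalent standard-library sketch (the fixture path is assumed from the tree above) would be:

    import os
    import zipfile

    fixture_dir = "iaso/tests/fixtures/mobile_bulk_uploads/storage_logs_and_change_requests"
    entity_uuid = "5475bfcf-5a3f-4170-9d88-245d89352362"

    with zipfile.ZipFile(f"/tmp/{entity_uuid}.zip", "w", zipfile.ZIP_DEFLATED) as zipf:
        # Top-level JSON manifests go in as-is.
        for name in ["instances.json", "storageLogs.json"]:
            zipf.write(os.path.join(fixture_dir, name), arcname=name)
        # The folder named after the entity UUID holds the XML submission file(s).
        folder = os.path.join(fixture_dir, entity_uuid)
        for fname in os.listdir(folder):
            zipf.write(os.path.join(folder, fname), arcname=f"{entity_uuid}/{fname}")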