diff --git a/README.md b/README.md index 887248af..9a785665 100644 --- a/README.md +++ b/README.md @@ -217,22 +217,11 @@ Check out the [Patient-everything operation spec](https://www.hl7.org/fhir/opera ### Bulk Data Access -The server contains functionality for the FHIR Bulk Data Import operation using the [Ping and Pull Approach](https://github.com/smart-on-fhir/bulk-import/blob/master/import-pnp.md). +The server contains functionality for the bulk $import operation as defined by the [Data Exchange for Quality Measures Implementation Guide](https://build.fhir.org/ig/HL7/davinci-deqm/branches/bulk-import-draft/bulk-import.html#data-exchange-using-bulk-import). -To implement a bulk data import operation of all the resources on a FHIR Bulk Data Export server, POST a valid FHIR parameters object to `http://localhost:3000/$import`. Use the parameter format below to specify a bulk export server. +The first step in the bulk $import operation workflow is to gather data for submission and organize that data into a set of inputs that this server will retrieve. This is outlined in detail in the [DEQM IG Data Exchange Using Bulk $import Section](https://build.fhir.org/ig/HL7/davinci-deqm/branches/bulk-import-draft/bulk-import.html#data-exchange-using-bulk-import). _By type_ inputs can be gathered using the [bulk-export-server](https://github.com/projecttacoma/bulk-export-server) $export operation; _by subject_ and hybrid _by type_/_by subject_ inputs are not yet implemented in this server. -To implement the bulk data import operation from the data requirements for a specific measure, first POST a valid transaction bundle. Then, POST a valid FHIR parameters object to `http://localhost:3000/4_0_1/Measure/$bulk-submit-data` or `http://localhost:3000/4_0_1/Measure//$bulk-submit-data` with the `"prefer": "respond-async"` header populated. This will kick off the "ping and pull" bulk import. - -For the bulk data import operation to be successful, the user must specify an export URL to a FHIR Bulk Data Export server in the request body of the FHIR parameters object. For example, in the `parameter` array of the FHIR parameters object, the user can include - -```bash -{ - "name": "exportUrl", - "valueString": "https://example-export.com" -} -``` - -with a valid kickoff endpoint URL for the `valueString`. +To kick off a bulk data import operation, POST a valid [Import Manifest](https://build.fhir.org/ig/HL7/davinci-deqm/branches/bulk-import-draft/StructureDefinition-ImportManifest.html) object to `http://localhost:3000/$import`. The user can check the status of an `$import` or `$bulk-submit-data` request by copying the `Content-Location` header from the response and sending a GET request to that location on `http://localhost:3000`.
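For a concrete picture of the manifest shape the new `$import` endpoint expects, here is a minimal sketch assembled from this PR's test fixtures (`paramOneInput.json` and `testBulkStatus.json`); the `requestIdentity` value and the ndjson URL are placeholders, and the authoritative parameter definitions live in the DEQM ImportManifest profile linked above.

```json
{
  "resourceType": "Parameters",
  "parameter": [
    { "name": "requestIdentity", "valueString": "example-request-id" },
    {
      "name": "input",
      "part": [
        { "name": "url", "valueUrl": "http://example.com/Patient.ndjson" }
      ]
    }
  ]
}
```

POSTing this body with the `Content-Type: application/json+fhir` header used throughout this PR's tests should yield a `202 Accepted` response whose `Content-Location` header points at the bulkstatus endpoint for polling.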
diff --git a/package-lock.json b/package-lock.json index 6cf1ba1a..3ea29d8b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,7 +14,6 @@ "@projecttacoma/node-fhir-server-core": "^2.2.7", "axios": "^0.28.0", "bee-queue": "^1.4.0", - "bulk-data-utilities": "git+https://git@github.com/projecttacoma/bulk-data-utilities", "cors": "^2.8.5", "cql-exec-fhir-mongo": "git+https://git@github.com/projecttacoma/cql-exec-fhir-mongo", "dotenv": "^10.0.0", @@ -3717,24 +3716,6 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, - "node_modules/bulk-data-utilities": { - "version": "0.0.1", - "resolved": "git+https://git@github.com/projecttacoma/bulk-data-utilities.git#60d874be9635da8ec26574c98cce83a476bdfdcf", - "license": "Apache-2.0", - "dependencies": { - "axios": "^0.21.2", - "fqm-execution": "^1.0.0-beta.6", - "path": "^0.12.7" - } - }, - "node_modules/bulk-data-utilities/node_modules/axios": { - "version": "0.21.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", - "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", - "dependencies": { - "follow-redirects": "^1.14.0" - } - }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -8355,15 +8336,6 @@ "node": ">= 0.4.0" } }, - "node_modules/path": { - "version": "0.12.7", - "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz", - "integrity": "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q==", - "dependencies": { - "process": "^0.11.1", - "util": "^0.10.3" - } - }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -8509,14 +8481,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -10061,24 +10025,11 @@ "requires-port": "^1.0.0" } }, - "node_modules/util": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", - "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", - "dependencies": { - "inherits": "2.0.3" - } - }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, - "node_modules/util/node_modules/inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" - }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -13484,25 +13435,6 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, - "bulk-data-utilities": { - "version": 
"git+https://git@github.com/projecttacoma/bulk-data-utilities.git#60d874be9635da8ec26574c98cce83a476bdfdcf", - "from": "bulk-data-utilities@git+https://git@github.com/projecttacoma/bulk-data-utilities", - "requires": { - "axios": "^0.21.2", - "fqm-execution": "^1.0.0-beta.6", - "path": "^0.12.7" - }, - "dependencies": { - "axios": { - "version": "0.21.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", - "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", - "requires": { - "follow-redirects": "^1.14.0" - } - } - } - }, "bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -16974,15 +16906,6 @@ "resolved": "https://registry.npmjs.org/passport-strategy/-/passport-strategy-1.0.0.tgz", "integrity": "sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA==" }, - "path": { - "version": "0.12.7", - "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz", - "integrity": "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q==", - "requires": { - "process": "^0.11.1", - "util": "^0.10.3" - } - }, "path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -17081,11 +17004,6 @@ } } }, - "process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" - }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -18283,21 +18201,6 @@ "requires-port": "^1.0.0" } }, - "util": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", - "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", - "requires": { - "inherits": "2.0.3" - }, - "dependencies": { - "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" - } - } - }, "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", diff --git a/package.json b/package.json index e2c21f9e..cba0185d 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,6 @@ "@projecttacoma/node-fhir-server-core": "^2.2.7", "axios": "^0.28.0", "bee-queue": "^1.4.0", - "bulk-data-utilities": "git+https://git@github.com/projecttacoma/bulk-data-utilities", "cors": "^2.8.5", "cql-exec-fhir-mongo": "git+https://git@github.com/projecttacoma/cql-exec-fhir-mongo", "dotenv": "^10.0.0", @@ -54,4 +53,4 @@ "./test/globalSetup.js" ] } -} \ No newline at end of file +} diff --git a/src/database/dbOperations.js b/src/database/dbOperations.js index d036f9ca..f6c6e8fc 100644 --- a/src/database/dbOperations.js +++ b/src/database/dbOperations.js @@ -131,7 +131,7 @@ const findResourcesWithAggregation = async (query, resourceType) => { * which can be queried to get updates on the status of the bulk import * @returns {string} the id of the inserted client */ -const addPendingBulkImportRequest = async () => { +const addPendingBulkImportRequest = async body => { const collection = db.collection('bulkImportStatuses'); const clientId = uuidv4(); const bulkImportClient = { 
@@ -146,7 +146,8 @@ const addPendingBulkImportRequest = async () => { totalFileCount: -1, exportedResourceCount: -1, totalResourceCount: -1, - failedOutcomes: [] + failedOutcomes: [], + importManifest: body }; logger.debug(`Adding a bulkImportStatus for clientId: ${clientId}`); await collection.insertOne(bulkImportClient); @@ -195,6 +196,31 @@ const pushBulkFailedOutcomes = async (clientId, failedOutcomes) => { await collection.findOneAndUpdate({ id: clientId }, { $push: { failedOutcomes: { $each: failedOutcomes } } }); }; +/** + * Pushes an array of error messages to an ndjson status entry; the messages are later converted to + * OperationOutcomes and made accessible to the requestor via an ndjson file + * @param {String} clientId The id associated with the bulkImport request + * @param {String} fileUrl The url for the resource ndjson + * @param {Array} failedOutcomes An array of strings with messages detailing why the resource failed import + * @returns {String} the clientId associated with the inserted ndjson status entry + */ +const pushNdjsonFailedOutcomes = async (clientId, fileUrl, failedOutcomes) => { + const collection = db.collection('ndjsonStatuses'); + await collection.insertOne({ id: clientId + fileUrl, failedOutcomes: failedOutcomes }); + return clientId; +}; + +/** + * Wrapper for the findResourceById function that only searches ndjsonStatuses db + * @param {string} clientId The id signifying the bulk status request + * @param {string} fileUrl The ndjson fileUrl + * @returns {Object} The ndjson status entry for the passed in clientId and fileUrl + */ +const getNdjsonFileStatus = async (clientId, fileUrl) => { + const status = await findResourceById(clientId + fileUrl, 'ndjsonStatuses'); + return status; +}; + /** * Wrapper for the findResourceById function that only searches bulkImportStatuses db * @param {string} clientId The id signifying the bulk status request @@ -318,5 +343,7 @@ module.exports = { removeResource, updateResource, updateSuccessfulImportCount, - getCountOfCollection + getCountOfCollection, + pushNdjsonFailedOutcomes, + getNdjsonFileStatus }; diff --git a/src/server/importWorker.js b/src/server/importWorker.js index d2f310fe..136c79be 100644 --- a/src/server/importWorker.js +++ b/src/server/importWorker.js @@ -1,7 +1,6 @@ // Sets up queue which processes the jobs pushed to Redis // This queue is run in a child process when the server is started const Queue = require('bee-queue'); -const { BulkImportWrappers } = require('bulk-data-utilities'); const { failBulkImportRequest, initializeBulkFileCount } = require('../database/dbOperations'); const mongoUtil = require('../database/connection'); const ndjsonQueue = require('../queue/ndjsonProcessQueue'); @@ -16,16 +15,14 @@ const importQueue = new Queue('import', { removeOnSuccess: true }); -// This handler pulls down the jobs on Redis to handle importQueue.process(async job => { // Payload of createJob exists on job.data - const { clientEntry, exportURL, exportType, measureBundle, useTypeFilters } = job.data; + const { clientEntry, inputUrls } = job.data; logger.info(`import-worker-${process.pid}: Processing Request: ${clientEntry}`); await mongoUtil.client.connect(); - // Call the existing export ndjson function that writes the files - logger.info(`import-worker-${process.pid}: Kicking off export request: ${exportURL}`); - const result = await executePingAndPull(clientEntry, exportURL, exportType, measureBundle, useTypeFilters); + // Kick off the import workflow that enqueues parsing jobs for the ndjson input files + const result = await executeImportWorkflow(clientEntry, inputUrls); if (result) { logger.info(`import-worker-${process.pid}: Enqueued jobs for:
${clientEntry}`); } else { @@ -34,50 +31,26 @@ importQueue.process(async job => { await mongoUtil.client.close(); }); -/** - * Calls the bulk-data-utilities wrapper function to get data requirements for the passed in measure, convert those to - * export requests from a bulk export server, then retrieve ndjson from that server and parse it into valid transaction bundles. - * Finally, uploads the resulting transaction bundles to the server and updates the bulkstatus endpoint - * @param {string} clientEntryId The unique identifier which corresponds to the bulkstatus content location for update - * @param {string} exportUrl The url of the bulk export fhir server - * @param {string} exportType The code of the exportType - * @param {Object} measureBundle The measure bundle for which to retrieve data requirements - * @param {boolean} useTypeFilters optional boolean for whether to use type filters for bulk submit data - */ -const executePingAndPull = async (clientEntryId, exportUrl, exportType, measureBundle, useTypeFilters) => { +/** + * Initializes the bulk status file count for the import request, then enqueues an ndjson parsing + * job for each input url. Fails the bulk import request if an error is thrown. + * @param {string} clientEntryId The unique identifier which corresponds to the bulkstatus content location for update + * @param {Array} inputUrls array of objects containing the resource type and url for each ndjson input file + */ +const executeImportWorkflow = async (clientEntryId, inputUrls) => { try { - // Default to not use typeFilters for measure specific import - const output = await BulkImportWrappers.executeBulkImport( - exportUrl, - exportType, - measureBundle, - useTypeFilters || false - ); - - if (output.length === 0) { - throw new Error('Export server failed to export any resources'); - } - // Calculate number of resources to export, if available. Otherwise, set to -1. - const resourceCount = output.reduce((resources, fileInfo) => { - if (resources === -1 || fileInfo.count === undefined) { - return -1; - } - return resources + fileInfo.count; - }, 0); - - await initializeBulkFileCount(clientEntryId, output.length, resourceCount); + await initializeBulkFileCount(clientEntryId, inputUrls.length, -1); // Enqueue a parsing job for each ndjson file await ndjsonQueue.saveAll( - output.map(locationInfo => + inputUrls.map(url => ndjsonQueue.createJob({ - fileUrl: locationInfo.url, + fileUrl: url.url, clientId: clientEntryId, - resourceCount: resourceCount === -1 ?
-1 : locationInfo.count + resourceCount: -1 }) ) ); - return true; } catch (e) { await failBulkImportRequest(clientEntryId, e); diff --git a/src/server/ndjsonWorker.js b/src/server/ndjsonWorker.js index a03b7877..bfa70c67 100644 --- a/src/server/ndjsonWorker.js +++ b/src/server/ndjsonWorker.js @@ -2,7 +2,7 @@ // This queue is run in a child process when the server is started const Queue = require('bee-queue'); const axios = require('axios'); -const { updateResource, pushBulkFailedOutcomes } = require('../database/dbOperations'); +const { updateResource, pushBulkFailedOutcomes, pushNdjsonFailedOutcomes } = require('../database/dbOperations'); const mongoUtil = require('../database/connection'); const { checkSupportedResource } = require('../util/baseUtils'); const logger = require('./logger'); @@ -39,7 +39,19 @@ ndjsonWorker.process(async job => { logger.info(`ndjson-worker-${process.pid}: processing ${fileName}`); await mongoUtil.client.connect(); - const ndjsonResources = await retrieveNDJSONFromLocation(fileUrl); + + let ndjsonResources; + try { + ndjsonResources = await retrieveNDJSONFromLocation(fileUrl); + } catch (e) { + const outcome = [`ndjson retrieval of ${fileUrl} failed with message: ${e.message}`]; + + await pushNdjsonFailedOutcomes(clientId, fileUrl, outcome); + await pushBulkFailedOutcomes(clientId, outcome); + process.send({ clientId, resourceCount: 0, successCount: 0 }); + logger.info(`ndjson-worker-${process.pid}: failed to fetch ${fileName}`); + return; + } const insertions = ndjsonResources .trim() @@ -68,15 +80,19 @@ ndjsonWorker.process(async job => { const outcomes = await Promise.allSettled(insertions); const failedOutcomes = outcomes.filter(outcome => outcome.status === 'rejected'); - const succesfulOutcomes = outcomes.filter(outcome => outcome.status === 'fulfilled'); + const successfulOutcomes = outcomes.filter(outcome => outcome.status === 'fulfilled'); const outcomeData = []; failedOutcomes.forEach(out => { outcomeData.push(out.reason.message); }); + + // keep track of failed outcomes for individual ndjson files + await pushNdjsonFailedOutcomes(clientId, fileUrl, outcomeData); + await pushBulkFailedOutcomes(clientId, outcomeData); - const successCount = succesfulOutcomes.length; + const successCount = successfulOutcomes.length; logger.info(`ndjson-worker-${process.pid}: processed ${fileName}`); process.send({ clientId, resourceCount, successCount }); diff --git a/src/services/bulkstatus.service.js b/src/services/bulkstatus.service.js index 5d52c4b5..37dadda9 100644 --- a/src/services/bulkstatus.service.js +++ b/src/services/bulkstatus.service.js @@ -1,5 +1,5 @@ const { NotFoundError, BulkStatusError } = require('../util/errorUtils'); -const { getBulkImportStatus } = require('../database/dbOperations'); +const { getBulkImportStatus, getNdjsonFileStatus } = require('../database/dbOperations'); const { resolveSchema } = require('@projecttacoma/node-fhir-server-core'); const { v4: uuidv4 } = require('uuid'); const fs = require('fs'); @@ -17,6 +17,7 @@ async function checkBulkStatus(req, res) { const clientId = req.params.client_id; logger.debug(`Retrieving bulkStatus entry for client: ${clientId}`); const bulkStatus = await getBulkImportStatus(clientId); + let response; if (!bulkStatus) { const outcome = {}; @@ -39,13 +40,7 @@ async function checkBulkStatus(req, res) { } logger.debug(`Retrieved the following bulkStatus entry for client: ${clientId}. 
${JSON.stringify(bulkStatus)}`); - if (bulkStatus.status === 'Failed') { - logger.debug(`bulkStatus entry is failed`); - throw new BulkStatusError( - bulkStatus.error.message || `An unknown error occurred during bulk import with id: ${clientId}`, - bulkStatus.error.code || 'UnknownError' - ); - } else if (bulkStatus.status === 'In Progress') { + if (bulkStatus.status === 'In Progress') { logger.debug(`bulkStatus entry is in progress`); res.status(202); // Compute percent of files or resources exported @@ -60,65 +55,109 @@ async function checkBulkStatus(req, res) { } res.set('X-Progress', `${(percentComplete * 100).toFixed(2)}% Done`); res.set('Retry-After', 120); - } else if (bulkStatus.status === 'Completed') { - logger.debug(`bulkStatus entry is completed`); + } else if (bulkStatus.status === 'Failed') { + logger.debug(`bulkStatus entry is failed`); res.status(200); - res.set('Expires', 'EXAMPLE_EXPIRATION_DATE'); - // Create and respond with operation outcome - const outcome = {}; - outcome.id = uuidv4(); - outcome.issue = [ - { - severity: 'information', - code: 'informational', - details: { - coding: [ - { - code: 'MSG_CREATED', - display: 'New resource created' + // Create FHIR Bundle response + response = { + resourceType: 'Bundle', + type: 'batch-response', + entry: [ + { + response: { + status: '400', + outcome: { + resourceType: 'OperationOutcome', + issue: [ + { + severity: 'fatal', + code: 'transient', + details: { + text: bulkStatus.error.message ?? "Internal System Error: '$import' request not processed." + } + } + ] } - ], - text: `Bulk import successfully completed, successfully imported ${bulkStatus.successCount} resources` + } } - } - ]; - const OperationOutcome = resolveSchema(req.params.base_version, 'operationoutcome'); - writeToFile(JSON.parse(JSON.stringify(new OperationOutcome(outcome).toJSON())), 'OperationOutcome', clientId); + ] + }; + + return response; + } else if (bulkStatus.status === 'Completed') { + logger.debug(`bulkStatus entry is completed`); + res.status(200); + res.set('Expires', 'EXAMPLE_EXPIRATION_DATE'); - const response = { - transactionTime: '2021-01-01T00:00:00Z', - requiresAccessToken: true, - outcome: [ + response = { + resourceType: 'Bundle', + type: 'batch-response', + entry: [ { - type: 'OperationOutcome', - url: `http://${process.env.SERVER_HOST}:${process.env.SERVER_PORT}/${req.params.base_version}/file/${clientId}/OperationOutcome.ndjson` + response: { + status: '200' + }, + resource: { + resourceType: 'Parameters', + parameter: [] + } } - ], - extension: { 'https://example.com/extra-property': true } + ] }; + if (bulkStatus.importManifest.parameter.find(p => p.name === 'requestIdentity')) { + response.entry[0].resource.parameter.push( + bulkStatus.importManifest.parameter.find(p => p.name === 'requestIdentity') + ); + } + + // check if there were any errors with the ndjson files if (bulkStatus.failedOutcomes.length > 0) { - logger.debug(`bulkStatus entry contains failed outcomes`); - bulkStatus.failedOutcomes.forEach(fail => { - const failOutcome = {}; - failOutcome.id = uuidv4(); - failOutcome.issue = [ + logger.debug('bulkStatus entry contains failed outcomes'); + + // Go through all of the failed outcomes and add them as + // outcomes on the Parameters resource + + for (const parameter of bulkStatus.importManifest.parameter) { + if (parameter.name === 'input') { + const url = parameter.part.find(p => p.name === 'url'); + const ndjsonStatus = await getNdjsonFileStatus(clientId, url.valueUrl); + if (ndjsonStatus) { + 
ndjsonStatus.failedOutcomes.forEach(fail => { + const inputResult = { + name: 'outcome', + part: [ + { name: 'associatedInputUrl', valueUrl: url.valueUrl }, + { + name: 'operationOutcome', + resource: { + resourceType: 'OperationOutcome', + issue: [{ severity: 'error', code: 'processing', details: { text: fail } }] + } + } + ] + }; + response.entry[0].resource.parameter.push(inputResult); + }); + } + } + } + } else { + response.entry[0].resource.parameter.push({ + name: 'outcome', + part: [ { - severity: 'error', - code: 'BadRequest', - details: { - text: fail + name: 'operationOutcome', + resource: { + resourceType: 'OperationOutcome', + issue: [{ severity: 'information', code: 'informational', details: { text: 'All OK' } }] } } - ]; - writeToFile(JSON.parse(JSON.stringify(new OperationOutcome(failOutcome).toJSON())), 'Errors', clientId); - }); - response.outcome.push({ - type: 'OperationOutcome', - url: `http://${process.env.SERVER_HOST}:${process.env.SERVER_PORT}/${req.params.base_version}/file/${clientId}/Errors.ndjson` + ] }); } + return response; } else { const outcome = {}; diff --git a/src/services/import.service.js b/src/services/import.service.js index 0bedf48a..7823138e 100644 --- a/src/services/import.service.js +++ b/src/services/import.service.js @@ -1,5 +1,6 @@ -const { addPendingBulkImportRequest } = require('../database/dbOperations'); -const { retrieveExportUrl, retrieveExportType } = require('../util/exportUtils'); +const { addPendingBulkImportRequest, failBulkImportRequest } = require('../database/dbOperations'); +const { retrieveInputUrls } = require('../util/exportUtils'); +const { checkContentTypeHeader } = require('../util/baseUtils'); const importQueue = require('../queue/importQueue'); const logger = require('../server/logger'); @@ -9,23 +10,28 @@ const logger = require('../server/logger'); * @param {Object} res The response object returned to the client by the server */ async function bulkImport(req, res) { - logger.info('Base >>> $import'); - logger.debug(`Request headers: ${JSON.stringify(req.header)}`); + logger.info('Base >>> Import'); + logger.debug(`Request headers: ${JSON.stringify(req.headers)}`); logger.debug(`Request body: ${JSON.stringify(req.body)}`); logger.debug(`Request params: ${JSON.stringify(req.params)}`); + checkContentTypeHeader(req.headers); + // ID assigned to the requesting client - const clientEntry = await addPendingBulkImportRequest(); + const clientEntry = await addPendingBulkImportRequest(req.body); + + try { + const inputUrls = retrieveInputUrls(req.body.parameter); - const exportURL = retrieveExportUrl(req.body.parameter); - const exportType = retrieveExportType(req.body.parameter); + const jobData = { + clientEntry, + inputUrls + }; + await importQueue.createJob(jobData).save(); + } catch (e) { + await failBulkImportRequest(clientEntry, e); + } - const jobData = { - clientEntry, - exportURL, - exportType - }; - await importQueue.createJob(jobData).save(); res.status(202); res.setHeader( 'Content-Location', diff --git a/src/services/measure.service.js b/src/services/measure.service.js index 8c6294ce..9af7a6e3 100644 --- a/src/services/measure.service.js +++ b/src/services/measure.service.js @@ -2,21 +2,15 @@ const { BadRequestError, ResourceNotFoundError } = require('../util/errorUtils') const { Calculator } = require('fqm-execution'); const { baseCreate, baseSearchById, baseRemove, baseUpdate, baseSearch } = require('./base.service'); const { createTransactionBundleClass } = require('../resources/transactionBundle'); -const { 
executePingAndPull } = require('./import.service'); const { handleSubmitDataBundles } = require('./bundle.service'); const importQueue = require('../queue/importQueue'); -const { retrieveExportType, retrieveExportUrl } = require('../util/exportUtils'); const { validateEvalMeasureParams, validateCareGapsParams, gatherParams, checkSubmitDataBody } = require('../util/operationValidationUtils'); -const { - getMeasureBundleFromId, - assembleCollectionBundleFromMeasure, - getQueryFromReference -} = require('../util/bundleUtils'); +const { getMeasureBundleFromId, assembleCollectionBundleFromMeasure } = require('../util/bundleUtils'); const { getPatientDataCollectionBundle, retrievePatientIds, @@ -30,6 +24,7 @@ const { findResourceById } = require('../database/dbOperations'); const { getResourceReference } = require('../util/referenceUtils'); +const { retrieveInputUrls } = require('../util/exportUtils'); const logger = require('../server/logger'); const { ScaledCalculation } = require('../queue/execQueue'); @@ -132,9 +127,7 @@ const submitData = async (args, { req }) => { }; /** - * Retrieves measure bundle from the measure ID and - * maps data requirements into an export request, which is - * returned to the initial import client. + * $bulk-submit-data follows the same workflow as bulk import * @param {Object} args the args object passed in by the user * @param {Object} req the request object passed in by the user */ @@ -143,48 +136,16 @@ const bulkSubmitData = async (args, { req }) => { logger.debug(`Request headers: ${JSON.stringify(req.header)}`); logger.debug(`Request body: ${JSON.stringify(req.body)}`); - checkSubmitDataBody(req.body); - // id of inserted client - const clientEntry = await addPendingBulkImportRequest(); + const clientEntry = await addPendingBulkImportRequest(req.body); const res = req.res; - // use measure ID and export server location to map to data-requirements - let measureId; - let measureBundle; - const parameters = req.body.parameter; - // case 1: request is in Measure//$bulk-submit-data format - if (req.params.id) { - measureId = req.params.id; - measureBundle = await getMeasureBundleFromId(measureId); - } - // case 2: request is in Measure/$bulk-submit-data format - else { - const measureReport = parameters.filter(param => param.resource?.resourceType === 'MeasureReport')[0]; - // get measure resource from db that matches measure param since no id is present in request - const query = getQueryFromReference(measureReport.resource.measure); - const measureResource = await findOneResourceWithQuery(query, 'Measure'); - measureId = measureResource.id; - measureBundle = await getMeasureBundleFromId(measureId); - } - // retrieve data requirements - const exportURL = retrieveExportUrl(parameters); - const exportType = retrieveExportType(parameters); - - // retrieve useTypeFilters boolean - const useTypeFiltersArray = parameters.filter(param => param.name === 'useTypeFilters'); - let useTypeFilters; - if (useTypeFiltersArray.length > 0) { - useTypeFilters = useTypeFiltersArray[0].valueBoolean; - } + const inputUrls = retrieveInputUrls(req.body.parameter); const jobData = { clientEntry, - exportURL, - exportType, - measureBundle, - useTypeFilters + inputUrls }; await importQueue.createJob(jobData).save(); res.status(202); @@ -573,6 +534,5 @@ module.exports = { bulkSubmitData, dataRequirements, evaluateMeasure, - careGaps, - executePingAndPull + careGaps }; diff --git a/src/util/errorUtils.js b/src/util/errorUtils.js index 38d2c27d..a7baf29f 100644 --- a/src/util/errorUtils.js 
+++ b/src/util/errorUtils.js @@ -5,7 +5,7 @@ const { ServerError } = require('@projecttacoma/node-fhir-server-core'); */ class CustomServerError extends ServerError { constructor(message, customStatusCode, customCode) { - super(null, { + super(message, { statusCode: customStatusCode, issue: [ { diff --git a/src/util/exportUtils.js b/src/util/exportUtils.js index bc9f60c6..0cacb99f 100644 --- a/src/util/exportUtils.js +++ b/src/util/exportUtils.js @@ -2,84 +2,37 @@ const { BadRequestError } = require('./errorUtils'); const logger = require('../server/logger'); /** - * Uses request body parameter to search for the export server URL. Validates that - * only one URL is present. - * @param {Object} parameters - request body parameter - * @returns export server URL string + * Uses the request body parameters to collect all of the ndjson input URLs + * @param {Array} parameters - the parameter array from the request body + * @returns {Array} array of objects containing the resource type and url for each ndjson input */ -const retrieveExportUrl = parameters => { - logger.debug(`Retrieving export URL from parameters: ${JSON.stringify(parameters)}`); - const exportUrlArray = parameters.filter(param => param.name === 'exportUrl'); - checkExportUrlArray(exportUrlArray); - let exportUrl = exportUrlArray[0].valueUrl; - - // Retrieve comma-delimited list of type filters from parameters - const typesString = parameters - .filter(param => param.name === '_type') - .map(function (type) { - logger.debug(`Adding type ${type} to exportUrl type parameter`); - return type.valueString; - }) - .toString(); - - const typeFilterString = parameters - .filter(param => param.name === '_typeFilter') - .map(function (typeFilter) { - logger.debug(`Adding typeFilter ${typeFilter} to exportUrl typeFilter parameter`); - return typeFilter.valueString; - }) - .toString(); - - if (typesString) { - if (exportUrl.includes(`_type=`)) { - console.warn('_type parameter already supplied in exportUrl. Omitting entries from parameter array'); - } else { - // add types from parameters to exportUrl - exportUrl += `${exportUrl.includes('_typeFilter=') ? '&' : '?'}_type=${typesString}`; - } - } - - if (typeFilterString) { - if (exportUrl.includes(`_typeFilter=`)) { - console.warn('_typeFilter parameter already supplied in exportUrl. Omitting entries from parameter array'); - } else { - // add type filters from parameters to exportUrl - exportUrl += `${exportUrl.includes('_type=') ? '&' : '?'}_typeFilter=${typeFilterString}`; - } - } - return exportUrl; +const retrieveInputUrls = parameters => { + logger.debug(`Retrieving all input URLs from parameters: ${JSON.stringify(parameters)}`); + const inputParamArray = parameters.filter(param => param.name === 'input'); + checkInputUrlArray(inputParamArray); + + const inputUrlArray = inputParamArray.flatMap(param => + param.part + .filter(p => p.name === 'url') + .map(part => ({ type: part.valueUrl.split('.ndjson')[0].split('/').at(-1), url: part.valueUrl })) + ); + return inputUrlArray; }; /** - * Checks whether the export URL array contains exactly one exportUrl - * @param {Array} exportUrlArray array of export URLs provided in request + * Checks that the input parameter array contains at least one input and that + * each input has a url part with a valueUrl + * @param {Array} inputParamArray array of input parameters provided in the request */ -const checkExportUrlArray = exportUrlArray => { - if (exportUrlArray.length === 0) { - throw new BadRequestError(`No exportUrl parameter was found.`); - } - if (exportUrlArray.length !== 1) { - throw new BadRequestError(`Expected exactly one export URL.
Received: ${exportUrlArray.length}`); - } - // if one export URL exists, check that valueUrl exists - if (!exportUrlArray[0].valueUrl) { - throw new BadRequestError(`Expected a valueUrl for the exportUrl, but none was found`); - } -}; - -/** - * Uses request body parameter to search for the export server type if there is one. - * If there is none, defaults to dynamic. - */ -const retrieveExportType = parameters => { - logger.debug(`Retrieving export type from parameters: ${JSON.stringify(parameters)}`); - const exportType = parameters.find(param => param.name === 'exportType'); - - if (!exportType) { - return 'dynamic'; - } else { - return exportType.valueCode; +const checkInputUrlArray = inputParamArray => { + if (inputParamArray.length === 0) { + throw new BadRequestError('No inputUrl parameters were found.'); } + inputParamArray.forEach(inputUrl => { + if (!inputUrl.part?.find(p => p.name === 'url')?.valueUrl) { + throw new BadRequestError('Expected a valueUrl for the inputUrl, but none were found.'); + } + }); }; -module.exports = { retrieveExportUrl, checkExportUrlArray, retrieveExportType }; +module.exports = { checkInputUrlArray, retrieveInputUrls }; diff --git a/test/fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeDeclarations.json b/test/fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeDeclarations.json deleted file mode 100644 index d9c84453..00000000 --- a/test/fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeDeclarations.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "resourceType": "Parameters", - "parameter": [ - { - "name": "exportUrl", - "valueUrl": "http://example.com/$export?_type=Patient,Encounter,Condition" - }, - { - "name": "_type", - "valueString": "Procedure" - } - ] - } \ No newline at end of file diff --git a/test/fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeFilterDeclarations.json b/test/fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeFilterDeclarations.json deleted file mode 100644 index cdfab1c4..00000000 --- a/test/fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeFilterDeclarations.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "resourceType": "Parameters", - "parameter": [ - { - "name": "exportUrl", - "valueUrl": "http://example.com/$export?_type=Encounter&_typeFilter=Encounter%3Fcode%3Ain=TEST_VALUE_SET" - }, - { - "name": "_typeFilter", - "valueString": "Encounter%3Fcode%3Ain=TEST_VALUE_SET" - } - ] - } - \ No newline at end of file diff --git a/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypeFilter.json b/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypeFilter.json deleted file mode 100644 index a4d661b5..00000000 --- a/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypeFilter.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "resourceType": "Parameters", - "parameter": [ - { - "name": "exportUrl", - "valueUrl": "http://example.com/$export?_type=Encounter" - }, - { - "name": "_typeFilter", - "valueString": "Encounter%3Fcode%3Ain=TEST_VALUE_SET" - } - ] -} diff --git a/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypes.json b/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypes.json deleted file mode 100644 index 4f4cabc0..00000000 --- a/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypes.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "resourceType": "Parameters", - "parameter": [ - { - "name": "exportUrl", - "valueUrl": "http://example.com/$export" - }, - { - "name": "_type", - "valueString": "Patient" - }, - { - "name": "_type", -
"valueString": "Encounter" - }, - { - "name": "_type", - "valueString": "Condition" - } - ] -} diff --git a/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypesAndFilters.json b/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypesAndFilters.json deleted file mode 100644 index 8582aef5..00000000 --- a/test/fixtures/fhir-resources/parameters/paramExportUrlWithTypesAndFilters.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "resourceType": "Parameters", - "parameter": [ - { - "name": "exportUrl", - "valueUrl": "http://example.com/$export" - }, - { - "name": "_type", - "valueString": "Patient" - }, - { - "name": "_type", - "valueString": "Encounter" - }, - { - "name": "_type", - "valueString": "Condition" - }, - { - "name": "_typeFilter", - "valueString": "Encounter%3Fcode%3Ain=TEST_VALUE_SET" - } - ] -} diff --git a/test/fixtures/fhir-resources/parameters/paramInvalidExport.json b/test/fixtures/fhir-resources/parameters/paramInvalidExport.json deleted file mode 100644 index 1f10fba4..00000000 --- a/test/fixtures/fhir-resources/parameters/paramInvalidExport.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "resourceType": "Parameters", - "parameter": [ - { - "name": "measureReport", - "resource": { - "resourceType": "MeasureReport", - "id": "measurereport-testMeasure", - "measure": "Measure/testMeasure" - } - }, - { - "name": "exportUrl", - "valueUrl": "not-real-url" - } - ] -} diff --git a/test/fixtures/fhir-resources/parameters/paramNoValueUrl.json b/test/fixtures/fhir-resources/parameters/paramNoInput.json similarity index 86% rename from test/fixtures/fhir-resources/parameters/paramNoValueUrl.json rename to test/fixtures/fhir-resources/parameters/paramNoInput.json index e93a880c..cff2d034 100644 --- a/test/fixtures/fhir-resources/parameters/paramNoValueUrl.json +++ b/test/fixtures/fhir-resources/parameters/paramNoInput.json @@ -8,9 +8,6 @@ "id": "measurereport-testMeasure", "measure": "Measure/testMeasure" } - }, - { - "name": "exportUrl" } ] } diff --git a/test/fixtures/fhir-resources/parameters/paramNoInputValueUrl.json b/test/fixtures/fhir-resources/parameters/paramNoInputValueUrl.json new file mode 100644 index 00000000..e3c370b0 --- /dev/null +++ b/test/fixtures/fhir-resources/parameters/paramNoInputValueUrl.json @@ -0,0 +1,13 @@ +{ + "resourceType": "Parameters", + "parameter": [ + { + "name": "input", + "part": [ + { + "name": "url" + } + ] + } + ] +} diff --git a/test/fixtures/fhir-resources/parameters/paramOneInput.json b/test/fixtures/fhir-resources/parameters/paramOneInput.json new file mode 100644 index 00000000..fd72b47c --- /dev/null +++ b/test/fixtures/fhir-resources/parameters/paramOneInput.json @@ -0,0 +1,14 @@ +{ + "resourceType": "Parameters", + "parameter": [ + { + "name": "input", + "part": [ + { + "name": "url", + "valueUrl": "http://example.com/Example1.ndjson" + } + ] + } + ] +} diff --git a/test/fixtures/fhir-resources/parameters/paramTwoExports.json b/test/fixtures/fhir-resources/parameters/paramTwoExports.json deleted file mode 100644 index 6526014b..00000000 --- a/test/fixtures/fhir-resources/parameters/paramTwoExports.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "resourceType": "Parameters", - "parameter": [ - { - "name": "measureReport", - "resource": { - "resourceType": "MeasureReport", - "id": "measurereport-testMeasure", - "measure": "Measure/testMeasure" - } - }, - { - "name": "exportUrl", - "valueUrl": "https://bulk-data.smarthealthit.org/eyJlcnIiOiIiLCJwYWdlIjoxMDAwMCwiZHVyIjoxMCwidGx0IjoxNSwibSI6MSwic3R1IjozLCJkZWwiOjB9/fhir" - }, - { - "name": 
"exportUrl", - "valueUrl": "secondUrl" - } - ] -} diff --git a/test/fixtures/fhir-resources/parameters/paramTwoInputs.json b/test/fixtures/fhir-resources/parameters/paramTwoInputs.json new file mode 100644 index 00000000..a2b72c78 --- /dev/null +++ b/test/fixtures/fhir-resources/parameters/paramTwoInputs.json @@ -0,0 +1,23 @@ +{ + "resourceType": "Parameters", + "parameter": [ + { + "name": "input", + "part": [ + { + "name": "url", + "valueUrl": "http://example.com/Example1.ndjson" + } + ] + }, + { + "name": "input", + "part": [ + { + "name": "url", + "valueUrl": "http://example.com/Example2.ndjson" + } + ] + } + ] +} diff --git a/test/fixtures/testBulkStatus.json b/test/fixtures/testBulkStatus.json index 60bc0061..48c6ba68 100644 --- a/test/fixtures/testBulkStatus.json +++ b/test/fixtures/testBulkStatus.json @@ -1,5 +1,18 @@ [ - { "id": "PENDING_REQUEST", "status": "In Progress", "error": { "code": null, "message": null } }, + { + "id": "PENDING_REQUEST", + "status": "In Progress", + "error": { "code": null, "message": null }, + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } + }, { "id": "PENDING_REQUEST_WITH_FILE_COUNT", "status": "In Progress", @@ -8,7 +21,16 @@ "totalResourceCount": -1, "exportedResourceCount": -1, "error": { "code": null, "message": null }, - "failedOutcomes": [] + "failedOutcomes": [], + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } }, { "id": "PENDING_REQUEST_WITH_RESOURCE_COUNT", @@ -18,7 +40,16 @@ "totalResourceCount": 500, "exportedResourceCount": 200, "error": { "code": null, "message": null }, - "failedOutcomes": [] + "failedOutcomes": [], + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } }, { "id": "ALMOST_COMPLETE_PENDING_REQUEST", @@ -28,31 +59,61 @@ "totalResourceCount": 500, "exportedResourceCount": 10, "error": { "code": null, "message": null }, - "failedOutcomes": [] + "failedOutcomes": [], + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } }, { "id": "COMPLETED_REQUEST", "status": "Completed", "error": { "code": null, "message": null }, - "failedOutcomes": [] + "failedOutcomes": [], + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } }, { "id": "COMPLETED_REQUEST_WITH_RESOURCE_ERRORS", "status": "Completed", "error": { "code": null, "message": null }, - "failedOutcomes": ["Test error message"] + "failedOutcomes": ["Test error message"], + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } }, { - "id": "KNOWN_ERROR_REQUEST", - "status": "Error", - "error": { "code": "ErrorCode", "message": "Known Error Occurred!" }, - "failedOutcomes": [] - }, - { - "id": "UNKNOWN_ERROR_REQUEST", - "status": "Error", - "error": { "code": null, "message": null }, - "failedOutcomes": [] + "id": "ERROR_REQUEST", + "status": "Failed", + "error": { "code": "ErrorCode", "message": "Error Occurred!" 
}, + "failedOutcomes": [], + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } }, { "id": "COMPLETED_REQUEST_WITH_RESOURCE_COUNT", @@ -63,6 +124,15 @@ "exportedResourceCount": 200, "successCount": 200, "error": { "code": null, "message": null }, - "failedOutcomes": [] + "failedOutcomes": [], + "importManifest": { + "resourceType": "Parameters", + "parameter": [ + { + "name": "requestIdentity", + "valueString": "request-identity-example" + } + ] + } } ] diff --git a/test/services/bulkstatus.service.test.js b/test/services/bulkstatus.service.test.js index 329d3040..c2abf2da 100644 --- a/test/services/bulkstatus.service.test.js +++ b/test/services/bulkstatus.service.test.js @@ -10,8 +10,9 @@ describe('bulkstatus.service', () => { server = initialize(config); await bulkStatusSetup(); }); + describe('checkBulkStatus logic', () => { - test('check 202 returned for pending request', async () => { + it('returns 202 status for pending request', async () => { await supertest(server.app) .get('/4_0_1/bulkstatus/PENDING_REQUEST') .expect(202) @@ -19,60 +20,27 @@ describe('bulkstatus.service', () => { expect(response.headers['retry-after']).toEqual('120'); }); }); - test('check 200 returned for completed request', async () => { + + it('returns 200 status for completed request and All OK Operation Outcome with no errors', async () => { const response = await supertest(server.app).get('/4_0_1/bulkstatus/COMPLETED_REQUEST').expect(200); expect(response.headers.expires).toBeDefined(); expect(response.headers['content-type']).toEqual('application/json; charset=utf-8'); expect(response.body).toBeDefined(); - expect(response.body.outcome[0].type).toEqual('OperationOutcome'); - await supertest(server.app) - .get(response.body.outcome[0].url.replace(`http://${process.env.SERVER_HOST}:${process.env.SERVER_PORT}`, '')) //TODO: may need to break apart base_url to get slug - .expect(200); - }); - test('check single OperationOutcome response for completed request', async () => { - const response = await supertest(server.app).get('/4_0_1/bulkstatus/COMPLETED_REQUEST').expect(200); - const operationResponse = await supertest(server.app) - .get(response.body.outcome[0].url.replace(`http://${process.env.SERVER_HOST}:${process.env.SERVER_PORT}`, '')) - .expect(200); - const count = (operationResponse.text.match(/OperationOutcome/g) || []).length; - expect(count).toEqual(1); + expect(response.body.entry[0].response.status).toEqual('200'); + // add check for All OK OperationOutcome ? 
}); - test('check 200 returned with error OperationOutcome ndjson file when it exists', async () => { - const response = await supertest(server.app) - .get('/4_0_1/bulkstatus/COMPLETED_REQUEST_WITH_RESOURCE_ERRORS') - .expect(200); - expect(response.headers.expires).toBeDefined(); + + it('returns 200 status and a batch-response bundle with 400 status when $import failed', async () => { + const response = await supertest(server.app).get('/4_0_1/bulkstatus/ERROR_REQUEST').expect(200); expect(response.headers['content-type']).toEqual('application/json; charset=utf-8'); expect(response.body).toBeDefined(); - expect(response.body.outcome.length).toEqual(2); - expect(response.body.outcome[1].type).toEqual('OperationOutcome'); - await supertest(server.app) - .get(response.body.outcome[1].url.replace(`http://${process.env.SERVER_HOST}:${process.env.SERVER_PORT}`, '')) //TODO: may need to break apart base_url to get slug - .expect(200); + expect(response.body.entry[0].response.status).toEqual('400'); + // add check for fatal OperationOutcome ? }); - test('check 500 and error returned for failed request with known error', async () => { - await supertest(server.app) - .get('/4_0_1/bulkstatus/KNOWN_ERROR_REQUEST') - .expect(500) - .then(response => { - expect(response.body.issue[0].code).toEqual('ErrorCode'); - expect(response.headers['content-type']).toEqual('application/json; charset=utf-8'); - expect(response.body.issue[0].details.text).toEqual('Known Error Occurred!'); - }); - }); - test('check 500 and generic error returned for request with unknown error', async () => { - await supertest(server.app) - .get('/4_0_1/bulkstatus/UNKNOWN_ERROR_REQUEST') - .expect(500) - .then(response => { - expect(response.body.issue[0].code).toEqual('UnknownError'); - expect(response.headers['content-type']).toEqual('application/json; charset=utf-8'); - expect(response.body.issue[0].details.text).toEqual( - 'An unknown error occurred during bulk import with id: UNKNOWN_ERROR_REQUEST' - ); - }); - }); - test('check 404 error returned for request with unknown ID', async () => { + + // TODO: Add tests for when a 200 status is returned but there were failed outcomes + + it('returns 404 status for request with unknown ID', async () => { await supertest(server.app) .get('/4_0_1/bulkstatus/INVALID_ID') .expect(404) @@ -81,16 +49,6 @@ describe('bulkstatus.service', () => { expect(response.body.issue[0].details.text).toEqual('Could not find bulk import request with id: INVALID_ID'); }); }); - test('check operationOutcome includes the number of resources when available', async () => { - await supertest(server.app).get('/4_0_1/bulkstatus/COMPLETED_REQUEST_WITH_RESOURCE_COUNT').expect(200); - const response = await supertest(server.app).get( - '/4_0_1/file/COMPLETED_REQUEST_WITH_RESOURCE_COUNT/OperationOutcome.ndjson' - ); - const data = JSON.parse(response.text); - expect(data.issue[0].details.text).toEqual( - 'Bulk import successfully completed, successfully imported 200 resources' - ); - }); }); describe('Dynamic X-Progress logic', () => { diff --git a/test/services/import.service.test.js b/test/services/import.service.test.js index 25b3164d..b8494b0d 100644 --- a/test/services/import.service.test.js +++ b/test/services/import.service.test.js @@ -1,27 +1,27 @@ const supertest = require('supertest'); const { buildConfig } = require('../../src/config/profileConfig'); const { initialize } = require('../../src/server/server'); -const validParam = require('../fixtures/fhir-resources/parameters/paramWithExport.json'); -const 
paramNoExport = require('../fixtures/fhir-resources/parameters/paramNoExport.json'); -const testParamTwoExports = require('../fixtures/fhir-resources/parameters/paramTwoExports.json'); -const testParamNoValString = require('../fixtures/fhir-resources/parameters/paramNoValueUrl.json'); +const paramOneInput = require('../fixtures/fhir-resources/parameters/paramOneInput.json'); +const paramTwoInputs = require('../fixtures/fhir-resources/parameters/paramTwoInputs.json'); +const paramNoInput = require('../fixtures/fhir-resources/parameters/paramNoInput.json'); +const paramNoInputValueUrl = require('../fixtures/fhir-resources/parameters/paramNoInputValueUrl.json'); const { SINGLE_AGENT_PROVENANCE } = require('../fixtures/provenanceFixtures'); const { client } = require('../../src/database/connection'); const { cleanUpTest } = require('../populateTestData'); let server; -describe('Testing $import with no specified measure bundle', () => { +describe('Testing $import', () => { beforeAll(async () => { const config = buildConfig(); server = initialize(config); await client.connect(); }); - test('Returns 202 on Valid Request', async () => { + it('Returns 202 on Valid Request with one input', async () => { await supertest(server.app) .post('/4_0_1/$import') - .send(validParam) + .send(paramOneInput) .set('Accept', 'application/json+fhir') .set('content-type', 'application/json+fhir') .set('x-provenance', JSON.stringify(SINGLE_AGENT_PROVENANCE)) .expect(202) .then(response => { expect(response.headers['content-location']).toBeDefined(); }); }); - test('Returns 400 on missing exportUrl', async () => { + + it('Returns 202 on Valid Request with two inputs', async () => { await supertest(server.app) .post('/4_0_1/$import') - .send(paramNoExport) + .send(paramTwoInputs) .set('Accept', 'application/json+fhir') .set('content-type', 'application/json+fhir') .set('x-provenance', JSON.stringify(SINGLE_AGENT_PROVENANCE)) - .expect(400) + .expect(202) .then(response => { - expect(response.body.resourceType).toEqual('OperationOutcome'); - expect(response.body.issue[0].details.text).toEqual('No exportUrl parameter was found.'); + expect(response.headers['content-location']).toBeDefined(); }); }); - test('FHIR Parameters object has two export URLs', async () => { + + it('Returns 400 on missing input parameter', async () => { await supertest(server.app) .post('/4_0_1/$import') - .send(testParamTwoExports) + .send(paramNoInput) .set('Accept', 'application/json+fhir') .set('content-type', 'application/json+fhir') - .set('prefer', 'respond-async') + .set('x-provenance', JSON.stringify(SINGLE_AGENT_PROVENANCE)) .expect(400) .then(response => { expect(response.body.resourceType).toEqual('OperationOutcome'); - expect(response.body.issue[0].details.text).toEqual('Expected exactly one export URL.
Received: 2'); + expect(response.body.issue[0].details.text).toEqual('No inputUrl parameters were found.'); }); }); - test('FHIR Parameters object is missing valueUrl for export URL', async () => { + it('Returns 400 for FHIR Parameters object missing valueUrl for ndjson URL', async () => { await supertest(server.app) .post('/4_0_1/$import') - .send(testParamNoValString) + .send(paramNoInputValueUrl) .set('Accept', 'application/json+fhir') .set('content-type', 'application/json+fhir') .set('prefer', 'respond-async') @@ -68,7 +69,7 @@ describe('Testing $import with no specified measure bundle', () => { .then(response => { expect(response.body.resourceType).toEqual('OperationOutcome'); expect(response.body.issue[0].details.text).toEqual( - 'Expected a valueUrl for the exportUrl, but none was found' + 'Expected a valueUrl for the inputUrl, but none were found.' ); }); }); diff --git a/test/services/measure.service.test.js b/test/services/measure.service.test.js index 83ae92d7..d0df7af4 100644 --- a/test/services/measure.service.test.js +++ b/test/services/measure.service.test.js @@ -9,9 +9,9 @@ const testGroup = require('../fixtures/fhir-resources/testGroup.json'); const testOrganization = require('../fixtures/fhir-resources/testOrganization.json'); const testOrganization2 = require('../fixtures/fhir-resources/testOrganization2.json'); const testParam = require('../fixtures/fhir-resources/parameters/paramNoExport.json'); +const paramNoInput = require('../fixtures/fhir-resources/parameters/paramNoInput.json'); const testParamWithExport = require('../fixtures/fhir-resources/parameters/paramWithExport.json'); -const testParamTwoExports = require('../fixtures/fhir-resources/parameters/paramTwoExports.json'); -const testParamNoValString = require('../fixtures/fhir-resources/parameters/paramNoValueUrl.json'); +const paramNoInputValueUrl = require('../fixtures/fhir-resources/parameters/paramNoInputValueUrl.json'); const testParamInvalidResourceType = require('../fixtures/fhir-resources/parameters/paramInvalidType.json'); const testEmptyParam = require('../fixtures/fhir-resources/parameters/emptyParam.json'); const testParamTwoMeasureReports = require('../fixtures/fhir-resources/parameters/paramTwoMeasureReports.json'); @@ -78,38 +78,24 @@ describe('measure.service', () => { }); describe('$bulk-submit-data', () => { - test('FHIR Parameters object is missing export URL returns 400', async () => { + it('Returns 400 if FHIR Parameters object is missing input URL', async () => { await supertest(server.app) .post('/4_0_1/Measure/$bulk-submit-data') - .send(testParam) - .set('Accept', 'application/json+fhir') - .set('content-type', 'application/json+fhir') - .set('prefer', 'respond-async') - .expect(400) - .then(response => { - expect(response.body.resourceType).toEqual('OperationOutcome'); - expect(response.body.issue[0].details.text).toEqual('No exportUrl parameter was found.'); - }); - }); - - test('FHIR Parameters object has two export URLs', async () => { - await supertest(server.app) - .post('/4_0_1/Measure/$bulk-submit-data') - .send(testParamTwoExports) + .send(paramNoInput) .set('Accept', 'application/json+fhir') .set('content-type', 'application/json+fhir') .set('prefer', 'respond-async') .expect(400) .then(response => { expect(response.body.resourceType).toEqual('OperationOutcome'); - expect(response.body.issue[0].details.text).toEqual('Expected exactly one export URL. 
Received: 2'); + expect(response.body.issue[0].details.text).toEqual('No inputUrl parameters were found.'); }); }); test('FHIR Parameters object is missing valueUrl for export URL', async () => { await supertest(server.app) .post('/4_0_1/Measure/$bulk-submit-data') - .send(testParamNoValString) + .send(paramNoInputValueUrl) .set('Accept', 'application/json+fhir') .set('content-type', 'application/json+fhir') .set('prefer', 'respond-async') @@ -117,7 +103,7 @@ describe('measure.service', () => { .then(response => { expect(response.body.resourceType).toEqual('OperationOutcome'); expect(response.body.issue[0].details.text).toEqual( - 'Expected a valueUrl for the exportUrl, but none was found' + 'Expected a valueUrl for the inputUrl, but none were found.' ); }); }); @@ -1025,21 +1011,6 @@ describe('measure.service', () => { }); }); - test('bulk import fails if measure bundle cannot be found', async () => { - await supertest(server.app) - .post('/4_0_1/Measure/invalid-id/$bulk-submit-data') - .send(testParam) - .set('Accept', 'application/json+fhir') - .set('content-type', 'application/json+fhir') - .set('prefer', 'respond-async') - .expect(404) - .then(response => { - expect(response.body.issue[0].code).toEqual('ResourceNotFound'); - expect(response.body.issue[0].details.text).toEqual( - 'Measure with id invalid-id does not exist in the server' - ); - }); - }); afterEach(() => { jest.clearAllMocks(); }); diff --git a/test/util/exportUtils.test.js b/test/util/exportUtils.test.js index 98531f82..4f259d1a 100644 --- a/test/util/exportUtils.test.js +++ b/test/util/exportUtils.test.js @@ -1,131 +1,49 @@ -const { retrieveExportType, retrieveExportUrl, checkExportUrlArray } = require('../../src/util/exportUtils'); -const exportWithTypeParams = require('../fixtures/fhir-resources/parameters/paramExportUrlWithTypes.json'); -const exportWithTypeAndFilterParams = require('../fixtures/fhir-resources/parameters/paramExportUrlWithTypesAndFilters.json'); -const exportWithTypeFilterParams = require('../fixtures/fhir-resources/parameters/paramExportUrlWithTypeFilter.json'); -const exportWithMultipleTypeDeclarations = require('../fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeDeclarations.json'); -const exportWithMultipleTypeFilterDeclarations = require('../fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeFilterDeclarations.json'); +const { checkInputUrlArray, retrieveInputUrls } = require('../../src/util/exportUtils'); +const paramOneInput = require('../fixtures/fhir-resources/parameters/paramOneInput.json'); +const paramTwoInputs = require('../fixtures/fhir-resources/parameters/paramTwoInputs.json'); +const paramNoInputValueUrl = require('../fixtures/fhir-resources/parameters/paramNoInputValueUrl.json'); -const ASSEMBLED_EXPORT_URL = 'http://example.com/$export?_type=Patient,Encounter,Condition'; -const ASSEMBLED_EXPORT_URL_WITH_FILTER_MULTIPLE_TYPES = - 'http://example.com/$export?_type=Patient,Encounter,Condition&_typeFilter=Encounter%3Fcode%3Ain=TEST_VALUE_SET'; -const ASSEMBLED_EXPORT_URL_WITH_FILTER = - 'http://example.com/$export?_type=Encounter&_typeFilter=Encounter%3Fcode%3Ain=TEST_VALUE_SET'; -const DYNAMIC_EXPORT_TYPE_PARAMETERS = { - resourceType: 'Parameters', - parameter: [ - { - name: 'exportUrl', - valueUrl: 'http://localhost:3001/$export' - }, - { - name: 'exportType', - valueCode: 'dynamic' - } - ] -}; -const NO_EXPORT_TYPE_PARAMETERS = { - resourceType: 'Parameters', - parameter: [ - { - name: 'exportUrl', - valueUrl: 'http://localhost:3001/$export' - } - ] -}; 
diff --git a/test/util/exportUtils.test.js b/test/util/exportUtils.test.js
index 98531f82..4f259d1a 100644
--- a/test/util/exportUtils.test.js
+++ b/test/util/exportUtils.test.js
@@ -1,131 +1,49 @@
-const { retrieveExportType, retrieveExportUrl, checkExportUrlArray } = require('../../src/util/exportUtils');
-const exportWithTypeParams = require('../fixtures/fhir-resources/parameters/paramExportUrlWithTypes.json');
-const exportWithTypeAndFilterParams = require('../fixtures/fhir-resources/parameters/paramExportUrlWithTypesAndFilters.json');
-const exportWithTypeFilterParams = require('../fixtures/fhir-resources/parameters/paramExportUrlWithTypeFilter.json');
-const exportWithMultipleTypeDeclarations = require('../fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeDeclarations.json');
-const exportWithMultipleTypeFilterDeclarations = require('../fixtures/fhir-resources/parameters/paramExportUrlMultipleTypeFilterDeclarations.json');
+const { checkInputUrlArray, retrieveInputUrls } = require('../../src/util/exportUtils');
+const paramOneInput = require('../fixtures/fhir-resources/parameters/paramOneInput.json');
+const paramTwoInputs = require('../fixtures/fhir-resources/parameters/paramTwoInputs.json');
+const paramNoInputValueUrl = require('../fixtures/fhir-resources/parameters/paramNoInputValueUrl.json');
 
-const ASSEMBLED_EXPORT_URL = 'http://example.com/$export?_type=Patient,Encounter,Condition';
-const ASSEMBLED_EXPORT_URL_WITH_FILTER_MULTIPLE_TYPES =
-  'http://example.com/$export?_type=Patient,Encounter,Condition&_typeFilter=Encounter%3Fcode%3Ain=TEST_VALUE_SET';
-const ASSEMBLED_EXPORT_URL_WITH_FILTER =
-  'http://example.com/$export?_type=Encounter&_typeFilter=Encounter%3Fcode%3Ain=TEST_VALUE_SET';
-const DYNAMIC_EXPORT_TYPE_PARAMETERS = {
-  resourceType: 'Parameters',
-  parameter: [
-    {
-      name: 'exportUrl',
-      valueUrl: 'http://localhost:3001/$export'
-    },
-    {
-      name: 'exportType',
-      valueCode: 'dynamic'
-    }
-  ]
-};
-const NO_EXPORT_TYPE_PARAMETERS = {
-  resourceType: 'Parameters',
-  parameter: [
-    {
-      name: 'exportUrl',
-      valueUrl: 'http://localhost:3001/$export'
-    }
-  ]
-};
-const STATIC_EXPORT_TYPE_PARAMETERS = {
-  resourceType: 'Parameters',
-  parameter: [
-    {
-      name: 'exportUrl',
-      valueUrl: 'http://localhost:3001/$export'
-    },
-    {
-      name: 'exportType',
-      valueCode: 'static'
-    }
-  ]
-};
-
-describe('Test export Url configuration with type and typeFilter parameters', () => {
-  test('retrieveExportUrl successfully includes type params as comma-delimited string', () => {
-    expect(retrieveExportUrl(exportWithTypeParams.parameter)).toEqual(ASSEMBLED_EXPORT_URL);
-  });
-
-  test('retrieveExportUrl successfully includes type and typeFilter params from bulk submit data request', () => {
-    expect(retrieveExportUrl(exportWithTypeAndFilterParams.parameter)).toEqual(
-      ASSEMBLED_EXPORT_URL_WITH_FILTER_MULTIPLE_TYPES
-    );
+describe('Test retrieveInputUrls', () => {
+  it('returns an array with one object that contains the input resource type and ndjson url', () => {
+    expect(retrieveInputUrls(paramOneInput.parameter)).toEqual([
+      { type: 'Example1', url: 'http://example.com/Example1.ndjson' }
+    ]);
   });
 
-  test('retrieveExportUrl successfully includes typeFilter param when type param already included in export url', () => {
-    expect(retrieveExportUrl(exportWithTypeFilterParams.parameter)).toEqual(ASSEMBLED_EXPORT_URL_WITH_FILTER);
+  it('returns an array with two objects that contain the input resource types and ndjson urls', () => {
+    expect(retrieveInputUrls(paramTwoInputs.parameter)).toEqual([
+      { type: 'Example1', url: 'http://example.com/Example1.ndjson' },
+      { type: 'Example2', url: 'http://example.com/Example2.ndjson' }
+    ]);
   });
+});
 
-  test('console.warn thrown and _type parameter (from param array) not added when _type is already appended to exportUrl', () => {
-    const warningSpy = jest.spyOn(global.console, 'warn');
-    expect(retrieveExportUrl(exportWithMultipleTypeDeclarations.parameter)).toEqual(ASSEMBLED_EXPORT_URL);
-    expect(warningSpy).toHaveBeenCalled();
+describe('Test checkInputUrlArray', () => {
+  it('does not throw error for valid input', () => {
+    expect(checkInputUrlArray(paramOneInput.parameter)).toBeUndefined();
   });
 
-  test('console.warn thrown and _typeFilter parameter (from param array) not added when _typeFilter is already appended to exportUrl', () => {
-    const warningSpy = jest.spyOn(global.console, 'warn');
-    expect(retrieveExportUrl(exportWithMultipleTypeFilterDeclarations.parameter)).toEqual(
-      ASSEMBLED_EXPORT_URL_WITH_FILTER
-    );
-    expect(warningSpy).toHaveBeenCalled();
+  it('does not throw error for valid input with multiple inputUrls', () => {
+    expect(checkInputUrlArray(paramTwoInputs.parameter)).toBeUndefined();
   });
 
-  afterEach(() => {
-    jest.clearAllMocks();
-  });
-});
-
-describe('Test checkExportUrlArray', () => {
-  test('does not throw error for valid input', () => {
-    expect(checkExportUrlArray([{ name: 'exportUrl', valueUrl: 'http://www.example.com' }])).toBeUndefined();
-  });
-  test('throws BadRequest error for missing export url parameter', () => {
+  it('throws BadRequest error for missing inputUrl parameter', () => {
     try {
-      expect(checkExportUrlArray([]));
+      expect(checkInputUrlArray([]));
       expect.fail('checkExportUrl failed to throw error for missing export url parameter');
     } catch (e) {
       expect(e.statusCode).toEqual(400);
-      expect(e.issue[0].details.text).toEqual('No exportUrl parameter was found.');
+      expect(e.issue[0].details.text).toEqual('No inputUrl parameters were found.');
     }
   });
-  test('throws BadRequest error for multiple export urls', () => {
-    try {
-      expect(
-        checkExportUrlArray([
-          { name: 'exportUrl', valueUrl: 'http://www.example.com' },
-          { name: 'exportUrl', valueUrl: 'http://www.example2.com' }
-        ])
-      );
-      expect.fail('checkExportUrl failed to throw error for multiple export url parameters');
-    } catch (e) {
-      expect(e.statusCode).toEqual(400);
-      expect(e.issue[0].details.text).toEqual('Expected exactly one export URL. Received: 2');
-    }
-  });
-  test('throws BadRequest error for parameter with no valueUrl', () => {
+
+  it('throws BadRequest error for parameter with no valueUrl', () => {
     try {
-      expect(checkExportUrlArray([{ name: 'exportUrl' }]));
-      expect.fail('checkExportUrl failed to throw error for parameter with no valueUrl');
+      expect(checkInputUrlArray(paramNoInputValueUrl.parameter));
+      expect.fail('checkInputUrlArray failed to throw error for parameter with no valueUrl');
    } catch (e) {
       expect(e.statusCode).toEqual(400);
-      expect(e.issue[0].details.text).toEqual('Expected a valueUrl for the exportUrl, but none was found');
+      expect(e.issue[0].details.text).toEqual('Expected a valueUrl for the inputUrl, but none were found.');
     }
   });
 });
-
-describe('Test retrieveExportType', () => {
-  test('returns dynamic if the exportType is dynamic', () => {
-    expect(retrieveExportType(DYNAMIC_EXPORT_TYPE_PARAMETERS.parameter)).toEqual('dynamic');
-  });
-  test('returns dynamic if there is no exportType', () => {
-    expect(retrieveExportType(NO_EXPORT_TYPE_PARAMETERS.parameter)).toEqual('dynamic');
-  });
-  test('returns static if the exportType is static', () => {
-    expect(retrieveExportType(STATIC_EXPORT_TYPE_PARAMETERS.parameter)).toEqual('static');
-  });
-});
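This diff shows only the tests, so for orientation, here is a minimal sketch of `checkInputUrlArray` and `retrieveInputUrls` that would satisfy the expectations above. It assumes the resource type is derived from the ndjson file name (`Example1.ndjson` becomes `'Example1'`) and that thrown errors only need the `statusCode` and `issue[0].details.text` shape the tests assert on; the actual `src/util/exportUtils.js` may differ:

```js
// Minimal sketch of the helpers exercised by test/util/exportUtils.test.js.
// This is not the repository's implementation; it is reverse-engineered from
// the test expectations above.

// Build an error whose shape matches what the tests assert on:
// statusCode and issue[0].details.text.
function badRequest(message) {
  const err = new Error(message);
  err.statusCode = 400;
  err.issue = [{ severity: 'error', code: 'BadRequest', details: { text: message } }];
  return err;
}

// Throw a 400 if no inputUrl parameters exist, or if any is missing a valueUrl.
function checkInputUrlArray(parameters) {
  const inputUrls = parameters.filter(p => p.name === 'inputUrl');
  if (inputUrls.length === 0) {
    throw badRequest('No inputUrl parameters were found.');
  }
  if (inputUrls.some(p => !p.valueUrl)) {
    throw badRequest('Expected a valueUrl for the inputUrl, but none were found.');
  }
}

// Map each inputUrl parameter to { type, url }, deriving the resource type from
// the ndjson file name (an assumption based on the fixture URLs in the tests).
function retrieveInputUrls(parameters) {
  return parameters
    .filter(p => p.name === 'inputUrl')
    .map(p => {
      const fileName = p.valueUrl.split('/').pop(); // e.g. 'Example1.ndjson'
      return { type: fileName.replace(/\.ndjson$/, ''), url: p.valueUrl };
    });
}

module.exports = { checkInputUrlArray, retrieveInputUrls };
```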