From eb9b5e25ed942d9a16c21b3aee264328d414a260 Mon Sep 17 00:00:00 2001 From: kikislater Date: Mon, 16 Sep 2024 16:21:12 +0000 Subject: [PATCH] Apptainer/Singularity, more output, sync to NodeODM Part 2 --- .dockerignore | 3 + .gitignore | 6 +- Dockerfile | 4 +- README.md | 4 +- apptainer.def | 18 ++ config.js | 5 + data/.gitignore | 1 - index.js | 5 + libs/Directories.js | 10 +- libs/ProgressReceiver.js | 10 +- libs/S3.js | 213 +++++++++++++++-------- libs/Task.js | 245 ++++++++++++++++++++------- libs/TaskManager.js | 39 ++++- libs/apps.js | 34 ++++ libs/logger.js | 15 +- libs/odmInfo.js | 47 ++++-- libs/odmRunner.js | 99 ++++++----- libs/processRunner.js | 64 +++++-- libs/statusCodes.js | 10 +- libs/taskNew.js | 355 +++++++++++++++++++++++---------------- libs/utils.js | 3 +- libs/ziputils.js | 38 +++++ 22 files changed, 847 insertions(+), 381 deletions(-) create mode 100644 apptainer.def delete mode 100644 data/.gitignore create mode 100644 libs/apps.js create mode 100644 libs/ziputils.js diff --git a/.dockerignore b/.dockerignore index 0cc72dd..a51b8e3 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,3 +1,6 @@ node_modules tests tmp +data +nodemicmac.sif +node diff --git a/.gitignore b/.gitignore index f9d5184..e0a2791 100644 --- a/.gitignore +++ b/.gitignore @@ -50,4 +50,8 @@ package-lock.json micmac/build micmac/build/* -*.pyc \ No newline at end of file +*.pyc + +data +node +nodemicmac.sif diff --git a/Dockerfile b/Dockerfile index 0adcf2c..5c72c65 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ RUN apt-get update RUN apt-get install -y -qq --no-install-recommends software-properties-common build-essential cmake git \ exiv2 libimage-exiftool-perl proj-bin gdal-bin figlet imagemagick pdal libpdal-dev \ libboost-all-dev libtbb-dev libssl-dev libcurl4-openssl-dev pkg-config libpth-dev \ - curl libx11-dev python3-pip python3-setuptools python3-shapely apt-utils + curl libx11-dev python3-pip python3-setuptools python3-shapely apt-utils p7zip-full RUN pip3 install -U shyaml RUN pip3 install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org appsettings @@ -23,7 +23,7 @@ RUN pip3 install --trusted-host pypi.org --trusted-host pypi.python.org --truste RUN curl --silent --location https://deb.nodesource.com/setup_14.x | bash - RUN apt-get install -y nodejs -RUN npm install -g nodemon +RUN npm install --production # Build Entwine WORKDIR "/staging" diff --git a/README.md b/README.md index fd725f3..4d959e2 100644 --- a/README.md +++ b/README.md @@ -277,11 +277,11 @@ cd build cmake -DCMAKE_BUILD_TYPE=Release -DLASZIP_INCLUDE_DIRS=/staging/LAStools/LASzip/dll -DLASZIP_LIBRARY=/staging/LAStools/LASzip/build/src/liblaszip.a .. make && sudo make install ``` -2] Install gdal2tiles.py script, node.js and npm dependencies +2] Install gdal2tiles.py script, node.js, npm dependencies and 7zip: ```bash sudo curl --silent --location https://deb.nodesource.com/setup_6.x | sudo bash - -sudo apt-get install -y nodejs python-gdal +sudo apt-get install -y nodejs python-gdal p7zip-full git clone hhttps://github.com/OpenDroneMap/NodeMICMAC.git cd NodeMICMAC npm install diff --git a/apptainer.def b/apptainer.def new file mode 100644 index 0000000..76828b2 --- /dev/null +++ b/apptainer.def @@ -0,0 +1,18 @@ +Bootstrap: docker +From: opendronemap/nodemicmac:master + +%labels + Author Sylvain POULAIN + Version 1.0 + Description Apptainer container for NodeMICMAC and WebGIS with Entwine and MicMac. 
+ +%environment + export PATH=$PATH:/code/micmac/bin + export python=$(which python3) + +%runscript + cd "/var/www" + exec /usr/bin/node /var/www/index.js "$@" +%startscript + cd "/var/www" + exec /usr/bin/node /var/www/index.js "$@" diff --git a/config.js b/config.js index eb8e33e..f7aec6a 100644 --- a/config.js +++ b/config.js @@ -20,6 +20,7 @@ along with this program. If not, see . let fs = require('fs'); let argv = require('minimist')(process.argv.slice(2)); let utils = require('./libs/utils'); +const spawnSync = require('child_process').spawnSync; if (argv.help){ console.log(` @@ -113,4 +114,8 @@ config.s3SignatureVersion = argv.s3_signature_version || fromConfigFile("s3Signa config.s3UploadEverything = argv.s3_upload_everything || fromConfigFile("s3UploadEverything", false); config.maxConcurrency = parseInt(argv.max_concurrency || fromConfigFile("maxConcurrency", 0)); +// Detect 7z availability +const childProcess = spawnSync("7z", ['--help']); +config.has7z = childProcess.status === 0; + module.exports = config; diff --git a/data/.gitignore b/data/.gitignore deleted file mode 100644 index f59ec20..0000000 --- a/data/.gitignore +++ /dev/null @@ -1 +0,0 @@ -* \ No newline at end of file diff --git a/index.js b/index.js index 4460d0e..a6ceae0 100644 --- a/index.js +++ b/index.js @@ -873,6 +873,11 @@ if (config.test) { if (config.testDropUploads) logger.info("Uploads will drop at random"); } + +if (!config.has7z){ + logger.warn("The 7z program is not installed, falling back to legacy (zipping will be slower)"); +} + let commands = [ cb => odmInfo.initialize(cb), cb => auth.initialize(cb), diff --git a/libs/Directories.js b/libs/Directories.js index c63e387..88a7208 100644 --- a/libs/Directories.js +++ b/libs/Directories.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; @@ -25,4 +25,4 @@ class Directories{ } } -module.exports = Directories; \ No newline at end of file +module.exports = Directories; diff --git a/libs/ProgressReceiver.js b/libs/ProgressReceiver.js index aa33467..9a6caea 100644 --- a/libs/ProgressReceiver.js +++ b/libs/ProgressReceiver.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
@@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; @@ -21,7 +21,7 @@ const dgram = require('dgram'); module.exports = class ProgressReceiver{ constructor(){ - const server = dgram.createSocket('udp4'); + const server = dgram.createSocket({type: 'udp4', reuseAddr: true}); this.callbacks = []; server.on('error', (err) => { diff --git a/libs/S3.js b/libs/S3.js index f79a23d..160163f 100644 --- a/libs/S3.js +++ b/libs/S3.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; @@ -23,6 +23,8 @@ const glob = require('glob'); const path = require('path'); const logger = require('./logger'); const config = require('../config'); +const https = require('https'); +const si = require('systeminformation'); let s3 = null; @@ -32,14 +34,34 @@ module.exports = { }, initialize: function(cb){ - if (config.s3Endpoint && config.s3Bucket && config.s3AccessKey && config.s3SecretKey){ + if (config.s3Endpoint && config.s3Bucket){ + if (config.s3IgnoreSSL){ + AWS.config.update({ + httpOptions: { + agent: new https.Agent({ + rejectUnauthorized: false + }) + } + }); + } + const spacesEndpoint = new AWS.Endpoint(config.s3Endpoint); - s3 = new AWS.S3({ + + const s3Config = { endpoint: spacesEndpoint, signatureVersion: ('v' + config.s3SignatureVersion) || 'v4', - accessKeyId: config.s3AccessKey, - secretAccessKey: config.s3SecretKey - }); + s3ForcePathStyle: config.s3ForcePathStyle, + }; + + // If we are not using IAM roles then we need to pass access key and secret key in our config + if (config.s3AccessKey && config.s3SecretKey) { + s3Config['accessKeyId'] = config.s3AccessKey; + s3Config['secretAccessKey'] = config.s3SecretKey; + }else{ + logger.info("Secret Key and Access ID not passed. Using the IAM role"); + }; + + s3 = new AWS.S3(s3Config); // Test connection s3.putObject({ @@ -51,7 +73,7 @@ module.exports = { logger.info("Connected to S3"); cb(); }else{ - cb(new Error("Cannot connect to S3. Check your S3 configuration: " + err.code)); + cb(new Error(`Cannot connect to S3. 
Check your S3 configuration: ${err.message} (${err.code})`)); } }); }else cb(); @@ -66,80 +88,125 @@ module.exports = { uploadPaths: function(srcFolder, bucket, dstFolder, paths, cb, onOutput){ if (!s3) throw new Error("S3 is not initialized"); - const PARALLEL_UPLOADS = 5; + const PARALLEL_UPLOADS = 4; // Upload these many files at the same time const MAX_RETRIES = 6; + const MIN_PART_SIZE = 5 * 1024 * 1024; - const q = async.queue((file, done) => { - logger.debug(`Uploading ${file.src} --> ${file.dest}`); - s3.upload({ - Bucket: bucket, - Key: file.dest, - Body: fs.createReadStream(file.src), - ACL: 'public-read' - }, {partSize: 5 * 1024 * 1024, queueSize: 1}, err => { - if (err){ - logger.debug(err); - const msg = `Cannot upload file to S3: ${err.code}, retrying... ${file.retries}`; - if (onOutput) onOutput(msg); - if (file.retries < MAX_RETRIES){ - file.retries++; - setTimeout(() => { - q.push(file, errHandler); - done(); - }, (2 ** file.retries) * 1000); - }else{ - done(new Error(msg)); - } - }else done(); - }); - }, PARALLEL_UPLOADS); + // Get available memory, as on low-powered machines + // we might not be able to upload many large chunks at once + si.mem(memory => { + let concurrency = 10; // Upload these many parts per file at the same time + let progress = {}; - const errHandler = err => { - if (err){ - q.kill(); - if (!cbCalled){ - cbCalled = true; - cb(err); - } + let partSize = 100 * 1024 * 1024; + let memoryRequirement = partSize * concurrency * PARALLEL_UPLOADS; // Conservative + + // Try reducing concurrency first + while(memoryRequirement > memory.available && concurrency > 1){ + concurrency--; + memoryRequirement = partSize * concurrency * PARALLEL_UPLOADS; } - }; - let uploadList = []; + // Try reducing partSize afterwards + while(memoryRequirement > memory.available && partSize > MIN_PART_SIZE){ + partSize = Math.max(MIN_PART_SIZE, Math.floor(partSize * 0.80)); + memoryRequirement = partSize * concurrency * PARALLEL_UPLOADS; + } - paths.forEach(p => { - const fullPath = path.join(srcFolder, p); - - // Skip non-existing items - if (!fs.existsSync(fullPath)) return; + const q = async.queue((file, done) => { + logger.debug(`Uploading ${file.src} --> ${file.dest}`); + const filename = path.basename(file.dest); + progress[filename] = 0; - if (fs.lstatSync(fullPath).isDirectory()){ - let globPaths = glob.sync(`${p}/**`, { cwd: srcFolder, nodir: true, nosort: true }); + let uploadCfg = { + Bucket: bucket, + Key: file.dest, + Body: fs.createReadStream(file.src) + } + + if (config.s3ACL != "none") { + uploadCfg.ACL = config.s3ACL; + } - globPaths.forEach(gp => { + s3.upload(uploadCfg, {partSize, queueSize: concurrency}, err => { + if (err){ + logger.debug(err); + const msg = `Cannot upload file to S3: ${err.message} (${err.code}), retrying... ${file.retries}`; + if (onOutput) onOutput(msg); + if (file.retries < MAX_RETRIES){ + file.retries++; + concurrency = Math.max(1, Math.floor(concurrency * 0.66)); + progress[filename] = 0; + + setTimeout(() => { + q.push(file, errHandler); + done(); + }, (2 ** file.retries) * 1000); + }else{ + done(new Error(msg)); + } + }else done(); + }).on('httpUploadProgress', p => { + const perc = Math.round((p.loaded / p.total) * 100) + if (perc % 5 == 0 && progress[filename] < perc){ + progress[filename] = perc; + if (onOutput) { + onOutput(`Uploading ${filename}... 
${progress[filename]}%`); + if (progress[filename] == 100){ + onOutput(`Finalizing ${filename} upload, this could take a bit...`); + } + } + } + }); + }, PARALLEL_UPLOADS); + + const errHandler = err => { + if (err){ + q.kill(); + if (!cbCalled){ + cbCalled = true; + cb(err); + } + } + }; + + let uploadList = []; + + paths.forEach(p => { + const fullPath = path.join(srcFolder, p); + + // Skip non-existing items + if (!fs.existsSync(fullPath)) return; + + if (fs.lstatSync(fullPath).isDirectory()){ + let globPaths = glob.sync(`${p}/**`, { cwd: srcFolder, nodir: true, nosort: true }); + + globPaths.forEach(gp => { + uploadList.push({ + src: path.join(srcFolder, gp), + dest: path.join(dstFolder, gp), + retries: 0 + }); + }); + }else{ uploadList.push({ - src: path.join(srcFolder, gp), - dest: path.join(dstFolder, gp), + src: fullPath, + dest: path.join(dstFolder, p), retries: 0 }); - }); - }else{ - uploadList.push({ - src: fullPath, - dest: path.join(dstFolder, p), - retries: 0 - }); - } + } + }); + + let cbCalled = false; + q.drain = () => { + if (!cbCalled){ + cbCalled = true; + cb(); + } + }; + + if (onOutput) onOutput(`Uploading ${uploadList.length} files to S3...`); + q.push(uploadList, errHandler); }); - - let cbCalled = false; - q.drain = () => { - if (!cbCalled){ - cbCalled = true; - cb(); - } - }; - - if (onOutput) onOutput(`Uploading ${uploadList.length} files to S3...`); - q.push(uploadList, errHandler); } }; diff --git a/libs/Task.js b/libs/Task.js index 296debb..2fcb13e 100644 --- a/libs/Task.js +++ b/libs/Task.js @@ -19,44 +19,70 @@ along with this program. If not, see . const config = require('../config'); const async = require('async'); +const os = require('os'); const assert = require('assert'); const logger = require('./logger'); const fs = require('fs'); -const glob = require("glob"); const path = require('path'); const rmdir = require('rimraf'); const odmRunner = require('./odmRunner'); const odmInfo = require('./odmInfo'); const processRunner = require('./processRunner'); -const archiver = require('archiver'); const Directories = require('./Directories'); const kill = require('tree-kill'); const S3 = require('./S3'); const request = require('request'); const utils = require('./utils'); +const archiver = require('archiver'); const statusCodes = require('./statusCodes'); module.exports = class Task{ - constructor(uuid, name, options = [], webhook = null, skipPostProcessing = false, outputs = [], dateCreated = new Date().getTime(), done = () => {}){ + constructor(uuid, name, options = [], webhook = null, skipPostProcessing = false, outputs = [], dateCreated = new Date().getTime(), imagesCountEstimate = -1){ assert(uuid !== undefined, "uuid must be set"); - assert(done !== undefined, "ready must be set"); this.uuid = uuid; this.name = name !== "" ? name : "Task of " + (new Date()).toISOString(); this.dateCreated = isNaN(parseInt(dateCreated)) ? 
new Date().getTime() : parseInt(dateCreated); + this.dateStarted = 0; this.processingTime = -1; this.setStatus(statusCodes.QUEUED); this.options = options; this.gcpFiles = []; + this.geoFiles = []; + this.imageGroupsFiles = []; this.output = []; this.runningProcesses = []; this.webhook = webhook; this.skipPostProcessing = skipPostProcessing; this.outputs = utils.parseUnsafePathsList(outputs); this.progress = 0; - - async.series([ + this.imagesCountEstimate = imagesCountEstimate; + this.initialized = false; + } + + initialize(done, additionalSteps = []){ + async.series(additionalSteps.concat([ + // Handle post-processing options logic + cb => { + // If we need to post process results + // if pc-ept is supported (build entwine point cloud) + // we automatically add the pc-ept option to the task options by default + if (this.skipPostProcessing) cb(); + else{ + odmInfo.supportsOption("pc-ept", (err, supported) => { + if (err){ + console.warn(`Cannot check for supported option pc-ept: ${err}`); + }else if (supported){ + if (!this.options.find(opt => opt.name === "pc-ept")){ + this.options.push({ name: 'pc-ept', value: true }); + } + } + cb(); + }); + } + }, + // Read images info cb => { fs.readdir(this.getImagesFolderPath(), (err, files) => { @@ -75,44 +101,52 @@ module.exports = class Task{ if (err) cb(err); else{ files.forEach(file => { - if (/\.txt$/gi.test(file)){ + if (/^geo\.txt$/gi.test(file)){ + this.geoFiles.push(file); + }else if (/^image_groups\.txt$/gi.test(file)){ + this.imageGroupsFiles.push(file); + }else if (/\.txt$/gi.test(file)){ this.gcpFiles.push(file); } }); logger.debug(`Found ${this.gcpFiles.length} GCP files (${this.gcpFiles.join(" ")}) for ${this.uuid}`); + logger.debug(`Found ${this.geoFiles.length} GEO files (${this.geoFiles.join(" ")}) for ${this.uuid}`); + logger.debug(`Found ${this.imageGroupsFiles.length} image groups files (${this.imageGroupsFiles.join(" ")}) for ${this.uuid}`); cb(null); } }); } - ], err => { + ]), err => { + this.initialized = true; done(err, this); }); } static CreateFromSerialized(taskJson, done){ - new Task(taskJson.uuid, + const task = new Task(taskJson.uuid, taskJson.name, - taskJson.options, + taskJson.options, taskJson.webhook, taskJson.skipPostProcessing, taskJson.outputs, - taskJson.dateCreated, - (err, task) => { - if (err) done(err); - else{ - // Override default values with those - // provided in the taskJson - for (let k in taskJson){ - task[k] = taskJson[k]; - } - - // Tasks that were running should be put back to QUEUED state - if (task.status.code === statusCodes.RUNNING){ - task.status.code = statusCodes.QUEUED; - } - done(null, task); + taskJson.dateCreated); + + task.initialize((err, task) => { + if (err) done(err); + else{ + // Override default values with those + // provided in the taskJson + for (let k in taskJson){ + task[k] = taskJson[k]; } - }); + + // Tasks that were running should be put back to QUEUED state + if (task.status.code === statusCodes.RUNNING){ + task.status.code = statusCodes.QUEUED; + } + done(null, task); + } + }); } // Get path where images are stored for this task @@ -138,13 +172,6 @@ module.exports = class Task{ getAssetsArchivePath(filename){ if (filename == 'all.zip'){ // OK, do nothing - }else if (filename == 'orthophoto.tif'){ - if (config.test){ - if (config.testSkipOrthophotos) return false; - else filename = path.join('..', '..', 'processing_results', 'odm_orthophoto', `odm_${filename}`); - }else{ - filename = path.join('odm_orthophoto', `odm_${filename}`); - } }else{ return false; // Invalid 
} @@ -208,6 +235,10 @@ module.exports = class Task{ return this.status.code === statusCodes.CANCELED; } + isRunning(){ + return this.status.code === statusCodes.RUNNING; + } + // Cancels the current task (unless it's already canceled) cancel(cb){ if (this.status.code !== statusCodes.CANCELED){ @@ -245,6 +276,40 @@ module.exports = class Task{ const postProcess = () => { const createZipArchive = (outputFilename, files) => { + return (done) => { + this.output.push(`Compressing ${outputFilename}\n`); + + const zipFile = path.resolve(this.getAssetsArchivePath(outputFilename)); + const sourcePath = !config.test ? + this.getProjectFolderPath() : + path.join("tests", "processing_results"); + + const pathsToArchive = []; + files.forEach(f => { + if (fs.existsSync(path.join(sourcePath, f))){ + pathsToArchive.push(f); + } + }); + + processRunner.sevenZip({ + destination: zipFile, + pathsToArchive, + cwd: sourcePath + }, (err, code, _) => { + if (err){ + logger.error(`Could not archive .zip file: ${err.message}`); + done(err); + }else{ + if (code === 0){ + this.updateProgress(97); + done(); + }else done(new Error(`Could not archive .zip file, 7z exited with code ${code}`)); + } + }); + }; + }; + + const createZipArchiveLegacy = (outputFilename, files) => { return (done) => { this.output.push(`Compressing ${outputFilename}\n`); @@ -323,13 +388,13 @@ module.exports = class Task{ this.runningProcesses.push( processRunner.runPostProcessingScript({ projectFolderPath: this.getProjectFolderPath() - }, (err, code, signal) => { + }, (err, code, _) => { if (err) done(err); else{ if (code === 0){ this.updateProgress(93); done(); - }else done(new Error(`Process exited with code ${code}`)); + }else done(new Error(`Postprocessing failed (${code})`)); } }, output => { this.output.push(output); @@ -338,11 +403,29 @@ module.exports = class Task{ }; }; + const saveTaskOutput = (destination) => { + return (done) => { + fs.writeFile(destination, this.output.join("\n"), err => { + if (err) logger.info(`Cannot write log at ${destination}, skipping...`); + done(); + }); + }; + } + // All paths are relative to the project directory (./data//) let allPaths = ['odm_orthophoto/odm_orthophoto.tif', 'odm_orthophoto/odm_orthophoto.mbtiles', 'odm_georeferencing', 'odm_texturing', 'odm_dem/dsm.tif', 'odm_dem/dtm.tif', 'dsm_tiles', 'dtm_tiles', - 'orthophoto_tiles', 'potree_pointcloud', 'entwine_pointcloud', 'images.json']; + 'orthophoto_tiles', 'potree_pointcloud', 'entwine_pointcloud', 'task_output.txt', 'log.json']; +// let allPaths = ['odm_orthophoto/odm_orthophoto.tif', +// 'odm_orthophoto/odm_orthophoto.png', +// 'odm_orthophoto/odm_orthophoto.mbtiles', +// 'odm_georeferencing', 'odm_texturing', +// 'odm_dem/dsm.tif', 'odm_dem/dtm.tif', 'dsm_tiles', 'dtm_tiles', +// 'orthophoto_tiles', 'potree_pointcloud', 'entwine_pointcloud', +// 'images.json', 'cameras.json', +// 'task_output.txt', +// 'odm_report']; // Did the user request different outputs than the default? if (this.outputs.length > 0) allPaths = this.outputs; @@ -380,19 +463,30 @@ module.exports = class Task{ } - if (!this.skipPostProcessing) tasks.push(runPostProcessingScript()); - tasks.push(createZipArchive('all.zip', allPaths)); + // postprocess.sh is still here for legacy/backward compatibility + // purposes, but we might remove it in the future. The new logic + // instructs the processing engine to do the necessary processing + // of outputs without post processing steps (build EPT). 
+ // We're leaving it here only for Linux/docker setups, but will not + // be triggered on Windows. + if (os.platform() !== "win32" && !this.skipPostProcessing){ + tasks.push(runPostProcessingScript()); + } + + const taskOutputFile = path.join(this.getProjectFolderPath(), 'task_output.txt'); + tasks.push(saveTaskOutput(taskOutputFile)); + + const archiveFunc = config.has7z ? createZipArchive : createZipArchiveLegacy; + tasks.push(archiveFunc('all.zip', allPaths)); // Upload to S3 all paths + all.zip file (if config says so) if (S3.enabled()){ tasks.push((done) => { let s3Paths; - if (config.test){ - s3Paths = ['all.zip']; // During testing only upload all.zip - }else if (config.s3UploadEverything){ + if (config.s3UploadEverything){ s3Paths = ['all.zip'].concat(allPaths); }else{ - s3Paths = ['all.zip', 'odm_orthophoto/odm_orthophoto.tif']; + s3Paths = ['all.zip']; } S3.uploadPaths(this.getProjectFolderPath(), config.s3Bucket, this.uuid, s3Paths, @@ -416,6 +510,7 @@ module.exports = class Task{ if (this.status.code === statusCodes.QUEUED){ this.startTrackingProcessingTime(); + this.dateStarted = new Date().getTime(); this.setStatus(statusCodes.RUNNING); let runnerOptions = this.options.reduce((result, opt) => { @@ -428,6 +523,12 @@ module.exports = class Task{ if (this.gcpFiles.length > 0){ runnerOptions.gcp = fs.realpathSync(path.join(this.getGcpFolderPath(), this.gcpFiles[0])); } + if (this.geoFiles.length > 0){ + runnerOptions.geo = fs.realpathSync(path.join(this.getGcpFolderPath(), this.geoFiles[0])); + } + if (this.imageGroupsFiles.length > 0){ + runnerOptions["split-image-groups"] = fs.realpathSync(path.join(this.getGcpFolderPath(), this.imageGroupsFiles[0])); + } this.runningProcesses.push(odmRunner.run(runnerOptions, this.uuid, (err, code, signal) => { if (err){ @@ -439,7 +540,27 @@ module.exports = class Task{ if (code === 0){ postProcess(); }else{ - this.setStatus(statusCodes.FAILED, {errorMessage: `Process exited with code ${code}`}); + let errorMessage = ""; + switch(code){ + case 1: + case 139: + case 134: + errorMessage = `Cannot process dataset`; + break; + case 137: + errorMessage = `Not enough memory`; + break; + case 132: + errorMessage = `Unsupported CPU`; + break; + case 3: + errorMessage = `Installation issue`; + break; + default: + errorMessage = `Processing failed (${code})`; + break; + } + this.setStatus(statusCodes.FAILED, { errorMessage }); finished(); } }else{ @@ -466,10 +587,12 @@ module.exports = class Task{ // Re-executes the task (by setting it's state back to QUEUED) // Only tasks that have been canceled, completed or have failed can be restarted. restart(options, cb){ - if ([statusCodes.CANCELED, statusCodes.FAILED, statusCodes.COMPLETED].indexOf(this.status.code) !== -1){ + if ([statusCodes.CANCELED, statusCodes.FAILED, statusCodes.COMPLETED].indexOf(this.status.code) !== -1 && this.initialized){ this.setStatus(statusCodes.QUEUED); this.dateCreated = new Date().getTime(); + this.dateStarted = 0; this.output = []; + this.progress = 0; this.stopTrackingProcessingTime(true); if (options !== undefined) this.options = options; cb(null); @@ -487,7 +610,7 @@ module.exports = class Task{ processingTime: this.processingTime, status: this.status, options: this.options, - imagesCount: this.images.length, + imagesCount: this.images !== undefined ? 
this.images.length : this.imagesCountEstimate, progress: this.progress }; } @@ -501,28 +624,21 @@ module.exports = class Task{ // Reads the contents of the tasks's // images.json and returns its JSON representation readImagesDatabase(callback){ - const imagesDbPath = !config.test ? + const imagesDbPath = !config.test ? path.join(this.getProjectFolderPath(), 'images.json') : path.join('tests', 'processing_results', 'images.json'); - - try { - if (fs.existsSync(imagesDbPath)) { - fs.readFile(imagesDbPath, 'utf8', (err, data) => { - if (err) callback(err); - else{ - try{ - const json = JSON.parse(data); - callback(null, json); - }catch(e){ - callback(e); - } - } - }); + + fs.readFile(imagesDbPath, 'utf8', (err, data) => { + if (err) callback(err); + else{ + try{ + const json = JSON.parse(data); + callback(null, json); + }catch(e){ + callback(e); + } } - } catch(err) { - logger.info('images.json doesn\'t exist:' + err); - callback(null, JSON.parse('{}')); - } + }); } callWebhooks(){ @@ -569,6 +685,7 @@ module.exports = class Task{ uuid: this.uuid, name: this.name, dateCreated: this.dateCreated, + dateStarted: this.dateStarted, status: this.status, options: this.options, webhook: this.webhook, diff --git a/libs/TaskManager.js b/libs/TaskManager.js index 540681e..8ffa42e 100644 --- a/libs/TaskManager.js +++ b/libs/TaskManager.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; @@ -59,6 +59,13 @@ class TaskManager{ this.dumpTaskList(); this.removeStaleUploads(); }); + + if (config.maxRuntime > 0){ + // Every minute + schedule.scheduleJob('* * * * *', () => { + this.checkTimeouts(); + }); + } cb(); } @@ -151,7 +158,8 @@ class TaskManager{ try{ tasks = JSON.parse(data.toString()); }catch(e){ - done(new Error(`Could not load task list. It looks like the ${TASKS_DUMP_FILE} is corrupted (${e.message}). Please manually delete the file and try again.`)); + logger.warn(`Could not load task list. It looks like the ${TASKS_DUMP_FILE} is corrupted (${e.message}).`); + if (done !== undefined) done(); return; } @@ -177,7 +185,7 @@ class TaskManager{ // Finds the first QUEUED task. 
findNextTaskToProcess(){ for (let uuid in this.tasks){ - if (this.tasks[uuid].getStatus() === statusCodes.QUEUED){ + if (this.tasks[uuid].getStatus() === statusCodes.QUEUED && this.tasks[uuid].initialized){ return this.tasks[uuid]; } } @@ -306,6 +314,23 @@ class TaskManager{ } return count; } + + checkTimeouts(){ + if (config.maxRuntime > 0){ + let now = new Date().getTime(); + + for (let uuid in this.tasks){ + let task = this.tasks[uuid]; + + if (task.isRunning() && task.dateStarted > 0 && (now - task.dateStarted) > config.maxRuntime * 60 * 1000){ + task.output.push(`Task timed out after ${Math.ceil(task.processingTime / 60 / 1000)} minutes.\n`); + this.cancel(uuid, () => { + logger.warn(`Task ${uuid} timed out`); + }); + } + } + } + } } module.exports = { @@ -313,4 +338,4 @@ module.exports = { initialize: function(cb){ taskManager = new TaskManager(cb); } -}; \ No newline at end of file +}; diff --git a/libs/apps.js b/libs/apps.js new file mode 100644 index 0000000..64c6956 --- /dev/null +++ b/libs/apps.js @@ -0,0 +1,34 @@ +/* +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU Affero General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program. If not, see . +*/ +const fs = require('fs'); +const path = require('path'); + +let sevenZ = "7z"; +let unzip = "unzip"; + +if (fs.existsSync(path.join("apps", "7z", "7z.exe"))){ + sevenZ = path.resolve(path.join("apps", "7z", "7z.exe")); +} + +if (fs.existsSync(path.join("apps", "unzip", "unzip.exe"))){ + unzip = path.resolve(path.join("apps", "unzip", "unzip.exe")); +} + +module.exports = { + sevenZ, unzip +}; diff --git a/libs/logger.js b/libs/logger.js index f65fcad..17db890 100644 --- a/libs/logger.js +++ b/libs/logger.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . 
*/ "use strict"; @@ -45,7 +45,7 @@ if (!config.deamon){ let logger = winston.createLogger({ transports }); logger.add(new winston.transports.File({ - format: winston.format.simple(), + format: winston.format.simple(), filename: logPath, // Write to projectname.log json: false, // Write in plain text, not JSON maxsize: config.logger.maxFileSize, // Max size of each file @@ -53,9 +53,4 @@ logger.add(new winston.transports.File({ level: config.logger.level // Level of log messages })); -if (config.deamon){ - // Console transport is no use to us when running as a daemon - logger.remove(winston.transports.Console); -} - module.exports = logger; diff --git a/libs/odmInfo.js b/libs/odmInfo.js index 6cdc16b..e1ed26b 100644 --- a/libs/odmInfo.js +++ b/libs/odmInfo.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; @@ -24,6 +24,7 @@ const logger = require('./logger'); let odmOptions = null; let odmVersion = null; +let engine = null; module.exports = { initialize: function(done){ @@ -39,7 +40,31 @@ module.exports = { return; } - odmRunner.getVersion(done); + odmRunner.getVersion((err, version) => { + odmVersion = version; + done(null, version); + }); + }, + + getEngine: function(done){ + if (engine){ + done(null, engine); + return; + } + + odmRunner.getEngine((err, eng) => { + engine = eng; + done(null, eng); + }); + }, + + supportsOption: function(optName, cb){ + this.getOptions((err, json) => { + if (err) cb(err); + else{ + cb(null, !!json.find(opt => opt.name === optName)); + } + }); }, getOptions: function(done){ @@ -57,7 +82,8 @@ module.exports = { // (num cores can be set programmatically, so can gcpFile, etc.) if (["-h", "--project-path", "--cmvs-maxImages", "--time", "--zip-results", "--pmvs-num-cores", - "--start-with", "--gcp", "--images", + "--start-with", "--gcp", "--images", "--geo", "--align", + "--split-image-groups", "--copy-to", "--rerun-all", "--rerun", "--slam-config", "--video", "--version", "name"].indexOf(option) !== -1) continue; @@ -124,10 +150,7 @@ module.exports = { // is in the list of choices if (domain.indexOf(value) === -1) domain.unshift(value); } - - help = help.replace(/^One of: \%\(choices\)s. 
/, ""); - help = help.replace(/\%\(default\)s/g, value); - + odmOptions.push({ name, type, value, domain, help }); @@ -224,7 +247,6 @@ module.exports = { } } }, - { regex: /^(json)$/, validate: function(matches, value){ @@ -237,7 +259,6 @@ module.exports = { } } }, - { regex: /^(string|path)$/, validate: function(){ @@ -318,4 +339,4 @@ module.exports = { done(e); } } -}; \ No newline at end of file +}; diff --git a/libs/odmRunner.js b/libs/odmRunner.js index 1ec5462..3ec253e 100644 --- a/libs/odmRunner.js +++ b/libs/odmRunner.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,11 +12,12 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; let fs = require('fs'); +let os = require('os'); let path = require('path'); let assert = require('assert'); let spawn = require('child_process').spawn; @@ -28,8 +29,8 @@ module.exports = { run: function(options, projectName, done, outputReceived){ assert(projectName !== undefined, "projectName must be specified"); assert(options["project-path"] !== undefined, "project-path must be defined"); - - const command = path.join(config.odm_path, "run.sh"), + + const command = path.join(config.odm_path, os.platform() === "win32" ? 
"run.bat" : "run.sh"), params = []; for (var name in options){ @@ -70,7 +71,9 @@ module.exports = { } // Launch - let childProcess = spawn(command, params, {cwd: config.odm_path}); + const env = utils.clone(process.env); + env.ODM_NONINTERACTIVE = 1; + let childProcess = spawn(command, params, {cwd: config.odm_path, env}); childProcess .on('exit', (code, signal) => done(null, code, signal)) @@ -89,6 +92,13 @@ module.exports = { }); }, + getEngine: function(done){ + fs.readFile(path.join(config.odm_path, 'ENGINE'), {encoding: 'utf8'}, (err, content) => { + if (err) done(null, "odm"); // Assumed + else done(null, content.split("\n").map(l => l.trim())[0]); + }); + }, + getJsonOptions: function(done){ // In test mode, we don't call ODM, // instead we return a mock @@ -112,38 +122,51 @@ module.exports = { return; // Skip rest } - // Launch - const env = utils.clone(process.env); - env.ODM_OPTIONS_TMP_FILE = utils.tmpPath(".json"); - let childProcess = spawn("python", [path.join(__dirname, "..", "helpers", "odmOptionsToJson.py"), - "--project-path", config.odm_path, "bogusname"], { env }); - - // Cleanup on done - let handleResult = (err, result) => { - fs.exists(env.ODM_OPTIONS_TMP_FILE, exists => { - if (exists) fs.unlink(env.ODM_OPTIONS_TMP_FILE, err => { - if (err) console.warning(`Cannot cleanup ${env.ODM_OPTIONS_TMP_FILE}`); + const getOdmOptions = (pythonExe, done) => { + // Launch + const env = utils.clone(process.env); + env.ODM_OPTIONS_TMP_FILE = utils.tmpPath(".json"); + env.ODM_PATH = config.odm_path; + let childProcess = spawn(pythonExe, [path.join(__dirname, "..", "helpers", "odmOptionsToJson.py"), + "--project-path", config.odm_path, "bogusname"], { env }); + + // Cleanup on done + let handleResult = (err, result) => { + fs.exists(env.ODM_OPTIONS_TMP_FILE, exists => { + if (exists) fs.unlink(env.ODM_OPTIONS_TMP_FILE, err => { + if (err) console.warning(`Cannot cleanup ${env.ODM_OPTIONS_TMP_FILE}`); + }); }); + + // Don't wait + done(err, result); + }; + + childProcess + .on('exit', (code, signal) => { + try{ + fs.readFile(env.ODM_OPTIONS_TMP_FILE, { encoding: "utf8" }, (err, data) => { + if (err) handleResult(new Error(`Cannot read list of options from ODM (from temporary file). Is ODM installed in ${config.odm_path}?`)); + else{ + let json = JSON.parse(data); + handleResult(null, json); + } + }); + }catch(err){ + handleResult(new Error(`Could not load list of options from ODM. Is ODM installed in ${config.odm_path}? Make sure that OpenDroneMap is installed and that --odm_path is set properly: ${err.message}`)); + } + }) + .on('error', handleResult); + } + + if (os.platform() === "win32"){ + getOdmOptions("helpers\\odm_python.bat", done); + }else{ + // Try Python3 first + getOdmOptions("python3", (err, result) => { + if (err) getOdmOptions("python", done); + else done(null, result); }); - - // Don't wait - done(err, result); - }; - - childProcess - .on('exit', (code, signal) => { - try{ - fs.readFile(env.ODM_OPTIONS_TMP_FILE, { encoding: "utf8" }, (err, data) => { - if (err) handleResult(new Error(`Cannot read list of options from ODM (from temporary file). Is ODM installed in ${config.odm_path}?`)); - else{ - let json = JSON.parse(data); - handleResult(null, json); - } - }); - }catch(err){ - handleResult(new Error(`Could not load list of options from ODM. Is ODM installed in ${config.odm_path}? 
Make sure that OpenDroneMap is installed and that --odm_path is set properly: ${err.message}`)); - } - }) - .on('error', handleResult); + } } }; diff --git a/libs/processRunner.js b/libs/processRunner.js index 155118e..d857d94 100644 --- a/libs/processRunner.js +++ b/libs/processRunner.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,19 +12,21 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; let fs = require('fs'); +let apps = require('./apps'); let path = require('path'); let assert = require('assert'); let spawn = require('child_process').spawn; let config = require('../config.js'); let logger = require('./logger'); +let utils = require('./utils'); -function makeRunner(command, args, requiredOptions = [], outputTestFile = null){ +function makeRunner(command, args, requiredOptions = [], outputTestFile = null, skipOnTest = true){ return function(options, done, outputReceived){ for (let requiredOption of requiredOptions){ assert(options[requiredOption] !== undefined, `${requiredOption} must be defined`); @@ -35,14 +37,16 @@ function makeRunner(command, args, requiredOptions = [], outputTestFile = null){ logger.info(`About to run: ${command} ${commandArgs.join(" ")}`); - if (config.test){ + if (config.test && skipOnTest){ logger.info("Test mode is on, command will not execute"); if (outputTestFile){ fs.readFile(path.resolve(__dirname, outputTestFile), 'utf8', (err, text) => { if (!err){ - let lines = text.split("\n"); - lines.forEach(line => outputReceived(line)); + if (outputReceived !== undefined){ + let lines = text.split("\n"); + lines.forEach(line => outputReceived(line)); + } done(null, 0, null); }else{ @@ -58,14 +62,25 @@ function makeRunner(command, args, requiredOptions = [], outputTestFile = null){ } // Launch - let childProcess = spawn(command, commandArgs); + const env = utils.clone(process.env); + env.LD_LIBRARY_PATH = path.join(config.odm_path, "SuperBuild", "install", "lib"); + + let cwd = undefined; + if (options.cwd) cwd = options.cwd; + + let childProcess = spawn(command, commandArgs, { env, cwd }); childProcess .on('exit', (code, signal) => done(null, code, signal)) .on('error', done); - childProcess.stdout.on('data', chunk => outputReceived(chunk.toString())); - childProcess.stderr.on('data', chunk => outputReceived(chunk.toString())); + if (outputReceived !== undefined){ + childProcess.stdout.on('data', chunk => outputReceived(chunk.toString())); + childProcess.stderr.on('data', chunk => outputReceived(chunk.toString())); + }else{ + childProcess.stdout.on('data', () => {}); + childProcess.stderr.on('data', () => {}); + } return childProcess; }; @@ -76,5 +91,30 @@ module.exports = { function(options){ return [options.projectFolderPath]; }, - ["projectFolderPath"]) + 
["projectFolderPath"]), + + sevenZip: makeRunner(apps.sevenZ, function(options){ + return ["a", "-mx=0", "-y", "-r", "-bd", options.destination].concat(options.pathsToArchive); + }, + ["destination", "pathsToArchive", "cwd"], + null, + false), + + sevenUnzip: makeRunner(apps.sevenZ, function(options){ + let cmd = "x"; // eXtract files with full paths + if (options.noDirectories) cmd = "e"; //Extract files from archive (without using directory names) + + return [cmd, "-aoa", "-bd", "-y", `-o${options.destination}`, options.file]; + }, + ["destination", "file"], + null, + false), + + unzip: makeRunner(apps.unzip, function(options){ + const opts = options.noDirectories ? ["-j"] : []; + return opts.concat(["-qq", "-o", options.file, "-d", options.destination]); + }, + ["destination", "file"], + null, + false) }; diff --git a/libs/statusCodes.js b/libs/statusCodes.js index 82af6e4..4c712ee 100644 --- a/libs/statusCodes.js +++ b/libs/statusCodes.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . */ "use strict"; @@ -22,4 +22,4 @@ module.exports = { FAILED: 30, COMPLETED: 40, CANCELED: 50 -}; \ No newline at end of file +}; diff --git a/libs/taskNew.js b/libs/taskNew.js index 61bec62..2f1233b 100644 --- a/libs/taskNew.js +++ b/libs/taskNew.js @@ -1,9 +1,9 @@ /* -Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap. -Copyright (C) 2016 Node-OpenDroneMap Contributors +NodeODM App and REST API to access ODM. +Copyright (C) 2016 NodeODM Contributors This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by +it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. @@ -12,7 +12,7 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -You should have received a copy of the GNU General Public License +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . 
*/ @@ -24,13 +24,14 @@ const uuidv4 = require('uuid/v4'); const config = require('../config.js'); const rmdir = require('rimraf'); const Directories = require('./Directories'); -const unzip = require('node-unzip-2'); const mv = require('mv'); const Task = require('./Task'); const async = require('async'); const odmInfo = require('./odmInfo'); const request = require('request'); -const utils = require('./utils'); +const ziputils = require('./ziputils'); +const statusCodes = require('./statusCodes'); +const logger = require('./logger'); const download = function(uri, filename, callback) { request.head(uri, function(err, res, body) { @@ -48,6 +49,24 @@ const removeDirectory = function(dir, cb = () => {}){ }); }; +const assureUniqueFilename = (dstPath, filename, cb) => { + const dstFile = path.join(dstPath, filename); + fs.exists(dstFile, exists => { + if (!exists) cb(null, filename); + else{ + const parts = filename.split("."); + if (parts.length > 1){ + assureUniqueFilename(dstPath, + `${parts.slice(0, parts.length - 1).join(".")}_.${parts[parts.length - 1]}`, + cb); + }else{ + // Filename without extension? Strange.. + assureUniqueFilename(dstPath, filename + "_", cb); + } + } + }); +}; + const upload = multer({ storage: multer.diskStorage({ destination: (req, file, cb) => { @@ -63,9 +82,11 @@ const upload = multer({ }); }, filename: (req, file, cb) => { - let filename = utils.sanitize(file.originalname); + let filename = file.originalname; if (filename === "body.json") filename = "_body.json"; - cb(null, filename); + + let dstPath = path.join("tmp", req.id); + assureUniqueFilename(dstPath, filename, cb); } }) }); @@ -78,7 +99,7 @@ module.exports = { const userUuid = req.get('set-uuid'); // Valid UUID and no other task with same UUID? - if (/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(userUuid) && !TaskManager.singleton().find(userUuid)){ + if (/^[0-9a-f]{8}-[0-9a-f]{4}-[1-7][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(userUuid) && !TaskManager.singleton().find(userUuid)){ req.id = userUuid; next(); }else{ @@ -123,7 +144,7 @@ module.exports = { if (req.files && req.files.length > 0){ res.json({success: true}); }else{ - res.json({error: "Need at least 1 file."}); + res.json({error: "Need at least 1 file.", noRetry: true}); } }, @@ -143,7 +164,7 @@ module.exports = { else cb(null, body); }); }catch(e){ - cb("Malformed body.json"); + cb(new Error("Malformed body.json")); } } }); @@ -187,11 +208,10 @@ module.exports = { }, cb => { fs.stat(srcPath, (err, stat) => { - if (err && err.code === 'ENOENT') cb(); - else cb(new Error(`Directory exists (should not have happened: ${err.code})`)); + if (err && err.code === 'ENOENT') fs.mkdir(srcPath, undefined, cb); + else cb(); // Dir already exists }); }, - cb => fs.mkdir(srcPath, undefined, cb), cb => { fs.writeFile(bodyFile, JSON.stringify(req.body), {encoding: 'utf8'}, cb); }, @@ -205,10 +225,6 @@ module.exports = { }, createTask: (req, res) => { - // IMPROVEMENT: consider doing the file moving in the background - // and return a response more quickly instead of a long timeout. 
- req.setTimeout(1000 * 60 * 20); - const srcPath = path.join("tmp", req.id); // Print error message and cleanup @@ -216,152 +232,207 @@ module.exports = { res.json({error}); removeDirectory(srcPath); }; + + let destPath = path.join(Directories.data, req.id); + let destImagesPath = path.join(destPath, "images"); + let destGcpPath = path.join(destPath, "gcp"); - if (req.error !== undefined){ - die(req.error); - }else{ - let destPath = path.join(Directories.data, req.id); - let destImagesPath = path.join(destPath, "images"); - let destGcpPath = path.join(destPath, "gcp"); + const checkMaxImageLimits = (cb) => { + if (!config.maxImages) cb(); + else{ + fs.readdir(destImagesPath, (err, files) => { + if (err) cb(err); + else if (files.length > config.maxImages) cb(new Error(`${files.length} images uploaded, but this node can only process up to ${config.maxImages}.`)); + else cb(); + }); + } + }; - async.series([ - cb => { - odmInfo.filterOptions(req.body.options, (err, options) => { - if (err) cb(err); - else { - req.body.options = options; - cb(null); - } + let initSteps = [ + // Check if dest directory already exists + cb => { + if (req.files && req.files.length > 0) { + fs.stat(destPath, (err, stat) => { + if (err && err.code === 'ENOENT') cb(); + else{ + // Directory already exists, this could happen + // if a previous attempt at upload failed and the user + // used set-uuid to specify the same UUID over the previous run + // Try to remove it + removeDirectory(destPath, err => { + if (err) cb(new Error(`Directory exists and we couldn't remove it.`)); + else cb(); + }); + } }); - }, - - // Check if dest directory already exists - cb => { - if (req.files && req.files.length > 0) { - fs.stat(destPath, (err, stat) => { - if (err && err.code === 'ENOENT') cb(); - else cb(new Error(`Directory exists (should not have happened: ${err.code})`)); - }); - } else { - cb(); - } - }, + } else { + cb(); + } + }, - // Unzips zip URL to tmp// (if any) - cb => { - if (req.body.zipurl) { - let archive = "zipurl.zip"; + // Unzips zip URL to tmp// (if any) + cb => { + if (req.body.zipurl) { + let archive = "zipurl.zip"; - upload.storage.getDestination(req, archive, (err, dstPath) => { - if (err) cb(err); - else{ - let archiveDestPath = path.join(dstPath, archive); + upload.storage.getDestination(req, archive, (err, dstPath) => { + if (err) cb(err); + else{ + let archiveDestPath = path.join(dstPath, archive); - download(req.body.zipurl, archiveDestPath, cb); + download(req.body.zipurl, archiveDestPath, cb); + } + }); + } else { + cb(); + } + }, + + // Move all uploads to data//images dir (if any) + cb => fs.mkdir(destPath, undefined, cb), + cb => fs.mkdir(destGcpPath, undefined, cb), + cb => { + // We attempt to do this multiple times, + // as antivirus software sometimes is scanning + // the folder while we try to move it, resulting in + // an operation not permitted error + let retries = 0; + + const move = () => { + mv(srcPath, destImagesPath, err => { + if (!err) cb(); // Done + else{ + if (++retries < 20){ + logger.warn(`Cannot move ${srcPath}, probably caused by antivirus software (please disable it or add an exception), retrying (${retries})...`); + setTimeout(move, 2000); + } else { + logger.error(`Unable to move temp images (${srcPath}) after 20 retries. 
Error: ${err}`); + cb(err); } - }); - } else { - cb(); - } - }, + } + }); + } + move(); + }, + // Zip files handling + cb => { + const handleSeed = (cb) => { + const seedFileDst = path.join(destPath, "seed.zip"); + + async.series([ + // Move to project root + cb => mv(path.join(destImagesPath, "seed.zip"), seedFileDst, cb), + + // Extract + cb => { + ziputils.unzip(seedFileDst, destPath, cb); + }, + + // Remove + cb => { + fs.exists(seedFileDst, exists => { + if (exists) fs.unlink(seedFileDst, cb); + else cb(); + }); + } + ], cb); + } - // Move all uploads to data//images dir (if any) - cb => fs.mkdir(destPath, undefined, cb), - cb => fs.mkdir(destGcpPath, undefined, cb), - cb => mv(srcPath, destImagesPath, cb), - - // Zip files handling - cb => { - const handleSeed = (cb) => { - const seedFileDst = path.join(destPath, "seed.zip"); - - async.series([ - // Move to project root - cb => mv(path.join(destImagesPath, "seed.zip"), seedFileDst, cb), - - // Extract - cb => { - fs.createReadStream(seedFileDst).pipe(unzip.Extract({ path: destPath })) - .on('close', cb) - .on('error', cb); - }, - - // Verify max images limit - cb => { - fs.readdir(destImagesPath, (err, files) => { - if (config.maxImages && files.length > config.maxImages) cb(`${files.length} images uploaded, but this node can only process up to ${config.maxImages}.`); - else cb(err); - }); - } - ], cb); + const handleZipUrl = (cb) => { + // Extract images + ziputils.unzip(path.join(destImagesPath, "zipurl.zip"), + destImagesPath, + cb, true); + } + + // Find and handle zip files and extract + fs.readdir(destImagesPath, (err, entries) => { + if (err) cb(err); + else { + async.eachSeries(entries, (entry, cb) => { + if (entry === "seed.zip"){ + handleSeed(cb); + }else if (entry === "zipurl.zip") { + handleZipUrl(cb); + } else cb(); + }, cb); } + }); + }, - const handleZipUrl = (cb) => { - let filesCount = 0; - fs.createReadStream(path.join(destImagesPath, "zipurl.zip")).pipe(unzip.Parse()) - .on('entry', function(entry) { - if (entry.type === 'File') { - filesCount++; - entry.pipe(fs.createWriteStream(path.join(destImagesPath, path.basename(entry.path)))); - } else { - entry.autodrain(); - } - }) - .on('close', () => { - // Verify max images limit - if (config.maxImages && filesCount > config.maxImages) cb(`${filesCount} images uploaded, but this node can only process up to ${config.maxImages}.`); - else cb(); - }) - .on('error', cb); + // Verify max images limit + cb => { + checkMaxImageLimits(cb); + }, + + cb => { + // Find any *.txt (GCP) file or alignment file and move it to the data//gcp directory + // also remove any lingering zipurl.zip + fs.readdir(destImagesPath, (err, entries) => { + if (err) cb(err); + else { + async.eachSeries(entries, (entry, cb) => { + if (/\.txt$/gi.test(entry) || /^align\.(las|laz|tif)$/gi.test(entry)) { + mv(path.join(destImagesPath, entry), path.join(destGcpPath, entry), cb); + }else if (/\.zip$/gi.test(entry)){ + fs.unlink(path.join(destImagesPath, entry), cb); + } else cb(); + }, cb); } + }); + } + ]; - // Find and handle zip files and extract - fs.readdir(destImagesPath, (err, entries) => { - if (err) cb(err); - else { - async.eachSeries(entries, (entry, cb) => { - if (entry === "seed.zip"){ - handleSeed(cb); - }else if (entry === "zipurl.zip") { - handleZipUrl(cb); - } else cb(); - }, cb); - } + if (req.error !== undefined){ + die(req.error); + }else{ + let imagesCountEstimate = -1; + + async.series([ + cb => { + // Basic path check + fs.exists(srcPath, exists => { + if (exists) cb(); + else cb(new 
Error(`Invalid UUID`)); }); }, - cb => { - // Find any *.txt (GCP) file and move it to the data//gcp directory - // also remove any lingering zipurl.zip - fs.readdir(destImagesPath, (err, entries) => { + odmInfo.filterOptions(req.body.options, (err, options) => { if (err) cb(err); else { - async.eachSeries(entries, (entry, cb) => { - if (/\.txt$/gi.test(entry)) { - mv(path.join(destImagesPath, entry), path.join(destGcpPath, entry), cb); - }else if (/\.zip$/gi.test(entry)){ - fs.unlink(path.join(destImagesPath, entry), cb); - } else cb(); - }, cb); + req.body.options = options; + cb(null); } }); }, - - // Create task cb => { - new Task(req.id, req.body.name, req.body.options, - req.body.webhook, - req.body.skipPostProcessing === 'true', - req.body.outputs, - req.body.dateCreated, - (err, task) => { - if (err) cb(err); - else { - TaskManager.singleton().addNew(task); - res.json({ uuid: req.id }); - cb(); - } + fs.readdir(srcPath, (err, entries) => { + if (!err) imagesCountEstimate = entries.length; + cb(); }); + }, + cb => { + const task = new Task(req.id, req.body.name, req.body.options, + req.body.webhook, + req.body.skipPostProcessing === 'true', + req.body.outputs, + req.body.dateCreated, + imagesCountEstimate + ); + TaskManager.singleton().addNew(task); + res.json({ uuid: req.id }); + cb(); + + // We return a UUID right away but continue + // doing processing in the background + + task.initialize(err => { + if (err) { + // Cleanup + removeDirectory(srcPath); + removeDirectory(destPath); + } else TaskManager.singleton().processNextTask(); + }, initSteps); } ], err => { if (err) die(err.message); diff --git a/libs/utils.js b/libs/utils.js index afa9bcc..0b43fca 100644 --- a/libs/utils.js +++ b/libs/utils.js @@ -55,4 +55,5 @@ module.exports = { tmpPath: function(extension = ".txt"){ return path.join(os.tmpdir(), `nodeodm_${crypto.randomBytes(6).readUIntLE(0,6).toString(36)}${extension}`); } -}; \ No newline at end of file +}; + diff --git a/libs/ziputils.js b/libs/ziputils.js new file mode 100644 index 0000000..2b61086 --- /dev/null +++ b/libs/ziputils.js @@ -0,0 +1,38 @@ +const processRunner = require('./processRunner'); +const nodeUnzip = require('node-unzip-2'); +const config = require('../config'); +const fs = require('fs'); + +module.exports = { + unzip: function(file, outputDir, cb, noDirectories = false){ + if (config.hasUnzip){ + processRunner.unzip({ + file: file, + destination: outputDir, + noDirectories + }, (err, code, _) => { + if (err) cb(err); + else{ + if (code === 0) cb(); + else cb(new Error(`Could not extract .zip file, unzip exited with code ${code}`)); + } + }); + }else if (config.has7z){ + processRunner.sevenUnzip({ + file: file, + destination: outputDir, + noDirectories + }, (err, code, _) => { + if (err) cb(err); + else{ + if (code === 0) cb(); + else cb(new Error(`Could not extract .zip file, 7z exited with code ${code}`)); + } + }); + }else{ + fs.createReadStream(file).pipe(nodeUnzip.Extract({ path: outputDir })) + .on('close', cb) + .on('error', cb); + } + } +}
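
For reference, a minimal standalone sketch of the extractor-selection logic this patch introduces (the `has7z` probe in config.js plus the unzip/7z routing in libs/ziputils.js and libs/processRunner.js). The archive name, output directory, and the `unzip -v` availability probe are illustrative assumptions, not part of the patch, and the node-unzip-2 stream fallback is omitted for brevity:

```js
// Sketch only: condenses the patch's 7z/unzip detection and extraction routing.
const { spawnSync, spawn } = require('child_process');

// Probe for 7z the same way config.js does: run `7z --help` and check the exit code.
const has7z = spawnSync('7z', ['--help']).status === 0;
// Assumed probe for a system unzip binary (not in the patch, which checks for apps/unzip/unzip.exe instead).
const hasUnzip = spawnSync('unzip', ['-v']).status === 0;

function extract(file, outputDir, cb) {
    // Prefer unzip, then 7z, mirroring the argument lists used in libs/processRunner.js.
    const tool = hasUnzip ? { cmd: 'unzip', args: ['-qq', '-o', file, '-d', outputDir] }
               : has7z    ? { cmd: '7z', args: ['x', '-aoa', '-bd', '-y', `-o${outputDir}`, file] }
               : null;
    if (!tool) return cb(new Error('No unzip or 7z binary available'));

    spawn(tool.cmd, tool.args)
        .on('error', cb)
        .on('exit', code => cb(code === 0 ? null : new Error(`${tool.cmd} exited with code ${code}`)));
}

// Example usage (hypothetical archive):
// extract('seed.zip', './extracted', err => console.log(err || 'done'));
```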