diff --git a/aws-neptune-for-graphql-1.1.0.tgz b/aws-neptune-for-graphql-1.1.0.tgz new file mode 100644 index 0000000..d553ca1 Binary files /dev/null and b/aws-neptune-for-graphql-1.1.0.tgz differ diff --git a/package.json b/package.json index 5669a4d..4da4ee4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@aws/neptune-for-graphql", - "version": "1.0.0", + "version": "1.1.0", "description": "CLI utility to create and maintain a GraphQL API for Amazon Neptune", "keywords": [ "Amazon Neptune", diff --git a/src/CDKPipelineApp.js b/src/CDKPipelineApp.js index e7d64dd..5e963ee 100644 --- a/src/CDKPipelineApp.js +++ b/src/CDKPipelineApp.js @@ -16,6 +16,7 @@ import { readFile, writeFile } from 'fs/promises'; import fs from 'fs'; import archiver from 'archiver'; import ora from 'ora'; +import { loggerLog } from "./logger.js"; let NAME = ''; let REGION = ''; @@ -32,7 +33,7 @@ let APPSYNC_ATTACH_QUERY = []; let APPSYNC_ATTACH_MUTATION = []; let SCHEMA_MODEL = null; let thisOutputFolderPath = './output'; - +let msg = ''; function yellow(text) { return '\x1b[33m' + text + '\x1b[0m'; @@ -68,7 +69,9 @@ async function createDeploymentFile(folderPath, zipFilePath) { archive.file('./output/output.resolver.graphql.js', { name: 'output.resolver.graphql.js' }) await archive.finalize(); } catch (err) { - console.error('Creating deployment zip file: ' + err); + msg = 'Creating deployment zip file: ' + err; + console.error(msg); + loggerLog(msg); } } @@ -125,6 +128,8 @@ async function createAWSpipelineCDK (pipelineName, neptuneDBName, neptuneDBregio NEPTUNE_IAM_POLICY_RESOURCE = neptuneClusterInfo.iamPolicyResource; } catch (error) { + msg = 'Error getting Neptune Cluster Info: ' + JSON.stringify(error); + loggerLog(msg); if (!quiet) spinner.fail("Error getting Neptune Cluster Info."); if (!isNeptuneIAMAuth) { spinner.clear(); diff --git a/src/NeptuneSchema.js b/src/NeptuneSchema.js index d830f18..2edc115 100644 --- a/src/NeptuneSchema.js +++ 
b/src/NeptuneSchema.js @@ -14,15 +14,17 @@ import axios from "axios"; import { aws4Interceptor } from "aws4-axios"; import { fromNodeProviderChain } from "@aws-sdk/credential-providers"; import { NeptunedataClient, ExecuteOpenCypherQueryCommand } from "@aws-sdk/client-neptunedata"; +import { loggerLog } from "./logger.js"; let HOST = ''; let PORT = 8182; let REGION = '' let SAMPLE = 5000; -let VERBOSE = false; +let VERBOSE = false; +let NEPTUNE_TYPE = 'neptune-db'; let language = 'openCypher'; let useSDK = false; - +let msg = ''; async function getAWSCredentials() { const credentialProvider = fromNodeProviderChain(); @@ -31,7 +33,7 @@ async function getAWSCredentials() { const interceptor = aws4Interceptor({ options: { region: REGION, - service: "neptune-db", + service: NEPTUNE_TYPE, }, credentials: cred }); @@ -55,6 +57,7 @@ function consoleOut(text) { if (VERBOSE) { console.log(text); } + loggerLog(text); } @@ -67,11 +70,18 @@ async function queryNeptune(q) { const response = await axios.post(`https://${HOST}:${PORT}/${language}`, `query=${encodeURIComponent(q)}`); return response.data; } catch (error) { - console.error("Http query request failed: ", error.message); - consoleOut("Trying with the AWS SDK"); - const response = await queryNeptuneSDK(q); - useSDK = true; - return response; + msg = `Http query request failed: ${error.message}`; + console.error(msg); + loggerLog(msg + ': ' + JSON.stringify(error)); + + if (NEPTUNE_TYPE == 'neptune-db') { + consoleOut("Trying with the AWS SDK"); + const response = await queryNeptuneSDK(q); + useSDK = true; + return response; + } + + throw new Error('AWS SDK for Neptune Analytics is not available, yet.'); } } } @@ -91,7 +101,9 @@ async function queryNeptuneSDK(q) { return response; } catch (error) { - console.error("SDK query request failed: ", error.message); + msg = `SDK query request failed: ${error.message}`; + console.error(msg); + loggerLog(msg + ': ' + JSON.stringify(error)); process.exit(1); } } @@ -100,15 
+112,18 @@ async function queryNeptuneSDK(q) { async function getNodesNames() { let query = `MATCH (a) RETURN labels(a), count(a)`; let response = await queryNeptune(query); + loggerLog('Getting nodes names'); try { response.results.forEach(result => { - schema.nodeStructures.push({ label: result['labels(a)'][0], properties: []}); + schema.nodeStructures.push({ label: result['labels(a)'][0], properties: []}); consoleOut(' Found Node: ' + yellow(result['labels(a)'][0])); }); } catch (e) { - consoleOut(" No nodes found"); + msg = " No nodes found"; + consoleOut(msg); + loggerLog(msg + ': ' + JSON.stringify(e)); return; } } @@ -117,6 +132,7 @@ async function getNodesNames() { async function getEdgesNames() { let query = `MATCH ()-[e]->() RETURN type(e), count(e)`; let response = await queryNeptune(query); + loggerLog('Getting edges names'); try { response.results.forEach(result => { @@ -125,35 +141,31 @@ async function getEdgesNames() { }); } catch (e) { - consoleOut(" No edges found"); + msg = " No edges found"; + consoleOut(msg); + loggerLog(msg + ': ' + JSON.stringify(e)); return; } } -async function checkEdgeDirection(direction) { - let query = `MATCH (from:${direction.from})-[r:${direction.edge.label}]->(to:${direction.to}) RETURN r as edge LIMIT 1`; +async function findFromAndToLabels(edgeStructure) { + let query = `MATCH (from)-[r:${edgeStructure.label}]->(to) RETURN DISTINCT labels(from) as fromLabel, labels(to) as toLabel`; let response = await queryNeptune(query); - let result = response.results[0]; - if (result !== undefined) { - direction.edge.directions.push({from:direction.from, to:direction.to}); - consoleOut(' Found edge: ' + yellow(direction.edge.label) + ' direction: ' + yellow(direction.from) + ' -> ' + yellow(direction.to)); + for (let result of response.results) { + for (let fromLabel of result.fromLabel) { + for (let toLabel of result.toLabel) { + edgeStructure.directions.push({from:fromLabel, to:toLabel}); + consoleOut(' Found edge: ' + 
yellow(edgeStructure.label) + ' direction: ' + yellow(fromLabel) + ' -> ' + yellow(toLabel)); + } + } } } async function getEdgesDirections() { - let possibleDirections = []; - for (const edge of schema.edgeStructures) { - for (const fromNode of schema.nodeStructures) { - for (const toNode of schema.nodeStructures) { - possibleDirections.push({edge:edge, from:fromNode.label, to:toNode.label}); - } - } - } - - await Promise.all(possibleDirections.map(checkEdgeDirection)) + await Promise.all(schema.edgeStructures.map(findFromAndToLabels)) } @@ -196,7 +208,8 @@ function addUpdateEdgeProperty(edgeName, name, value) { async function getEdgeProperties(edge) { - let query = `MATCH ()-[n:${edge.label}]->() RETURN properties(n) as properties LIMIT ${SAMPLE}`; + let query = `MATCH ()-[n:${edge.label}]->() RETURN properties(n) as properties LIMIT ${SAMPLE}`; + loggerLog(`Getting properties for edge: ${query}`); try { let response = await queryNeptune(query); let result = response.results; @@ -207,7 +220,9 @@ async function getEdgeProperties(edge) { }); } catch (e) { - consoleOut(" No properties found for edge: " + edge.label); + msg = " No properties found for edge: " + edge.label; + consoleOut(msg); + loggerLog(msg + ': ' + JSON.stringify(e)); } } @@ -220,7 +235,8 @@ async function getEdgesProperties() { async function getNodeProperties(node) { - let query = `MATCH (n:${node.label}) RETURN properties(n) as properties LIMIT ${SAMPLE}`; + let query = `MATCH (n:${node.label}) RETURN properties(n) as properties LIMIT ${SAMPLE}`; + loggerLog(`Getting properties for node: ${query}`); try { let response = await queryNeptune(query); let result = response.results; @@ -231,7 +247,9 @@ async function getNodeProperties(node) { }); } catch (e) { - consoleOut(" No properties found for node: " + node.label); + msg = " No properties found for node: " + node.label; + consoleOut(msg); + loggerLog(msg + ': ' + JSON.stringify(e)); } } @@ -244,10 +262,12 @@ async function getNodesProperties() { 
async function checkEdgeDirectionCardinality(d) { - let queryFrom = `MATCH (from:${d.from})-[r:${d.edge.label}]->(to:${d.to}) WITH to, count(from) as rels WHERE rels > 1 RETURN rels LIMIT 1`; + let queryFrom = `MATCH (from:${d.from})-[r:${d.edge.label}]->(to:${d.to}) WITH to, count(from) as rels WHERE rels > 1 RETURN rels LIMIT 1`; + loggerLog(`Checking edge direction cardinality: ${queryFrom}`); let responseFrom = await queryNeptune(queryFrom); let resultFrom = responseFrom.results[0]; - let queryTo = `MATCH (from:${d.from})-[r:${d.edge.label}]->(to:${d.to}) WITH from, count(to) as rels WHERE rels > 1 RETURN rels LIMIT 1`; + let queryTo = `MATCH (from:${d.from})-[r:${d.edge.label}]->(to:${d.to}) WITH from, count(to) as rels WHERE rels > 1 RETURN rels LIMIT 1`; + loggerLog(`Checking edge direction cardinality: ${queryTo}`); let responseTo = await queryNeptune(queryTo); let resultTo = responseTo.results[0]; let c = ''; @@ -283,11 +303,12 @@ async function getEdgesDirectionsCardinality() { } -function setGetNeptuneSchemaParameters(host, port, region, verbose = false) { +function setGetNeptuneSchemaParameters(host, port, region, verbose = false, neptuneType) { HOST = host; PORT = port; REGION = region; VERBOSE = verbose; + NEPTUNE_TYPE = neptuneType; } @@ -307,6 +328,7 @@ async function getSchemaViaSummaryAPI() { return true; } catch (error) { + loggerLog(`Getting the schema via Neptune Summary API failed: ${JSON.stringify(error)}`); return false; } } @@ -318,8 +340,10 @@ async function getNeptuneSchema(quiet) { try { await getAWSCredentials(); - } catch (error) { - consoleOut("There are no AWS credetials configured. \nGetting the schema from an Amazon Neptune database with IAM authentication works only with AWS credentials."); + } catch (error) { + msg = "There are no AWS credentials configured. 
\nGetting the schema from an Amazon Neptune database with IAM authentication works only with AWS credentials."; + consoleOut(msg); + loggerLog(msg + ': ' + JSON.stringify(error)); } if (await getSchemaViaSummaryAPI()) { diff --git a/src/graphdb.js b/src/graphdb.js index 17f43ea..9065db1 100644 --- a/src/graphdb.js +++ b/src/graphdb.js @@ -69,12 +69,16 @@ function graphDBInferenceSchema (graphbSchema, addMutations) { } r += '\t_id: ID! @id\n'; - + + let properties = []; node.properties.forEach(property => { + properties.push(property.name); + if (property.name == 'id') r+= `\tid: ID\n`; else r+= `\t${property.name}: ${property.type}\n`; + }); let edgeTypes = []; @@ -127,11 +131,15 @@ function graphDBInferenceSchema (graphbSchema, addMutations) { }); // Add edge types - edgeTypes.forEach(edgeType => { + edgeTypes.forEach(edgeType => { + let collision = ''; + if (properties.includes(edgeType)) + collision = '_'; + if (changeCase) { - r += `\t${edgeType}:${toPascalCase(edgeType)}` + r += `\t${collision + edgeType}:${toPascalCase(edgeType)}` } else { - r += `\t${edgeType}:${edgeType}` + r += `\t${collision + edgeType}:${edgeType}` } }); diff --git a/src/lambdaZip.js b/src/lambdaZip.js index 5084854..d75d854 100644 --- a/src/lambdaZip.js +++ b/src/lambdaZip.js @@ -1,5 +1,8 @@ import fs from 'fs'; import archiver from 'archiver'; +import { loggerLog } from "./logger.js"; + +let msg = ''; async function createLambdaDeploymentPackage(templatePath, zipFilePath) { try { @@ -10,7 +13,9 @@ async function createLambdaDeploymentPackage(templatePath, zipFilePath) { archive.file('./output/output.resolver.graphql.js', { name: 'output.resolver.graphql.js' }) await archive.finalize(); } catch (error) { - console.error('Lambda deployment package creation failed. '+ error); + msg = 'Lambda deployment zip file: ' + JSON.stringify(error); + loggerLog(msg); + console.error('Lambda deployment package creation failed. 
'+ error.message); } } diff --git a/src/logger.js b/src/logger.js new file mode 100644 index 0000000..9b84397 --- /dev/null +++ b/src/logger.js @@ -0,0 +1,26 @@ +import fs from 'fs'; + +let LOG_FILE = './output/log.txt'; + +function loggerInit(file) { + LOG_FILE = file; + try { + fs.writeFileSync(LOG_FILE, ''); + } catch (err) { + console.log(err); + } +} + +function loggerLog(text) { + // remove yellow escape from text + text = text.replaceAll(/\x1b\[33m/g, ''); + text = text.replaceAll(/\x1b\[0m/g, ''); + + try { + fs.appendFileSync(LOG_FILE, (new Date()).toISOString() + ' ' + text + '\n'); + } catch (err) { + console.log(err); + } +} + +export { loggerInit, loggerLog }; \ No newline at end of file diff --git a/src/main.js b/src/main.js index 9bee38f..03b8366 100644 --- a/src/main.js +++ b/src/main.js @@ -11,7 +11,7 @@ permissions and limitations under the License. */ import { readFileSync, writeFileSync, mkdirSync} from 'fs'; -import { helpTxt } from './help.js'; +import { helpTxt } from './help.js'; import { graphDBInferenceSchema } from './graphdb.js'; import { changeGraphQLSchema } from './changes.js'; import { schemaParser, schemaStringify } from './schemaParser.js'; @@ -21,6 +21,7 @@ import { getNeptuneSchema, setGetNeptuneSchemaParameters } from './NeptuneSchema import { createUpdateAWSpipeline, removeAWSpipelineResources } from './pipelineResources.js' import { createAWSpipelineCDK } from './CDKPipelineApp.js' import { createLambdaDeploymentPackage } from './lambdaZip.js' +import { loggerInit, loggerLog } from './logger.js'; import ora from 'ora'; let spinner = null; @@ -33,6 +34,7 @@ import path from 'path'; import { fileURLToPath } from 'url'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); +let msg = ''; // get version const version = JSON.parse(readFileSync(__dirname + '/../package.json')).version; @@ -52,17 +54,22 @@ let isNeptuneIAMAuth = false; let createUpdatePipeline = false; let createUpdatePipelineName = ''; let 
createUpdatePipelineEndpoint = ''; +let createUpdatePipelineEndpointRO = ''; +let createUpdatePipelineDualLambdas = false; let createUpdatePipelineRegion = 'us-east-1'; let createUpdatePipelineNeptuneDatabaseName = ''; let removePipelineName = ''; let inputCDKpipeline = false; let inputCDKpipelineName = ''; let inputCDKpipelineEnpoint = ''; +let inputCDKpipelineEnpointRO = ''; +let inputCDKpipelineDualLambdas = false; let inputCDKpipelineFile = ''; let inputCDKpipelineRegion = ''; let inputCDKpipelineDatabaseName = ''; let createLambdaZip = true; let outputFolderPath = './output'; +let neptuneType = 'neptune-db'; // or neptune-graph // Outputs @@ -189,6 +196,14 @@ function processArgs() { case '--create-update-aws-pipeline-neptune-endpoint': createUpdatePipelineEndpoint = array[index + 1]; break; + case '-pro': + case '--create-update-aws-pipeline-neptune-endpoint-ro': + createUpdatePipelineEndpointRO = array[index + 1]; + break; + case '-p2l': + case '--create-update-aws-pipeline-dual-lambdas': + createUpdatePipelineDualLambdas = true; + break; case '-pd': case '--create-update-aws-pipeline-neptune-database-name': createUpdatePipelineNeptuneDatabaseName = array[index + 1]; @@ -205,6 +220,14 @@ case '--output-aws-pipeline-cdk-neptume-endpoint': inputCDKpipelineEnpoint = array[index + 1]; break; + case '-cro': + case '--output-aws-pipeline-cdk-neptume-endpoint-ro': + inputCDKpipelineEnpointRO = array[index + 1]; + break; + case '-c2l': + case '--output-aws-pipeline-cdk-neptume-dual-lambas': + inputCDKpipelineDualLambdas = true + break; case '-cd': case '--output-aws-pipeline-cdk-neptume-database-name': inputCDKpipelineDatabaseName = array[index + 1]; @@ -258,43 +281,73 @@ async function main() { processArgs(); + // Init output folder + mkdirSync(outputFolderPath, { recursive: true }); + + // Init the logger + loggerInit(outputFolderPath + '/log_' + (new Date()).toISOString().replaceAll(':', '-') + '.txt'); + loggerLog('Starting neptune-for-graphql version: ' + version); + 
loggerLog('Input arguments: ' + process.argv); + // Get graphDB schema from file if (inputGraphDBSchemaFile != '' && inputGraphQLSchema == '' && inputGraphQLSchemaFile == '') { try { inputGraphDBSchema = readFileSync(inputGraphDBSchemaFile, 'utf8'); - if (!quiet) console.log('Loaded graphDB schema from file: ' + inputGraphDBSchemaFile); + msg = 'Loaded graphDB schema from file: ' + yellow(inputGraphDBSchemaFile); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error reading graphDB schema file: ' + inputGraphDBSchemaFile); + msg = 'Error reading graphDB schema file: ' + yellow(inputGraphDBSchemaFile); + console.error(msg); + loggerLog(msg +": " + JSON.stringify(err)); process.exit(1); } } + // Check if Neptune target is db or graph + if ( inputGraphDBSchemaNeptuneEndpoint.includes('neptune-graph') || + createUpdatePipelineEndpoint.includes('neptune-graph') || + inputCDKpipelineEnpoint.includes('neptune-graph')) + neptuneType = 'neptune-graph'; + // Get Neptune schema from endpoint if (inputGraphDBSchemaNeptuneEndpoint != '' && inputGraphDBSchema == '' && inputGraphDBSchemaFile == '') { let endpointParts = inputGraphDBSchemaNeptuneEndpoint.split(':'); if (endpointParts.length < 2) { - console.error('Neptune endpoint must be in the form of host:port'); + msg = 'Neptune endpoint must be in the form of host:port'; + console.error(msg); + loggerLog(msg); process.exit(1); } let neptuneHost = endpointParts[0]; let neptunePort = endpointParts[1]; - let neptuneRegionParts = inputGraphDBSchemaNeptuneEndpoint.split('.'); - let neptuneRegion = neptuneRegionParts[2]; + let neptuneRegionParts = inputGraphDBSchemaNeptuneEndpoint.split('.'); + let neptuneRegion = ''; + if (neptuneType == 'neptune-db') + neptuneRegion = neptuneRegionParts[2]; + else + neptuneRegion = neptuneRegionParts[1]; - if (!quiet) console.log('Getting Neptune schema from endpoint: ' + yellow(neptuneHost + ':' + neptunePort)); - setGetNeptuneSchemaParameters(neptuneHost, 
neptunePort, neptuneRegion, true); + msg = 'Getting Neptune schema from endpoint: ' + yellow(neptuneHost + ':' + neptunePort); + if (!quiet) console.log(msg); + loggerLog(msg); + + setGetNeptuneSchemaParameters(neptuneHost, neptunePort, neptuneRegion, true, neptuneType); let startTime = performance.now(); inputGraphDBSchema = await getNeptuneSchema(quiet); let endTime = performance.now(); let executionTime = endTime - startTime; - if (!quiet) console.log(`Execution time: ${(executionTime/1000).toFixed(2)} seconds`); - if (!quiet) console.log(''); + msg = 'Execution time: ' + (executionTime/1000).toFixed(2) + ' seconds'; + if (!quiet) console.log(msg); + loggerLog(msg); } // Option 2: inference GraphQL schema from graphDB schema if (inputGraphDBSchema != '' && inputGraphQLSchema == '' && inputGraphQLSchemaFile == '') { - if (!quiet) console.log('Inferencing GraphQL schema from graphDB schema'); + msg = 'Inferencing GraphQL schema from graphDB schema'; + if (!quiet) console.log(msg); + loggerLog(msg); inputGraphQLSchema = graphDBInferenceSchema(inputGraphDBSchema, outputSchemaMutations); if (!quiet) console.log(''); } @@ -303,9 +355,13 @@ async function main() { if (inputGraphQLSchema == '' && inputGraphQLSchemaFile != '') { try { inputGraphQLSchema = readFileSync(inputGraphQLSchemaFile, 'utf8'); - if (!quiet) console.log('Loaded GraphQL schema from file: ' + inputGraphQLSchemaFile); + msg = 'Loaded GraphQL schema from file: ' + yellow(inputGraphQLSchemaFile); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error reading GraphQL schema file: ' + inputGraphQLSchemaFile); + msg = 'Error reading GraphQL schema file: ' + yellow(inputGraphQLSchemaFile); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); process.exit(1); } } @@ -314,9 +370,13 @@ async function main() { if (inputGraphQLSchemaChangesFile != '') { try { inputGraphQLSchemaChanges = readFileSync(inputGraphQLSchemaChangesFile, 'utf8'); - if (!quiet) 
console.log('Loaded GraphQL schema changes from file: ' + inputGraphQLSchemaChangesFile); + msg = 'Loaded GraphQL schema changes from file: ' + yellow(inputGraphQLSchemaChangesFile); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error reading GraphQL schema changes file: ' + inputGraphQLSchemaChangesFile); + msg = 'Error reading GraphQL schema changes file: ' + yellow(inputGraphQLSchemaChangesFile); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); process.exit(1); } } @@ -328,20 +388,26 @@ async function main() { } if (createUpdatePipelineEndpoint == '' && createUpdatePipelineRegion == '' && createUpdatePipelineNeptuneDatabaseName == '') { - console.error('AWS pipeline: is required a Neptune endpoint, or a Neptune database name and region.'); + msg = 'AWS pipeline: is required a Neptune endpoint, or a Neptune database name and region.'; + console.error(msg); + loggerLog(msg); process.exit(1); } if (createUpdatePipelineEndpoint == '' && !createUpdatePipelineRegion == '' && createUpdatePipelineNeptuneDatabaseName == '') { - console.error('AWS pipeline: a Neptune database name is required.'); + msg = 'AWS pipeline: a Neptune database name is required.'; + console.error(msg); + loggerLog(msg); process.exit(1); } if (createUpdatePipelineEndpoint == '' && createUpdatePipelineRegion == '' && !createUpdatePipelineNeptuneDatabaseName == '') { - console.error('AWS pipeline: a Neptune database region is required.'); + msg = 'AWS pipeline: a Neptune database region is required.'; + console.error(msg); + loggerLog(msg); process.exit(1); } - if (createUpdatePipelineEndpoint != '') { + if (createUpdatePipelineEndpoint != '' && createUpdatePipelineRegion == '') { let parts = createUpdatePipelineEndpoint.split('.'); createUpdatePipelineNeptuneDatabaseName = parts[0]; createUpdatePipelineRegion = parts[2]; @@ -358,17 +424,23 @@ async function main() { } if (inputCDKpipelineEnpoint == '' && inputCDKpipelineRegion == '' && 
inputCDKpipelineDatabaseName == '') { - console.error('AWS CDK: is required a Neptune endpoint, or a Neptune database name and region.'); + msg = 'AWS CDK: is required a Neptune endpoint, or a Neptune database name and region.'; + console.error(msg); + loggerLog(msg); process.exit(1); } if (inputCDKpipelineEnpoint == '' && !inputCDKpipelineRegion == '' && inputCDKpipelineDatabaseName == '') { - console.error('AWS CDK: a Neptune database name is required.'); + msg = 'AWS CDK: a Neptune database name is required.'; + console.error(msg); + loggerLog(msg); process.exit(1); } if (inputCDKpipelineEnpoint == '' && inputCDKpipelineRegion == '' && !inputCDKpipelineDatabaseName == '') { - console.error('AWS CDK: a Neptune database region is required.'); + msg = 'AWS CDK: a Neptune database region is required.'; + console.error(msg); + loggerLog(msg); process.exit(1); } if (inputCDKpipelineEnpoint != '') { @@ -404,8 +476,6 @@ async function main() { // Outputs // **************************************************************************** - mkdirSync('./output', { recursive: true }); - // Output GraphQL schema no directives if (inputGraphQLSchema != '') { @@ -420,9 +490,13 @@ async function main() { try { writeFileSync(outputSchemaFile, outputSchema); - if (!quiet) console.log('Wrote GraphQL schema to file: ' + yellow(outputSchemaFile)); + msg = 'Wrote GraphQL schema to file: ' + yellow(outputSchemaFile); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error writing GraphQL schema to file: ' + outputSchemaFile); + msg = 'Error writing GraphQL schema to file: ' + yellow(outputSchemaFile); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } @@ -438,9 +512,13 @@ async function main() { try { writeFileSync(outputSourceSchemaFile, outputSourceSchema); - if (!quiet) console.log('Wrote GraphQL schema to file: ' + yellow(outputSourceSchemaFile)); + msg = 'Wrote GraphQL schema to file: ' + yellow(outputSourceSchemaFile); + if 
(!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error writing GraphQL schema to file: ' + outputSourceSchemaFile); + msg = 'Error writing GraphQL schema to file: ' + yellow(outputSourceSchemaFile); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } @@ -455,9 +533,13 @@ async function main() { try { writeFileSync(outputNeptuneSchemaFile, inputGraphDBSchema); - if (!quiet) console.log('Wrote Neptune schema to file: ' + yellow(outputNeptuneSchemaFile)); + msg = 'Wrote Neptune schema to file: ' + yellow(outputNeptuneSchemaFile); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error writing Neptune schema to file: ' + outputNeptuneSchemaFile); + msg = 'Error writing Neptune schema to file: ' + yellow(outputNeptuneSchemaFile); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } @@ -468,9 +550,13 @@ async function main() { try { writeFileSync(outputLambdaResolverFile, outputLambdaResolver); - if (!quiet) console.log('Wrote Lambda resolver to file: ' + yellow(outputLambdaResolverFile)); + msg = 'Wrote Lambda resolver to file: ' + yellow(outputLambdaResolverFile); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error writing Lambda resolver to file: ' + outputLambdaResolverFile); + msg = 'Error writing Lambda resolver to file: ' + yellow(outputLambdaResolverFile); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } @@ -485,10 +571,13 @@ async function main() { try { writeFileSync(outputJSResolverFile, outputJSResolver); - //writeFileSync('./test/output.resolver.graphql.js', outputJSResolver); // Remove, for development and test only - if (!quiet) console.log('Wrote Javascript resolver to file: ' + yellow(outputJSResolverFile)); + msg = 'Wrote Javascript resolver to file: ' + yellow(outputJSResolverFile); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (err) { - console.error('Error writing Javascript resolver 
to file: ' + outputJSResolverFile); + msg = 'Error writing Javascript resolver to file: ' + yellow(outputJSResolverFile); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } @@ -515,10 +604,14 @@ async function main() { await createLambdaDeploymentPackage(__dirname + outputLambdaPackagePath, outputLambdaResolverZipFile); if (!quiet) { spinner.stop(); - console.log('Wrote Lambda ZIP file: ' + yellow(outputLambdaResolverZipFile)); + msg = 'Wrote Lambda ZIP file: ' + yellow(outputLambdaResolverZipFile); + console.log(msg); + loggerLog(msg); } } catch (err) { - console.error('Error creating Lambda ZIP file: ' + err); + msg = 'Error creating Lambda ZIP file: ' + yellow(err); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } } @@ -529,13 +622,31 @@ async function main() { try { let endpointParts = createUpdatePipelineEndpoint.split(':'); if (endpointParts.length < 2) { - console.error('Neptune endpoint must be in the form of host:port'); + msg = 'Neptune endpoint must be in the form of host:port'; + console.error(msg); + loggerLog(msg); process.exit(1); } let neptuneHost = endpointParts[0]; let neptunePort = endpointParts[1]; - if (!quiet) console.log('\nCreating AWS pipeline resources') + let neptuneHostRO = ''; + let neptunePortRO = ''; + if (createUpdatePipelineEndpointRO != '') { + let endpointPartsRO = createUpdatePipelineEndpointRO.split(':'); + if (endpointPartsRO.length < 2) { + msg = 'Neptune read only endpoint must be in the form of host:port'; + console.error(msg); + loggerLog(msg); + process.exit(1); + } + neptuneHostRO = endpointPartsRO[0]; + neptunePortRO = endpointPartsRO[1]; + } + + msg = 'Creating AWS pipeline resources'; + if (!quiet) console.log('\n' + msg); + loggerLog(msg); await createUpdateAWSpipeline( createUpdatePipelineName, createUpdatePipelineNeptuneDatabaseName, createUpdatePipelineRegion, @@ -548,20 +659,30 @@ async function main() { isNeptuneIAMAuth, neptuneHost, neptunePort, - outputFolderPath ); + 
neptuneHostRO, + neptunePortRO, + createUpdatePipelineDualLambdas, + outputFolderPath, + neptuneType ); } catch (err) { - console.error('Error creating AWS pipeline: ' + err); + msg = 'Error creating AWS pipeline: ' + err; + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } } // Output CDK if (inputCDKpipeline) { try { - if (!quiet) console.log('\nCreating CDK File') + msg = 'Creating CDK File'; + if (!quiet) console.log('\n' + msg); + loggerLog(msg); let endpointParts = inputCDKpipelineEnpoint.split(':'); if (endpointParts.length < 2) { - console.error('Neptune endpoint must be in the form of host:port'); + msg = 'Neptune endpoint must be in the form of host:port'; + console.error(msg); + loggerLog(msg); process.exit(1); } let neptuneHost = endpointParts[0]; @@ -584,25 +705,36 @@ async function main() { isNeptuneIAMAuth, neptuneHost, neptunePort, - outputFolderPath ); + outputFolderPath, + neptuneType ); } catch (err) { - console.error('Error creating CDK File: ' + err); + msg = 'Error creating CDK File: ' + yellow(err); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); } } + msg = 'Done'; if (!quiet) console.log('\nDone\n'); + loggerLog(msg); } // Remove AWS Pipeline if ( removePipelineName != '') { - if (!quiet) console.log('\nRemoving pipeline AWS resources, name: ' + yellow(removePipelineName)) + msg = 'Removing pipeline AWS resources, name: ' + yellow(removePipelineName); + if (!quiet) console.log('\n' + msg); + loggerLog(msg); let resourcesToRemove = null; let resourcesFile = `${outputFolderPath}/${removePipelineName}-resources.json`; - if (!quiet) console.log('Using file: ' + yellow(resourcesFile)); + msg = 'Using file: ' + resourcesFile; + if (!quiet) console.log(msg); + loggerLog(msg); try { resourcesToRemove = readFileSync(resourcesFile, 'utf8'); } catch (err) { - console.error('Error reading AWS pipeline resources file: ' + resourcesFile + ' ' + err); + msg = 'Error reading AWS pipeline resources file: ' + 
yellow(resourcesFile + ' ' + err); + console.error(msg); + loggerLog(msg + ": " + JSON.stringify(err)); process.exit(1); } await removeAWSpipelineResources(JSON.parse(resourcesToRemove), quiet); diff --git a/src/pipelineResources.js b/src/pipelineResources.js index 9e28b30..9995cac 100644 --- a/src/pipelineResources.js +++ b/src/pipelineResources.js @@ -47,6 +47,7 @@ import fs from 'fs'; import archiver from 'archiver'; import ora from 'ora'; import { exit } from "process"; +import { loggerLog } from "./logger.js"; // Input let NEPTUNE_DB_NAME = ''; @@ -63,6 +64,8 @@ let thisOutputFolderPath = './output'; let pipelineExists = false; let NEPTUNE_HOST = null; let NEPTUNE_PORT = null; +let NEPTUNE_HOST_RO = null; +let NEPTUNE_PORT_RO = null; let NEPTUNE_DBSubnetGroup = null; let NEPTUNE_DBSubnetIds = []; let NEPTUNE_VpcSecurityGroupId = null; @@ -70,13 +73,14 @@ let NEPTUME_IAM_AUTH = false; let NEPTUNE_CURRENT_VERSION = ''; let NEPTUNE_CURRENT_IAM = false; let NEPTUNE_IAM_POLICY_RESOURCE = '*'; +let DUAL_LAMBDA = false; let LAMBDA_ROLE = ''; let LAMBDA_ARN = ''; -//let APPSYNC_API_ID = ''; +let NEPTUNE_TYPE = 'neptune-db'; let ZIP = null; let RESOURCES = {}; let RESOURCES_FILE = ''; - +let msg = ''; const sleep = ms => new Promise(r => setTimeout(r, ms)); // alternative: import { setTimeout } from timers/promises let spinner = null; @@ -96,13 +100,15 @@ async function checkPipeline() { let appSyncExists = false; let roleExists = false; - if (!quiet) spinner = ora('Checking pipeline resources...').start(); + msg = 'Checking pipeline resources...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { - const command = new GetFunctionCommand({FunctionName: NAME +'LambdaFunction'}); - //const response = await lambdaClient.send(command); + const command = new GetFunctionCommand({FunctionName: NAME +'LambdaFunction'}); await lambdaClient.send(command); lambdaExists = true; - } catch (error) { + } catch (error) { + loggerLog("checkPipeline GetFunctionCommand: " 
+ JSON.stringify(error)); lambdaExists = false; } @@ -116,6 +122,7 @@ async function checkPipeline() { } }); } catch (error) { + loggerLog("checkPipeline ListGraphqlApisCommand : " + JSON.stringify(error)); appSyncExists = false; } @@ -125,23 +132,48 @@ async function checkPipeline() { LAMBDA_ROLE = response.Role.Arn; roleExists = true; } catch (error) { + loggerLog("checkPipeline GetRoleCommand: " + JSON.stringify(error)); roleExists = false; } if (lambdaExists && appSyncExists && roleExists) { - if (!quiet) spinner.succeed('Pipeline exists.'); + msg = 'Pipeline exists.'; + if (!quiet) spinner.succeed(msg); + loggerLog(msg); pipelineExists = true; } else { - if (!quiet) spinner.warn('Pipeline does not exists.'); + msg = 'Pipeline does not exists.'; + if (!quiet) spinner.warn(msg); + loggerLog(msg); } if (lambdaExists && appSyncExists && roleExists) return; if (!lambdaExists && !appSyncExists && !roleExists) return; + + msg = 'One or more pipeline resources are missing.'; if (!quiet) console.log("One or more pipeline resources are missing."); - if (!lambdaExists && !quiet) console.log(" Lambda " + NAME + "LambdaFunction" + " is missing." ); - if (!roleExists && !quiet) console.log(" Role " + NAME + "LambdaExecutionRole" + " is missing." ); - if (!appSyncExists && !quiet) console.log(" AppSync " + NAME + "API" + " is missing." 
); - console.error("Fix the issue manually or create the pipeline resources with a new name.\n"); + loggerLog(msg); + + if (!lambdaExists && !quiet) { + msg = ' Lambda ' + yellow(NAME) + 'LambdaFunction' + ' is missing.'; + console.log(msg); + loggerLog(msg); + } + + if (!roleExists && !quiet) { + msg = ' Role ' + yellow(NAME) + 'LambdaExecutionRole' + ' is missing.'; + console.log(msg); + loggerLog(msg); + } + + if (!appSyncExists && !quiet) { + msg = ' AppSync ' + yellow(NAME) + 'API' + ' is missing.'; + console.log(msg); + loggerLog(msg); + } + + msg = 'Fix the issue manually or create the pipeline resources with a new name.'; + console.error(msg + "\n"); process.exit(1); } @@ -170,31 +202,37 @@ async function getNeptuneClusterinfoBy(name, region) { } -async function getNeptuneClusterinfo() { - const neptuneClient = new NeptuneClient({region: REGION}); +async function getNeptuneClusterinfo() { + if (NEPTUNE_TYPE == 'neptune-db') { + const neptuneClient = new NeptuneClient({region: REGION}); - const params = { - DBClusterIdentifier: NEPTUNE_DB_NAME - }; + const params = { + DBClusterIdentifier: NEPTUNE_DB_NAME + }; - const data = await neptuneClient.send(new DescribeDBClustersCommand(params)); - - const input = { // DescribeDBSubnetGroupsMessage - DBSubnetGroupName: data.DBClusters[0].DBSubnetGroup, - }; - const command = new DescribeDBSubnetGroupsCommand(input); - const response = await neptuneClient.send(command); + const data = await neptuneClient.send(new DescribeDBClustersCommand(params)); - NEPTUNE_HOST = data.DBClusters[0].Endpoint; - NEPTUNE_PORT = data.DBClusters[0].Port.toString(); - NEPTUNE_DBSubnetGroup = data.DBClusters[0].DBSubnetGroup; - NEPTUNE_VpcSecurityGroupId = data.DBClusters[0].VpcSecurityGroups[0].VpcSecurityGroupId; - NEPTUNE_CURRENT_IAM = data.DBClusters[0].IAMDatabaseAuthenticationEnabled; - NEPTUNE_CURRENT_VERSION = data.DBClusters[0].EngineVersion; - NEPTUNE_IAM_POLICY_RESOURCE = `${data.DBClusters[0].DBClusterArn.substring(0, 
data.DBClusters[0].DBClusterArn.lastIndexOf(':cluster')).replace('rds', 'neptune-db')}:${data.DBClusters[0].DbClusterResourceId}/*`; - response.DBSubnetGroups[0].Subnets.forEach(element => { - NEPTUNE_DBSubnetIds.push(element.SubnetIdentifier); - }); + const input = { // DescribeDBSubnetGroupsMessage + DBSubnetGroupName: data.DBClusters[0].DBSubnetGroup, + }; + const command = new DescribeDBSubnetGroupsCommand(input); + const response = await neptuneClient.send(command); + + NEPTUNE_HOST = data.DBClusters[0].Endpoint; + NEPTUNE_PORT = data.DBClusters[0].Port.toString(); + NEPTUNE_DBSubnetGroup = data.DBClusters[0].DBSubnetGroup; + NEPTUNE_VpcSecurityGroupId = data.DBClusters[0].VpcSecurityGroups[0].VpcSecurityGroupId; + NEPTUNE_CURRENT_IAM = data.DBClusters[0].IAMDatabaseAuthenticationEnabled; + NEPTUNE_CURRENT_VERSION = data.DBClusters[0].EngineVersion; + NEPTUNE_IAM_POLICY_RESOURCE = `${data.DBClusters[0].DBClusterArn.substring(0, data.DBClusters[0].DBClusterArn.lastIndexOf(':cluster')).replace('rds', 'neptune-db')}:${data.DBClusters[0].DbClusterResourceId}/*`; + response.DBSubnetGroups[0].Subnets.forEach(element => { + NEPTUNE_DBSubnetIds.push(element.SubnetIdentifier); + }); + } else { + msg = 'AWS SDK for Neptune Analytics is not available, yet.'; + loggerLog(msg); + throw new Error(msg); + } } @@ -202,7 +240,9 @@ async function createLambdaRole() { const iamClient = new IAMClient({region: REGION}); // Create Lambda principal role - if (!quiet) spinner = ora('Creating Lambda principal role ...').start(); + msg = 'Creating Lambda principal role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); const params = { AssumeRolePolicyDocument: JSON.stringify({ Version: "2012-10-17", @@ -220,11 +260,15 @@ async function createLambdaRole() { //await waitUntilRoleExists({ client: iamClient, maxWaitTime: 180 }, { RoleName: data.Role.RoleName }); // does not work :(, using sleep await sleep(10000); LAMBDA_ROLE = data.Role.Arn; - 
storeResource({LambdaExecutionRole: NAME +"LambdaExecutionRole"}); - if (!quiet) spinner.succeed('Role ARN: ' + yellow(LAMBDA_ROLE)); + storeResource({LambdaExecutionRole: NAME +"LambdaExecutionRole"}); + msg = 'Role ARN: ' + yellow(LAMBDA_ROLE); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); // Attach to Lambda role the AWSLambdaBasicExecutionRole - if (!quiet) spinner = ora('Attaching policies to the Lambda principal role ...').start(); + msg = 'Attaching AWSLambdaBasicExecutionRole to Lambda Role'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); let input = { RoleName: NAME +"LambdaExecutionRole", PolicyArn: "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole", @@ -232,24 +276,36 @@ async function createLambdaRole() { let command = new AttachRolePolicyCommand(input); await iamClient.send(command); storeResource({LambdaExecutionPolicy1: input.PolicyArn}); - if (!quiet) spinner.succeed(`Attached ${yellow('AWSLambdaBasicExecutionRole')} to Lambda Role`); + msg = `Attached ${yellow('AWSLambdaBasicExecutionRole')} to Lambda Role`; + if (!quiet) spinner.succeed(msg); + loggerLog(msg); if (NEPTUME_IAM_AUTH) { + + let action = []; + if (NEPTUNE_TYPE == 'neptune-db') { + action = [ + "neptune-db:DeleteDataViaQuery", + "neptune-db:connect", + "neptune-db:ReadDataViaQuery", + "neptune-db:WriteDataViaQuery" + ]; + } else { + action = "neptune-graph:*" + } + // Create Neptune query policy - if (!quiet) spinner = ora('Creating policy for Neptune queries ...').start(); + msg = 'Creating policy for Neptune queries'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); let command = new CreatePolicyCommand({ PolicyDocument: JSON.stringify({ Version: "2012-10-17", Statement: [ { Effect: "Allow", - Action: [ - "neptune-db:DeleteDataViaQuery", - "neptune-db:connect", - "neptune-db:ReadDataViaQuery", - "neptune-db:WriteDataViaQuery" - ], + Action: action, Resource: NEPTUNE_IAM_POLICY_RESOURCE }, ], @@ -261,10 +317,14 @@ async function 
createLambdaRole() { const policyARN = response.Policy.Arn; storeResource({NeptuneQueryPolicy: policyARN}); await sleep(5000); - if (!quiet) spinner.succeed('Neptune query policy ARN: ' + yellow(policyARN)); + msg = 'Neptune query policy ARN: ' + yellow(policyARN); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); // Attach to Lambda role the Neptune query policy. - if (!quiet) spinner = ora('Attaching policy for Neptune queries to Lambda role ...').start(); + msg = 'Attaching Neptune query policy to Lambda role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); input = { RoleName: NAME +"LambdaExecutionRole", PolicyArn: policyARN, @@ -273,12 +333,15 @@ async function createLambdaRole() { await iamClient.send(command); storeResource({LambdaExecutionPolicy2: input.PolicyArn}); await sleep(10000); - if (!quiet) spinner.succeed(`Attached ${yellow('Neptune Query Policy')} policies to Lambda Role`); + msg = `Attached ${yellow('Neptune Query Policy')} policies to Lambda Role`; + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } else { - - - if (!quiet) spinner = ora('Attaching policy for Neptune VPC to Lambda role ...').start(); + + msg = 'Attaching policy for Neptune VPC to Lambda role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); input = { RoleName: NAME +"LambdaExecutionRole", PolicyArn: "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole", @@ -287,7 +350,9 @@ async function createLambdaRole() { await iamClient.send(command); storeResource({LambdaExecutionPolicy2: input.PolicyArn}); await sleep(10000); - if (!quiet) spinner.succeed(`Attached ${yellow('AWSLambdaVPCAccessExecutionRole')} policies to role`); + msg = `Attached ${yellow('AWSLambdaVPCAccessExecutionRole')} policies to role`; + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } } @@ -310,7 +375,8 @@ async function createDeploymentPackage(folderPath) { async function createLambdaFunction() { const lambdaClient = new LambdaClient({region: 
REGION}); - if (!quiet) spinner = ora('Creating Lambda function ...').start(); + msg = 'Creating Lambda function ...'; + if (!quiet) spinner = ora(msg).start(); let params; if (NEPTUME_IAM_AUTH) { @@ -330,7 +396,8 @@ async function createLambdaFunction() { "NEPTUNE_HOST": NEPTUNE_HOST, "NEPTUNE_PORT": NEPTUNE_PORT, "NEPTUNE_IAM_AUTH_ENABLED": "true", - "LOGGING_ENABLED": "false" + "LOGGING_ENABLED": "false", + "NEPTUNE_TYPE": NEPTUNE_TYPE }, }, }; @@ -365,7 +432,9 @@ async function createLambdaFunction() { //await sleep(5000); LAMBDA_ARN = data.FunctionArn; storeResource({LambdaFunction: NAME +'LambdaFunction'}); - if (!quiet) spinner.succeed('Lambda Name: ' + yellow(NAME +'LambdaFunction') + ' ARN: ' + yellow(LAMBDA_ARN)); + msg = 'Lambda Name: ' + yellow(NAME +'LambdaFunction') + ' ARN: ' + yellow(LAMBDA_ARN); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } @@ -597,7 +666,10 @@ async function attachResolvers(client, apiId, functionId) { async function attachResolverToSchemaField (client, apiId, functionId, typeName, fieldName) { // attach resolvers to schema - if (!quiet) spinner = ora('Attaching resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName) + ' ...').start(); + msg = 'Attaching resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName) + ' ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); + const input = { apiId: apiId, typeName: typeName, @@ -628,7 +700,9 @@ export function response(ctx) { const command = new CreateResolverCommand(input); await client.send(command); await sleep(200); - if (!quiet) spinner.succeed('Attached resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName)); + msg = 'Attached resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } @@ -639,33 +713,48 @@ async function removeAWSpipelineResources(resources, quietI) { const iamClient = new IAMClient({region: 
resources.region}); // Appsync API + msg = 'Deleting AppSync API ...'; if (!quiet) spinner = ora('Deleting AppSync API ...').start(); + loggerLog(msg); + try { const input = { apiId: resources.AppSyncAPI }; const command = new DeleteGraphqlApiCommand(input); await appSyncClient.send(command); - if (!quiet) spinner.succeed('Deleted API id: ' + yellow(resources.AppSyncAPI)); - } catch (error) { - if (!quiet) spinner.fail('AppSync API delete failed: ' + error); + msg = 'Deleted API id: ' + yellow(resources.AppSyncAPI); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); + } catch (error) { + msg = 'AppSync API delete failed: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } // Lambda - if (!quiet) spinner = ora('Deleting Lambda function ...').start(); + msg = 'Deleting Lambda function ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { const input = { FunctionName: resources.LambdaFunction }; const command = new DeleteFunctionCommand(input); await lambdaClient.send(command); - if (!quiet) spinner.succeed('Lambda function deleted: ' + yellow(resources.LambdaFunction)); + msg = 'Deleted Lambda function: ' + yellow(resources.LambdaFunction); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Lambda function fail to delete: ' + error); + msg = 'Lambda function fail to delete: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } // Lambda execution role - if (!quiet) spinner = ora('Detaching IAM policies from role ...').start(); + msg = 'Detaching IAM policies from role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { let input = { PolicyArn: resources.LambdaExecutionPolicy1, @@ -673,12 +762,18 @@ async function removeAWSpipelineResources(resources, quietI) { }; let command = new DetachRolePolicyCommand(input); await iamClient.send(command); - if (!quiet) 
spinner.succeed('Detached policy: ' + yellow(resources.LambdaExecutionPolicy1) + " from role: " + yellow(resources.LambdaExecutionRole)); + msg = 'Detached policy: ' + yellow(resources.LambdaExecutionPolicy1) + " from role: " + yellow(resources.LambdaExecutionRole); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Detach policy failed: ' + error); + msg = 'Detach policy failed: ' + error.error; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } - if (!quiet) spinner = ora('Detaching IAM policies from role ...').start(); + msg = 'Detaching IAM policies from role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { let input = { PolicyArn: resources.LambdaExecutionPolicy2, @@ -686,41 +781,59 @@ async function removeAWSpipelineResources(resources, quietI) { }; let command = new DetachRolePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Detached policy: ' + yellow(resources.LambdaExecutionPolicy1) + " from role: " + yellow(resources.LambdaExecutionRole)); + msg = 'Detached policy: ' + yellow(resources.LambdaExecutionPolicy2) + " from role: " + yellow(resources.LambdaExecutionRole); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Detach policy failed: ' + error); + msg = 'Detach policy failed: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } // Delete Neptune query Policy if (resources.NeptuneQueryPolicy != undefined) { - if (!quiet) spinner = ora('Deleting policy ...').start(); + msg = 'Deleting policy ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { const input = { PolicyArn: resources.NeptuneQueryPolicy, }; const command = new DeletePolicyCommand(input); - await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted policy: ' + yellow(resources.NeptuneQueryPolicy)); + await 
iamClient.send(command); + msg = 'Deleted policy: ' + yellow(resources.NeptuneQueryPolicy); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Delete policy failed: ' + error); + msg = 'Delete policy failed: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } } // Delete Role - if (!quiet) spinner = ora('Deleting role ...').start(); + msg = 'Deleting role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { const input = { RoleName: resources.LambdaExecutionRole, }; const command = new DeleteRoleCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted role: ' + yellow(resources.LambdaExecutionRole)); + msg = 'Deleted role: ' + yellow(resources.LambdaExecutionRole); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Delete role failed: ' + error); + msg = 'Delete role failed: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } // AppSync Lambda role - if (!quiet) spinner = ora('Detaching policy from AppSync Lambda role ...').start(); + msg = 'Detaching policy from AppSync Lambda role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { let input = { PolicyArn: resources.LambdaInvokePolicy, @@ -728,41 +841,59 @@ async function removeAWSpipelineResources(resources, quietI) { }; let command = new DetachRolePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Detached policy: ' + yellow(resources.LambdaInvokePolicy) + " from role: " + yellow(resources.LambdaInvokeRole)); + msg = 'Detached policy: ' + yellow(resources.LambdaInvokePolicy) + " from role: " + yellow(resources.LambdaInvokeRole); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Detach policy failed: ' + error); + msg = 'Detach policy failed: ' + error.message; + if 
(!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } // Delete Policy - if (!quiet) spinner = ora('Deleting policy ...').start(); + msg = 'Deleting policy ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { const input = { PolicyArn: resources.LambdaInvokePolicy, }; const command = new DeletePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted policy: ' + yellow(resources.LambdaInvokePolicy)); + msg = 'Deleted policy: ' + yellow(resources.LambdaInvokePolicy); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Delete policy failed: ' + error); + msg = 'Delete policy failed: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } // Delete Role - if (!quiet) spinner = ora('Deleting role ...').start(); + msg = 'Deleting role ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); try { const input = { RoleName: resources.LambdaInvokeRole, }; const command = new DeleteRoleCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted role: ' + yellow(resources.LambdaInvokeRole)); + msg = 'Deleted role: ' + yellow(resources.LambdaInvokeRole); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Delete role failed: ' + error); + msg = 'Delete role failed: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); } } async function updateLambdaFunction(resources) { - if (!quiet) spinner = ora('Updating Lambda function code ...').start(); + msg = 'Updating Lambda function code ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); const lambdaClient = new LambdaClient({region: resources.region}); const input = { FunctionName: resources.LambdaFunction, @@ -770,14 +901,18 @@ async function updateLambdaFunction(resources) { }; const command = new 
UpdateFunctionCodeCommand(input); await lambdaClient.send(command); - if (!quiet) spinner.succeed('Lambda function code updated: ' + yellow(resources.LambdaFunction)); + msg = 'Lambda function code updated: ' + yellow(resources.LambdaFunction); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } async function updateAppSyncAPI(resources) { const appSyncClient = new AppSyncClient({region: resources.region}); - if (!quiet) spinner = ora('Updating AppSync API schema ...').start(); + msg = 'Updating AppSync API schema ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); let encoder = new TextEncoder(); let definition = encoder.encode(APPSYNC_SCHEMA); @@ -788,13 +923,31 @@ async function updateAppSyncAPI(resources) { let command = new StartSchemaCreationCommand(params); await appSyncClient.send(command); await sleep(5000); - if (!quiet) spinner.succeed('Schema updated'); + msg = 'Schema updated'; + if (!quiet) spinner.succeed(msg); + loggerLog(msg); await attachResolvers(appSyncClient, resources.AppSyncAPI, resources.AppSyncAPIFunction); } -async function createUpdateAWSpipeline (pipelineName, neptuneDBName, neptuneDBregion, appSyncSchema, schemaModel, lambdaFilesPath, addMutations, quietI, __dirname, isNeptuneIAMAuth, neptuneHost, neptunePort, outputFolderPath) { +async function createUpdateAWSpipeline ( pipelineName, + neptuneDBName, + neptuneDBregion, + appSyncSchema, + schemaModel, + lambdaFilesPath, + addMutations, + quietI, + __dirname, + isNeptuneIAMAuth, + neptuneHost, + neptunePort, + neptuneHostRO, + neptunePortRO, + dualLambda, + outputFolderPath, + neptuneType) { NAME = pipelineName; REGION = neptuneDBregion; @@ -808,9 +961,16 @@ async function createUpdateAWSpipeline (pipelineName, neptuneDBName, neptuneDBre NEPTUME_IAM_AUTH = isNeptuneIAMAuth; NEPTUNE_HOST = neptuneHost; NEPTUNE_PORT = neptunePort; + NEPTUNE_HOST_RO = neptuneHostRO; + NEPTUNE_PORT_RO = neptunePortRO; + DUAL_LAMBDA = dualLambda; thisOutputFolderPath = outputFolderPath; + 
NEPTUNE_TYPE = neptuneType; - if (!quiet) console.log('\nCheck if the pipeline resources have been created'); + msg = '\nCreating or updating AWS pipeline resources ...\n'; + //if (!quiet) spinner = ora(msg).start(); + console.log(msg); + loggerLog(msg); await checkPipeline(); if (!pipelineExists) { @@ -824,85 +984,143 @@ async function createUpdateAWSpipeline (pipelineName, neptuneDBName, neptuneDBre if (!quiet) spinner.succeed('Got Neptune Cluster Info'); if (isNeptuneIAMAuth) { if (!NEPTUNE_CURRENT_IAM) { - console.error("The Neptune database authentication is set to VPC."); - console.error("Remove the --create-update-aws-pipeline-neptune-IAM option."); + msg = 'The Neptune database authentication is set to VPC.'; + console.error(msg); + loggerLog(msg); + msg = 'Remove the --create-update-aws-pipeline-neptune-IAM option.'; + console.error(msg); + loggerLog(msg); exit(1); } } else { if (NEPTUNE_CURRENT_IAM) { - console.error("The Neptune database authentication is set to IAM."); - console.error("Add the --create-update-aws-pipeline-neptune-IAM option."); + msg = 'The Neptune database authentication is set to IAM.'; + console.error(msg); + loggerLog(msg); + msg = 'Add the --create-update-aws-pipeline-neptune-IAM option.'; + console.error(msg); + loggerLog(msg); exit(1); } else { - if (!quiet) console.log(`Subnet Group: ` + yellow(NEPTUNE_DBSubnetGroup)); + msg = `Subnet Group: ` + yellow(NEPTUNE_DBSubnetGroup); + if (!quiet) console.log(msg); + loggerLog(msg); } } if (NEPTUNE_CURRENT_VERSION != '') { const v = NEPTUNE_CURRENT_VERSION; if (lambdaFilesPath.includes('SDK') == true && - (v == '1.2.1.0' || v == '1.2.0.2' || v == '1.2.0.1' || v == '1.2.0.0' || v == '1.1.1.0' || v == '1.1.0.0')) { - console.error("Neptune SDK query is supported starting with Neptune versions 1.2.2.0"); - console.error("Switch to Neptune HTTPS query with option --output-resolver-query-https"); + (v == '1.2.1.0' || v == '1.2.0.2' || v == '1.2.0.1' || v == '1.2.0.0' || v == '1.1.1.0' || v 
== '1.1.0.0')) { + msg = "Neptune SDK query is supported starting with Neptune versions 1.2.2.0"; + console.error(msg); + loggerLog(msg); + msg = "Switch to Neptune HTTPS query with option --output-resolver-query-https"; + console.error(msg); + loggerLog(msg); exit(1); } } } catch (error) { - if (!quiet) spinner.fail("Error getting Neptune Cluster Info."); + msg = 'Error getting Neptune Cluster Info.'; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); if (!isNeptuneIAMAuth) { - console.error("VPC data is not available to proceed."); + msg = "VPC data is not available to proceed."; + console.error(msg); + loggerLog(msg); exit(1); } else { - if (!quiet) console.log("Could not read the database ARN to restrict the Lambda permissions. \nTo increase security change the resource in the Neptune Query policy.") - if (!quiet) console.log("Proceeding without getting Neptune Cluster info."); + msg = "Could not read the database ARN to restrict the Lambda permissions. 
\nTo increase security change the resource in the Neptune Query policy."; + if (!quiet) console.log(msg); + loggerLog(msg); + msg = "Proceeding without getting Neptune Cluster info."; + if (!quiet) console.log(msg); + loggerLog(msg); } } - if (!quiet) console.log('Create ZIP'); - if (!quiet) spinner = ora('Creating ZIP ...').start(); + msg = 'Create ZIP'; + if (!quiet) console.log(msg); + loggerLog(msg); + msg = 'Creating ZIP ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); ZIP = await createDeploymentPackage(LAMBDA_FILES_PATH) - if (!quiet) spinner.succeed('Created ZIP File: ' + yellow(LAMBDA_FILES_PATH)); + msg = 'Created ZIP File: ' + yellow(LAMBDA_FILES_PATH); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); - if (!quiet) console.log('Create Lambda execution role'); + msg = 'Create Lambda execution role'; + if (!quiet) console.log(msg); + loggerLog(msg); await createLambdaRole(); - if (!quiet) console.log('Create Lambda function'); + msg = 'Create Lambda function'; + if (!quiet) console.log(msg); + loggerLog(msg); await createLambdaFunction(); - if (!quiet) console.log('Create AppSync API'); + msg = 'Create AppSync API'; + if (!quiet) console.log(msg); + loggerLog(msg); await createAppSyncAPI(); - if (!quiet) console.log('Saved resorces to file: ' + yellow(RESOURCES_FILE)); + msg = 'Saved resorces to file: ' + yellow(RESOURCES_FILE); + if (!quiet) console.log(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.fail('Error creating resources: ' + error); - console.error('Rolling back resources.'); + msg = 'Error creating resources: ' + error.message; + if (!quiet) spinner.fail(msg); + loggerLog(msg + " : " + JSON.stringify(error)); + msg = 'Rolling back resources.'; + console.error(msg); + loggerLog(msg); await removeAWSpipelineResources(RESOURCES, quiet); return; } } else { - if (!quiet) console.log('Update resources'); + msg = 'Update resources'; + if (!quiet) console.log(msg); + loggerLog(msg); let resources = null; try { - 
if (!quiet) spinner = ora('Loading resources file ...').start(); + msg = 'Loading resources file ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); resources = JSON.parse(fs.readFileSync(RESOURCES_FILE, 'utf8')); - if (!quiet) spinner.succeed('Loaded resources from file: ' + yellow(RESOURCES_FILE)); + msg = 'Loaded resources from file: ' + yellow(RESOURCES_FILE); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); } catch (error) { - if (!quiet) spinner.warn('Error loading resources file: ' + RESOURCES_FILE + ' ' + error); + msg = 'Error loading resources file: ' + RESOURCES_FILE + ' ' + error.message; + if (!quiet) spinner.warn(msg); + loggerLog(msg + " : " + JSON.stringify(error)); return; } - if (!quiet) console.log('Create ZIP'); - if (!quiet) spinner = ora('Creating ZIP ...').start(); + msg = 'Create ZIP'; + if (!quiet) console.log(msg); + loggerLog(msg); + msg = 'Creating ZIP ...'; + if (!quiet) spinner = ora(msg).start(); + loggerLog(msg); ZIP = await createDeploymentPackage(LAMBDA_FILES_PATH) - if (!quiet) spinner.succeed('File: ' + yellow(LAMBDA_FILES_PATH)); + msg = 'Created ZIP File: ' + yellow(LAMBDA_FILES_PATH); + if (!quiet) spinner.succeed(msg); + loggerLog(msg); - if (!quiet) console.log('Update Lambda function'); + msg = 'Update Lambda function'; + if (!quiet) console.log(msg); + loggerLog(msg); await updateLambdaFunction(resources); - if (!quiet) console.log('Update AppSync API'); + msg = 'Update AppSync API'; + if (!quiet) console.log(msg); + loggerLog(msg); await updateAppSyncAPI(resources); } } diff --git a/src/resolverJS.js b/src/resolverJS.js index c7f6133..61197c5 100644 --- a/src/resolverJS.js +++ b/src/resolverJS.js @@ -11,6 +11,9 @@ permissions and limitations under the License. 
*/ import { readFileSync} from 'fs'; +import { loggerLog } from "./logger.js"; + +let msg = ''; function resolverJS (schemaModel, queryLanguage, queryClient, __dirname) { let code = ''; @@ -22,7 +25,9 @@ function resolverJS (schemaModel, queryLanguage, queryClient, __dirname) { code = code.toString().replace('TIMESTAMP HERE', (new Date()).toISOString()); code = code.toString().replace('INSERT SCHEMA DATA MODEL HERE', queryDataModelJSON); } catch (err) { - console.log('ERROR: No resolver template found.'); + msg = 'ERROR: No resolver template found.'; + console.log(msg); + loggerLog(msg + ' : ' + JSON.stringify(err)); } } return code; diff --git a/templates/Lambda4AppSyncHTTP/index.mjs b/templates/Lambda4AppSyncHTTP/index.mjs index ed22a69..06f6569 100644 --- a/templates/Lambda4AppSyncHTTP/index.mjs +++ b/templates/Lambda4AppSyncHTTP/index.mjs @@ -17,7 +17,7 @@ if (process.env.NEPTUNE_IAM_AUTH_ENABLED === 'true') { const interceptor = aws4Interceptor({ options: { region: AWS_REGION, - service: "neptune-db", + service: process.env.NEPTUNE_TYPE, }, credentials: { accessKeyId: AWS_ACCESS_KEY_ID,