Skip to content

Commit 7e19348

Browse files
authored
Add missing CDK files and pass variables in CDK event handler (aws#106)
This changeset fixes a couple of CDK deployment bugs: * changed `CDKPipelineApp.js` to call the `createLambdaDeploymentPackage` function so that it uses the same logic as the non-CDK deployment pipeline and will now include previously missing files (json schema file and queryHttpNeptune.mjs) which are necessary for lambda resolver execution * fixed the cdk app sync event handler to pass along variables as part of the event, which is necessary in order for the resolver to handle query variables This changeset also contains improvements to output file logic and test coverage: * changed `main.js` to determine an output file prefix once before any output files are created, to reduce code duplication * changed `main.js` to reference the neptune graph name as part of the output file prefix for scenarios where a pipeline is not being executed, so as to avoid overwriting existing files from a previous utility execution * changed `createZip` function in `zipPackage.js` to wait for the zip file stream to be closed before resolving the promise so that the integration tests would not attempt to read the zip file before it was finished being written * added verification of expected output files to test cases that were previously missing it * removed test function `checkOutputFilesSize` and its usages as it is unnecessary to verify output file sizes if their content is also being checked for equality * replaced `checkOutputFilesContent` test function which assumed output files had the same name as the comparison files with new function `compareFileContents` which is more flexible with file names
1 parent 77afc73 commit 7e19348

24 files changed

+321
-144
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ This release contains new support for Apollo Server integration.
3232
* Fixed queries generated from an input schema which retrieve an array to have an option parameter with limit ([#97](https://github.com/aws/amazon-neptune-for-graphql/pull/97))
3333
* Fixed nested edge subqueries to return an empty array if no results were found (([#100](https://github.com/aws/amazon-neptune-for-graphql/pull/100))
3434
* Fixed usage of variables with nested edge subqueries (([#100](https://github.com/aws/amazon-neptune-for-graphql/pull/100))
35+
* Fixed cdk output file to contain previously missing files that were necessary to execute the lambda resolver (([#106](https://github.com/aws/amazon-neptune-for-graphql/pull/106))
3536

3637

3738
### Features

src/CDKPipelineApp.js

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,10 @@ permissions and limitations under the License.
1212

1313
import { getNeptuneClusterDbInfoBy } from './pipelineResources.js'
1414
import { readFile, writeFile } from 'fs/promises';
15-
//import semver from 'semver';
16-
import fs from 'fs';
17-
import archiver from 'archiver';
1815
import ora from 'ora';
1916
import { loggerDebug, loggerError, loggerInfo, yellow } from "./logger.js";
17+
import path from "path";
18+
import { createLambdaDeploymentPackage } from "./zipPackage.js";
2019

2120
let NAME = '';
2221
let REGION = '';
@@ -33,7 +32,7 @@ let APPSYNC_SCHEMA = '';
3332
let APPSYNC_ATTACH_QUERY = [];
3433
let APPSYNC_ATTACH_MUTATION = [];
3534
let SCHEMA_MODEL = null;
36-
let thisOutputFolderPath = './output';
35+
let RELATIVE_OUTPUT_PATH = './output';
3736

3837
async function getSchemaFields(typeName) {
3938
/* To be updated as:
@@ -55,15 +54,16 @@ async function getSchemaFields(typeName) {
5554
return r;
5655
}
5756

58-
59-
async function createDeploymentFile(folderPath, zipFilePath) {
57+
async function createDeploymentFile(templateFolderPath, resolverFilePath) {
6058
try {
61-
const output = fs.createWriteStream(zipFilePath);
62-
const archive = archiver('zip', { zlib: { level: 9 } });
63-
archive.pipe(output);
64-
archive.directory(folderPath, false);
65-
archive.file('./output/output.resolver.graphql.js', { name: 'output.resolver.graphql.js' })
66-
await archive.finalize();
59+
const zipFilePath = path.join(RELATIVE_OUTPUT_PATH, `${NAME}.zip`);
60+
const resolverSchemaFilePath = path.join(RELATIVE_OUTPUT_PATH, `${NAME}.resolver.schema.json`)
61+
await createLambdaDeploymentPackage({
62+
outputZipFilePath: zipFilePath,
63+
templateFolderPath: templateFolderPath,
64+
resolverFilePath: resolverFilePath,
65+
resolverSchemaFilePath: resolverSchemaFilePath
66+
});
6767
} catch (err) {
6868
loggerError('Error creating deployment zip file', err);
6969
}
@@ -84,6 +84,7 @@ async function createAWSpipelineCDK({
8484
neptuneHost,
8585
neptunePort,
8686
outputFolderPath,
87+
resolverFilePath,
8788
neptuneType
8889
}) {
8990

@@ -95,9 +96,9 @@ async function createAWSpipelineCDK({
9596
SCHEMA_MODEL = schemaModel;
9697
NEPTUNE_HOST = neptuneHost;
9798
NEPTUNE_PORT = neptunePort;
98-
thisOutputFolderPath = outputFolderPath;
99+
RELATIVE_OUTPUT_PATH = outputFolderPath;
99100

100-
LAMBDA_ZIP_FILE = `${thisOutputFolderPath}/${NAME}.zip`;
101+
LAMBDA_ZIP_FILE = `${RELATIVE_OUTPUT_PATH}/${NAME}.zip`;
101102
let spinner = null;
102103
let neptuneClusterInfo = null;
103104

@@ -152,7 +153,7 @@ async function createAWSpipelineCDK({
152153
}
153154

154155
if (!quiet) spinner = ora('Creating ZIP ...').start();
155-
await createDeploymentFile(lambdaFilesPath, LAMBDA_ZIP_FILE);
156+
await createDeploymentFile(lambdaFilesPath, resolverFilePath);
156157
if (!quiet) spinner.succeed('Created ZIP File: ' + yellow(LAMBDA_ZIP_FILE));
157158

158159
APPSYNC_ATTACH_QUERY = await getSchemaFields('Query');

src/main.js

Lines changed: 20 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -288,16 +288,13 @@ function processArgs() {
288288

289289
/**
290290
* Saves the neptune schema to file
291+
* @param outputFilePrefix a prefix to use for the generated neptune schema file name
291292
*/
292-
function saveNeptuneSchema() {
293+
function saveNeptuneSchema(outputFilePrefix) {
293294
// Output Neptune schema
294295
if (inputGraphDBSchema !== '') {
295296
if (outputNeptuneSchemaFile === '') {
296-
if (createUpdatePipelineName === '') {
297-
outputNeptuneSchemaFile = outputFolderPath + '/output.neptune.schema.json';
298-
} else {
299-
outputNeptuneSchemaFile = `${outputFolderPath}/${createUpdatePipelineName}.neptune.schema.json`;
300-
}
297+
outputNeptuneSchemaFile = path.join(outputFolderPath, `${outputFilePrefix}.neptune.schema.json`);
301298
}
302299

303300
try {
@@ -394,8 +391,14 @@ async function main() {
394391
}
395392

396393
createOutputFolder();
394+
// determine a common output file prefix depending on input arguments, falling back to a default value 'output' if no relevant inputs are provided
395+
const outputFilePrefix = createUpdatePipelineName ||
396+
createUpdatePipelineNeptuneDatabaseName ||
397+
inputCDKpipelineName ||
398+
inputCDKpipelineDatabaseName ||
399+
`${neptuneInfo?.graphName?.concat('.') || ''}output`;
397400
// save the neptune schema early for troubleshooting purposes
398-
saveNeptuneSchema();
401+
saveNeptuneSchema(outputFilePrefix);
399402

400403
// Option 2: inference GraphQL schema from graphDB schema
401404
if (inputGraphDBSchema != '' && inputGraphQLSchema == '' && inputGraphQLSchemaFile == '') {
@@ -510,7 +513,7 @@ async function main() {
510513
if (inputGraphQLSchemaChanges != '') {
511514
inputGraphQLSchema = changeGraphQLSchema(inputGraphQLSchema, inputGraphQLSchemaChanges);
512515
}
513-
516+
514517
if (inputGraphQLSchema != '') {
515518
// Parse schema
516519
schemaModel = schemaParser(inputGraphQLSchema);
@@ -520,13 +523,7 @@ async function main() {
520523

521524
// Generate schema for resolver
522525
const queryDataModelJSON = JSON.stringify(schemaModel, null, 2);
523-
524-
let resolverSchemaFile;
525-
if (createUpdatePipelineName == '') {
526-
resolverSchemaFile = `${outputFolderPath}/output.resolver.schema.json`
527-
} else {
528-
resolverSchemaFile = `${outputFolderPath}/${createUpdatePipelineName}.resolver.schema.json`
529-
}
526+
const resolverSchemaFile = path.join(outputFolderPath, `${outputFilePrefix}.resolver.schema.json`);
530527

531528
try {
532529
writeFileSync(resolverSchemaFile, queryDataModelJSON);
@@ -562,12 +559,8 @@ async function main() {
562559
if (inputGraphQLSchema != '') {
563560

564561
outputSchema = schemaStringify(schemaModel, false);
565-
if ( outputSchemaFile == '' ) {
566-
if (createUpdatePipelineName == '') {
567-
outputSchemaFile = outputFolderPath + '/output.schema.graphql';
568-
} else {
569-
outputSchemaFile = `${outputFolderPath}/${createUpdatePipelineName}.schema.graphql`;
570-
}
562+
if (!outputSchemaFile) {
563+
outputSchemaFile = path.join(outputFolderPath, `${outputFilePrefix}.schema.graphql`);
571564
}
572565

573566
try {
@@ -580,12 +573,8 @@ async function main() {
580573

581574
// Output GraphQL schema with directives
582575
outputSourceSchema = schemaStringify(schemaModel, true);
583-
if ( outputSourceSchemaFile == '' ) {
584-
if (createUpdatePipelineName == '') {
585-
outputSourceSchemaFile = outputFolderPath + '/output.source.schema.graphql';
586-
} else {
587-
outputSourceSchemaFile = `${outputFolderPath}/${createUpdatePipelineName}.source.schema.graphql`;
588-
}
576+
if ( outputSourceSchemaFile === '' ) {
577+
outputSourceSchemaFile = path.join(outputFolderPath, `${outputFilePrefix}.source.schema.graphql`)
589578
}
590579

591580
try {
@@ -609,12 +598,8 @@ async function main() {
609598

610599

611600
// Output Javascript resolver
612-
if (outputJSResolverFile == '') {
613-
if (createUpdatePipelineName == '') {
614-
outputJSResolverFile = outputFolderPath + '/output.resolver.graphql.js';
615-
} else {
616-
outputJSResolverFile = `${outputFolderPath}/${createUpdatePipelineName}.resolver.graphql.js`;
617-
}
601+
if (outputJSResolverFile === '') {
602+
outputJSResolverFile = path.join(outputFolderPath, `${outputFilePrefix}.resolver.graphql.js`);
618603
}
619604

620605
try {
@@ -642,12 +627,7 @@ async function main() {
642627
// output Apollo zip
643628
if (createUpdateApolloServer || createUpdateApolloServerSubgraph) {
644629
const apolloZipPath = path.join(outputFolderPath, `apollo-server-${neptuneInfo.graphName}-${new Date().getTime()}.zip`);
645-
let resolverSchemaFilePath;
646-
if (createUpdatePipelineName == '') {
647-
resolverSchemaFilePath = path.join(outputFolderPath, 'output.resolver.schema.json');
648-
} else {
649-
resolverSchemaFilePath = path.join(outputFolderPath, `${createUpdatePipelineName}.resolver.schema.json`);
650-
}
630+
const resolverSchemaFilePath = path.join(outputFolderPath, `${outputFilePrefix}.resolver.schema.json`);
651631
try {
652632
if (!quiet) {
653633
spinner = ora('Creating Apollo server ZIP file ...').start();
@@ -756,6 +736,7 @@ async function main() {
756736
neptuneHost: neptuneHost,
757737
neptunePort: neptunePort,
758738
outputFolderPath: outputFolderPath,
739+
resolverFilePath: outputLambdaResolverFile,
759740
neptuneType: neptuneType
760741
});
761742
} catch (err) {

src/zipPackage.js

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,11 @@ import {fileURLToPath} from "url";
1919
async function createZip({targetZipFilePath, includePaths = [], includeContent = [], excludePatterns = []}) {
2020
const output = fs.createWriteStream(targetZipFilePath);
2121
const archive = archiver('zip', {zlib: {level: 9}});
22+
const streamingCompletedPromise = new Promise((resolve, reject) => {
23+
output.on('close', () => resolve());
24+
archive.on('error', err => reject(err));
25+
});
26+
2227
archive.pipe(output);
2328

2429
includePaths.forEach(includePath => {
@@ -40,6 +45,7 @@ async function createZip({targetZipFilePath, includePaths = [], includeContent =
4045
archive.append(content.source, {name: content.target});
4146
});
4247
await archive.finalize();
48+
await streamingCompletedPromise;
4349
}
4450

4551
/**

templates/CDKTemplate.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -198,6 +198,7 @@ export function request(ctx) {
198198
payload: {
199199
field: ctx.info.fieldName,
200200
arguments: args,
201+
variables: ctx.info.variables,
201202
selectionSetGraphQL: ctx.info.selectionSetGraphQL,
202203
source
203204
},
Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
1-
import { readJSONFile, checkOutputFilesSize, checkOutputFilesContent, checkFolderContainsFiles } from '../../testLib';
2-
3-
const casetest = readJSONFile('./test/TestCases/Case01/case.json');
1+
import { checkFolderContainsFiles, compareFileContents } from '../../testLib';
2+
import path from "path";
43

54
describe('Validate output files', () => {
65
const expectedFiles = [
@@ -10,7 +9,14 @@ describe('Validate output files', () => {
109
'output.schema.graphql',
1110
'output.source.schema.graphql'
1211
];
13-
checkFolderContainsFiles('./test/TestCases/Case01/output', expectedFiles);
14-
checkOutputFilesSize('./test/TestCases/Case01/output', casetest.testOutputFilesSize, './test/TestCases/Case01/outputReference');
15-
checkOutputFilesContent('./test/TestCases/Case01/output', casetest.testOutputFilesContent, './test/TestCases/Case01/outputReference');
12+
const outputFolder = './test/TestCases/Case01/output';
13+
checkFolderContainsFiles(outputFolder, expectedFiles);
14+
const referenceFolder = './test/TestCases/Case01/outputReference';
15+
compareFileContents([{
16+
expected: path.join(referenceFolder, 'output.schema.graphql'),
17+
actual: path.join(outputFolder, 'output.schema.graphql')
18+
}, {
19+
expected: path.join(referenceFolder, 'output.source.schema.graphql'),
20+
actual: path.join(outputFolder, 'output.source.schema.graphql')
21+
}]);
1622
});

test/TestCases/Case01/case.json

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,5 @@
88
"--output-folder-path", "./test/TestCases/Case01/output",
99
"--output-no-lambda-zip"],
1010
"host": "<AIR_ROUTES_DB_HOST>",
11-
"port": "<AIR_ROUTES_DB_PORT>",
12-
"testOutputFilesSize": ["output.schema.graphql", "output.source.schema.graphql"],
13-
"testOutputFilesContent": ["output.schema.graphql", "output.source.schema.graphql"]
11+
"port": "<AIR_ROUTES_DB_PORT>"
1412
}

test/TestCases/Case02/Case02.01.test.js

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,6 @@ async function executeUtility() {
1111
}
1212

1313
describe('Validate successful execution', () => {
14-
afterAll(async () => {
15-
fs.rmSync('./test/TestCases/Case02/output', {recursive: true});
16-
});
17-
1814
test('Execute utility: ' + casetest.argv.join(' '), async () => {
1915
expect(await executeUtility()).not.toBe(null);
2016
}, 600000);
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
import { checkFolderContainsFiles } from '../../testLib';
2+
import fs from "fs";
3+
4+
const outputFolderPath = './test/TestCases/Case02/output';
5+
6+
describe('Validate output content', () => {
7+
afterAll(() => {
8+
fs.rmSync(outputFolderPath, {recursive: true});
9+
});
10+
11+
checkFolderContainsFiles(outputFolderPath, [
12+
'output.jsresolver.graphql.js',
13+
'output.neptune.schema.json',
14+
'output.resolver.graphql.js',
15+
'output.resolver.schema.json',
16+
'output.schema.graphql',
17+
'output.source.schema.graphql'
18+
]);
19+
});

test/TestCases/Case03/Case03.01.test.js

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,6 @@ async function executeUtility() {
1010
}
1111

1212
describe('Validate successful execution', () => {
13-
afterAll(async () => {
14-
fs.rmSync('./test/TestCases/Case03/output', {recursive: true});
15-
});
16-
1713
test('Execute utility: ' + casetest.argv.join(' '), async () => {
1814
expect(await executeUtility()).not.toBe(null);
1915
}, 600000);

0 commit comments

Comments
 (0)