Adding Lambda@Edge origin request to AWS #192

Closed · wants to merge 2 commits
@@ -2,7 +2,7 @@

const URL = require('url');

-const Request = require('../../request');
+const Request = require('../../../request');

function requestMethod(event) {
if (event.version === '2.0') {
@@ -1,7 +1,7 @@
'use strict';

const isBinary = require('./is-binary');
-const Response = require('../../response');
+const Response = require('../../../response');
const sanitizeHeaders = require('./sanitize-headers');

module.exports = (event, response, options) => {
15 changes: 15 additions & 0 deletions lib/provider/aws/api-gw/index.js
@@ -0,0 +1,15 @@
const cleanUpEvent = require('./clean-up-event');

const createRequest = require('./create-request');
const formatResponse = require('./format-response');

module.exports = options => {
  return getResponse => async (event_, context = {}) => {
    const event = cleanUpEvent(event_, options);

    const request = createRequest(event, options);
    const response = await getResponse(request, event, context);

    return formatResponse(event, response, options);
  };
};
File renamed without changes.
19 changes: 7 additions & 12 deletions lib/provider/aws/index.js
@@ -1,15 +1,10 @@
-const cleanUpEvent = require('./clean-up-event');
-
-const createRequest = require('./create-request');
-const formatResponse = require('./format-response');
+const apiGw = require('./api-gw');
+const lambdaEdgeOriginRequest = require('./lambda-edge-origin-request');

module.exports = options => {
-  return getResponse => async (event_, context = {}) => {
-    const event = cleanUpEvent(event_, options);
-
-    const request = createRequest(event, options);
-    const response = await getResponse(request, event, context);
-
-    return formatResponse(event, response, options);
-  };
+  if (options.type === 'lambda-edge-origin-request') {
+    return lambdaEdgeOriginRequest(options)
+  } else {
+    return apiGw(options)
+  }
};
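
The dispatch above is the integration point for this PR: options.type selects the new Lambda@Edge origin-request handler, and anything else falls through to the existing API Gateway path. A minimal usage sketch (the require path is the PR's; the variable names are illustrative only):

const aws = require('./lib/provider/aws');

// New behaviour added by this PR: an explicit type selects the Edge handler.
const edgeHandler = aws({ type: 'lambda-edge-origin-request' });

// Any other options keep the existing API Gateway behaviour.
const apiGatewayHandler = aws({});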
31 changes: 31 additions & 0 deletions lib/provider/aws/lambda-edge-origin-request/clean-up-event.js
@@ -0,0 +1,31 @@
'use strict';

function getPath({ uri }) {
  return typeof uri === 'string' ? uri : '/';
}

module.exports = function cleanupEvent(evt, options) {
  const event = (evt &&
    evt.Records &&
    evt.Records.length > 0 &&
    evt.Records[0].cf) ||
    {};

  event.config = event.config || {};

  event.request = event.request || {};
  event.request.body = event.request.body || '';


The request body in a Lambda@Edge event is an object, not a string.

See here:

export interface CloudFrontRequest {
    body?: {
        action: 'read-only' | 'replace';
        data: string;
        encoding: 'base64' | 'text';
        readonly inputTruncated: boolean;
    };
    readonly clientIp: string;
    readonly method: string;
    uri: string;
    querystring: string;
    headers: CloudFrontHeaders;
    origin?: CloudFrontOrigin;
}
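
Given that shape, a default that preserves the object form rather than '' might look like the sketch below (the field values are assumptions for illustration, not something this thread settles on):

// Hypothetical default that keeps the documented body shape.
event.request.body = event.request.body || {
  action: 'read-only',
  data: '',
  encoding: 'text',
  inputTruncated: false
};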

  event.request.headers = event.request.headers || {};
  event.request.method = event.request.method || 'GET';
  event.request.uri = getPath(event.request);

  if (options.basePath) {
    const basePathIndex = event.path.indexOf(options.basePath);

    if (basePathIndex > -1) {
      event.path = event.path.substr(basePathIndex + options.basePath.length);
    }
  }

  return event;
};
62 changes: 62 additions & 0 deletions lib/provider/aws/lambda-edge-origin-request/create-request.js
@@ -0,0 +1,62 @@
'use strict';

const crypto = require('crypto');
const Request = require('../../../request');

function requestHeaders(event) {
  let headers = Object.keys(event.request.headers).reduce((headers, key) => {
    headers[event.request.headers[key][0].key.toLowerCase()] = event.request.headers[key][0].value;
    return headers;
  }, {});

  headers['x-request-id'] = crypto.randomBytes(30).toString('base64')

  return headers;
}

function requestBody(event) {
  const type = typeof event.request.body;
  const body = event && event.request && event.request.body;

  if (!body) return '';

  if (Buffer.isBuffer(event.request.body.data)) {
    return event.request.body.data;
  } else if (type === 'string') {
    return Buffer.from(event.request.body.data, event.request.body.encoding === 'base64' ? 'base64' : 'utf8');
  } else if (type === 'object') {
    return Buffer.from(JSON.stringify(event.request.body.data));
@ljwagerfield Dec 30, 2020

This is adding extra "" around the returned request body.

Instead, this should be:

Buffer.from(event.request.body.data, "base64");

@ljwagerfield Dec 30, 2020

Actually, it should be as follows (I've just seen what the original code looks like for APIG, so I can see how this got copy-and-pasted like this):

function requestBody(event) {
  const body = event && event.request && event.request.body && event.request.body.data;
  const type = typeof body;

  if (!body) return '';

  if (Buffer.isBuffer(body)) {
    return body;
  } else if (type === 'string') {
    return Buffer.from(body, event.request.body.encoding === 'base64' ? 'base64' : 'utf8');
  } else if (type === 'object') {
    return Buffer.from(JSON.stringify(body));
  }

  throw new Error(`Unexpected event.body type: ${typeof event.request.body.data}`);
}


I can confirm this works 👍

Author

Hey, thanks for the feedback. How is this different from the current implementation? In any case, I will update the PR to use the body variable (more readable, I guess).

@ljwagerfield Jan 5, 2021

The implementation you had results in the request body being wrapped in "": with the previous code, a request body of "hello" comes out as the string "\"hello\"".

AFAIK it's because you copied it from the APIG handler, which expects the body to be a string OR an already-decoded JSON object (which is why, in the latter case, it uses JSON.stringify to turn it back into a string).

In our case, for Edge, the body is always a string (either base64 encoded or not).
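
For illustration, using the data field from the sample event bundled in this PR (origin-request.json), the two branches produce:

const data = 'eyJ1c2VybmFtZSI6IkxhbWJkYUBFZGdlIiwiY29tbWVudCI6IlRoaXMgaXMgcmVxdWVzdCBib2R5In0=';

// Original branch: the base64 string is never decoded and gets wrapped in quotes.
Buffer.from(JSON.stringify(data)).toString();
// => '"eyJ1c2VybmFtZSI6..."'

// Suggested branch: decoded according to the declared encoding.
Buffer.from(data, 'base64').toString('utf8');
// => '{"username":"Lambda@Edge","comment":"This is request body"}'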

  }

  throw new Error(`Unexpected event.body type: ${typeof event.request.body.data}`);
}

function getUrl(path, queryString) {
  if (queryString) {
    return `${path}?${queryString}`
  }

  return path;
}

module.exports = (event, options) => {
  const method = event.request.method;
  const remoteAddress = event.request.clientIp;
  const headers = requestHeaders(event);
  const body = requestBody(event);

  if (typeof options.requestId === 'string' && options.requestId.length > 0) {
    const header = options.requestId.toLowerCase();
    headers[header] = headers[header] || event.config.requestId;
  }

  const req = new Request({
    method,
    headers,
    body,
    remoteAddress,
    url: getUrl(event.request.uri, event.request.querystring)
  });

  return req;
};
27 changes: 27 additions & 0 deletions lib/provider/aws/lambda-edge-origin-request/format-response.js
@@ -0,0 +1,27 @@
'use strict';
const http = require('http');

const isBinary = require('./is-binary');
const Response = require('../../../response');
const sanitizeHeaders = require('./sanitize-headers');

module.exports = (response, options) => {
  const { statusCode } = response;
  const headers = Response.headers(response);

  if (headers['transfer-encoding'] === 'chunked' || response.chunkedEncoding) {
    throw new Error('chunked encoding not supported');
  }

  const isBase64Encoded = isBinary(headers, options);
  const encoding = isBase64Encoded ? 'base64' : 'utf8';
  let body = Response.body(response).toString(encoding);

  return {
    status: statusCode.toString(),
    statusDescription: http.STATUS_CODES[statusCode],
    headers: sanitizeHeaders(headers),
    body,
    bodyEncoding: isBase64Encoded ? 'base64' : 'text'
  };
};
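
For a plain-text 200 response with default options, the object returned above comes out roughly as follows (values illustrative):

{
  status: '200',
  statusDescription: 'OK',
  headers: { 'content-type': [{ key: 'content-type', value: 'text/plain' }] },
  body: 'ok',
  bodyEncoding: 'text'
}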
16 changes: 16 additions & 0 deletions lib/provider/aws/lambda-edge-origin-request/index.js
@@ -0,0 +1,16 @@
const cleanUpEvent = require('./clean-up-event');

const createRequest = require('./create-request');
const formatResponse = require('./format-response');

module.exports = options => {
  return getResponse => async (event_, context = {}) => {
    const event = cleanUpEvent(event_, options);

    const request = createRequest(event, options);
    const response = await getResponse(request, event, context);
    const formattedResponse = formatResponse(response, options);

    return formattedResponse;
  };
};
38 changes: 38 additions & 0 deletions lib/provider/aws/lambda-edge-origin-request/is-binary.js
@@ -0,0 +1,38 @@
'use strict';

const BINARY_ENCODINGS = ['gzip', 'deflate', 'br'];
const BINARY_CONTENT_TYPES = (process.env.BINARY_CONTENT_TYPES || '').split(',');

function isBinaryEncoding(headers) {
  const contentEncoding = headers['content-encoding'];

  if (typeof contentEncoding === 'string') {
    return contentEncoding.split(',').some(value =>
      BINARY_ENCODINGS.some(binaryEncoding => value.indexOf(binaryEncoding) !== -1)
    );
  }
}

function isBinaryContent(headers, options) {
  const contentTypes = [].concat(options.binary
    ? options.binary
    : BINARY_CONTENT_TYPES
  ).map(candidate =>
    new RegExp(`^${candidate.replace(/\*/g, '.*')}$`)
  );

  const contentType = (headers['content-type'] || '').split(';')[0];
  return !!contentType && contentTypes.some(candidate => candidate.test(contentType));
}

module.exports = function isBinary(headers, options) {
  if (options.binary === false) {
    return false;
  }

  if (typeof options.binary === 'function') {
    return options.binary(headers);
  }

  return isBinaryEncoding(headers) || isBinaryContent(headers, options);
};
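
A usage sketch for the helper above (header and option values are made up for illustration):

const isBinary = require('./is-binary');

isBinary({ 'content-encoding': 'gzip' }, {});                       // true: gzip is a binary encoding
isBinary({ 'content-type': 'image/png' }, { binary: ['image/*'] }); // true: matches the configured pattern
isBinary({ 'content-type': 'image/png' }, { binary: false });       // false: binary handling disabled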
76 changes: 76 additions & 0 deletions lib/provider/aws/lambda-edge-origin-request/origin-request.json
@@ -0,0 +1,76 @@
{
"Records": [
{
"cf": {
"config": {
"distributionDomainName": "d123.cloudfront.net",
"distributionId": "EDFDVBD6EXAMPLE",
"eventType": "viewer-request",
"requestId": "MRVMF7KydIvxMWfJIglgwHQwZsbG2IhRJ07sn9AkKUFSHS9EXAMPLE=="
},
"request": {
"body": {
"action": "read-only",
"data": "eyJ1c2VybmFtZSI6IkxhbWJkYUBFZGdlIiwiY29tbWVudCI6IlRoaXMgaXMgcmVxdWVzdCBib2R5In0=",
"encoding": "base64",
"inputTruncated": false
},
"clientIp": "2001:0db8:85a3:0:0:8a2e:0370:7334",
"querystring": "size=large",
"uri": "/picture.jpg",
"method": "GET",
"headers": {
"host": [
{
"key": "Host",
"value": "d111111abcdef8.cloudfront.net"
}
],
"user-agent": [
{
"key": "User-Agent",
"value": "curl/7.51.0"
}
]
},
"origin": {
"custom": {
"customHeaders": {
"my-origin-custom-header": [
{
"key": "My-Origin-Custom-Header",
"value": "Test"
}
]
},
"domainName": "example.com",
"keepaliveTimeout": 5,
"path": "/custom_path",
"port": 443,
"protocol": "https",
"readTimeout": 5,
"sslProtocols": [
"TLSv1",
"TLSv1.1"
]
},
"s3": {
"authMethod": "origin-access-identity",
"customHeaders": {
"my-origin-custom-header": [
{
"key": "My-Origin-Custom-Header",
"value": "Test"
}
]
},
"domainName": "my-bucket.s3.amazonaws.com",
"path": "/s3_path",
"region": "us-east-1"
}
}
}
}
}
]
}
49 changes: 49 additions & 0 deletions lib/provider/aws/lambda-edge-origin-request/sanitize-headers.js
@@ -0,0 +1,49 @@
'use strict';

const setCookieVariations = require('./set-cookie.json').variations;
const readOnlyHeaders = [
"memoept-encoding",


Typo: fixed in the code block I've suggested here

"content-length",
"if-modified-since",
"if-none-Match",
"if-range",
"if-unmodified-since",
"range",
"transfer-encoding",
"via"
];

module.exports = function sanitizeHeaders(headers) {
  return Object.keys(headers).reduce((memo, key) => {
    const value = headers[key];
    const normalizedKey = key.toLowerCase();

    if (readOnlyHeaders.includes(normalizedKey)) {
      return memo;
    }

    if (memo[normalizedKey]) {
      memo[normalizedKey].push({
        key: key,
        value: value
      });

      return memo;
    }

    if (Array.isArray(value) && normalizedKey === 'set-cookie') {
      value.forEach((cookie, i) => {
        memo[setCookieVariations[i]] = cookie;
@ljwagerfield Jan 8, 2021

This seems like a bug: if you have an array of 2 cookies, this results in the following headers being set:

"connection": [
    {
        "key": "connection",
        "value": "close"
    }
],
"set-cookie": "cookie1=hello",
"Set-cookie": "cookie2=world",
"foo": [
    {
        "key": "foo",
        "value": "bar"
    }
],

... because you're taking the index of the cookie, and assigning it to the Nth cookie-variant name... so 0 is set-cookie, 1 is Set-cookie, 2 is sEt-cookie, and so forth...
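
A short sketch of the mapping that produces that output (cookie values made up):

const variations = require('./set-cookie.json').variations;
const memo = {};

['cookie1=hello', 'cookie2=world'].forEach((cookie, i) => {
  memo[variations[i]] = cookie;   // i = 0 -> 'set-cookie', i = 1 -> 'Set-cookie', ...
});

// memo is now { 'set-cookie': 'cookie1=hello', 'Set-cookie': 'cookie2=world' }:
// raw strings under case-variant keys, instead of a single array of { key, value } objects.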


I would recommend:

  1. Delete set-cookie.json.
  2. Change this whole file (sanitize-headers.js) to the code below.
    • Note: as a separate improvement, the code below also omits blacklisted headers, not just read-only.
• Most importantly, though, the new code handles cookie headers correctly, and handles array values for other headers correctly too.
'use strict';

// See: https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-requirements-limits.html#lambda-read-only-headers
const readOnlyHeaders = [
  "accept-encoding",
  "content-length",
  "if-modified-since",
  "if-none-Match",
  "if-range",
  "if-unmodified-since",
  "range",
  "transfer-encoding",
  "via"
];

// See: https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-requirements-limits.html#lambda-blacklisted-headers
const blacklistedHeaders = [
  "connection",
  "expect",
  "keep-alive",
  "proxy-authenticate",
  "proxy-authorization",
  "proxy-connection",
  "trailer",
  "upgrade",
  "x-accel-buffering",
  "x-accel-charset",
  "x-accel-limit-rate",
  "x-accel-redirect",
  "x-cache",
  "x-forwarded-proto",
  "x-real-ip",
]

const omittedHeaders = [...readOnlyHeaders, ...blacklistedHeaders]

module.exports = function sanitizeHeaders(headers) {
  return Object.keys(headers).reduce((memo, key) => {
    const value = headers[key];
    const normalizedKey = key.toLowerCase();

    if (omittedHeaders.includes(normalizedKey)) {
        return memo;
    }

    if (memo[normalizedKey] === undefined) {
      memo[normalizedKey] = []
    }

    const valueArray = Array.isArray(value) ? value : [value]

    valueArray.forEach(valueElement => {
      memo[normalizedKey].push({
        key: key,
        value: valueElement
      });
    });

    return memo;
  }, {});
};

      });

      return memo;
    }

    memo[normalizedKey] = [{
      key: key,
      value: value
    }];

    return memo;
  }, {});
};
1 change: 1 addition & 0 deletions lib/provider/aws/lambda-edge-origin-request/set-cookie.json
@@ -0,0 +1 @@
{"variations":["set-cookie","Set-cookie","sEt-cookie","SEt-cookie","seT-cookie","SeT-cookie","sET-cookie","SET-cookie","set-Cookie","Set-Cookie","sEt-Cookie","SEt-Cookie","seT-Cookie","SeT-Cookie","sET-Cookie","SET-Cookie","set-cOokie","Set-cOokie","sEt-cOokie","SEt-cOokie","seT-cOokie","SeT-cOokie","sET-cOokie","SET-cOokie","set-COokie","Set-COokie","sEt-COokie","SEt-COokie","seT-COokie","SeT-COokie","sET-COokie","SET-COokie","set-coOkie","Set-coOkie","sEt-coOkie","SEt-coOkie","seT-coOkie","SeT-coOkie","sET-coOkie","SET-coOkie","set-CoOkie","Set-CoOkie","sEt-CoOkie","SEt-CoOkie","seT-CoOkie","SeT-CoOkie","sET-CoOkie","SET-CoOkie","set-cOOkie","Set-cOOkie","sEt-cOOkie","SEt-cOOkie","seT-cOOkie","SeT-cOOkie","sET-cOOkie","SET-cOOkie","set-COOkie","Set-COOkie","sEt-COOkie","SEt-COOkie","seT-COOkie","SeT-COOkie","sET-COOkie","SET-COOkie","set-cooKie","Set-cooKie","sEt-cooKie","SEt-cooKie","seT-cooKie","SeT-cooKie","sET-cooKie","SET-cooKie","set-CooKie","Set-CooKie","sEt-CooKie","SEt-CooKie","seT-CooKie","SeT-CooKie","sET-CooKie","SET-CooKie","set-cOoKie","Set-cOoKie","sEt-cOoKie","SEt-cOoKie","seT-cOoKie","SeT-cOoKie","sET-cOoKie","SET-cOoKie","set-COoKie","Set-COoKie","sEt-COoKie","SEt-COoKie","seT-COoKie","SeT-COoKie","sET-COoKie","SET-COoKie","set-coOKie","Set-coOKie","sEt-coOKie","SEt-coOKie","seT-coOKie","SeT-coOKie","sET-coOKie","SET-coOKie","set-CoOKie","Set-CoOKie","sEt-CoOKie","SEt-CoOKie","seT-CoOKie","SeT-CoOKie","sET-CoOKie","SET-CoOKie","set-cOOKie","Set-cOOKie","sEt-cOOKie","SEt-cOOKie","seT-cOOKie","SeT-cOOKie","sET-cOOKie","SET-cOOKie","set-COOKie","Set-COOKie","sEt-COOKie","SEt-COOKie","seT-COOKie","SeT-COOKie","sET-COOKie","SET-COOKie","set-cookIe","Set-cookIe","sEt-cookIe","SEt-cookIe","seT-cookIe","SeT-cookIe","sET-cookIe","SET-cookIe","set-CookIe","Set-CookIe","sEt-CookIe","SEt-CookIe","seT-CookIe","SeT-CookIe","sET-CookIe","SET-CookIe","set-cOokIe","Set-cOokIe","sEt-cOokIe","SEt-cOokIe","seT-cOokIe","SeT-cOokIe","sET-cOokIe","SET-cOokIe","set-COokIe","Set-COokIe","sEt-COokIe","SEt-COokIe","seT-COokIe","SeT-COokIe","sET-COokIe","SET-COokIe","set-coOkIe","Set-coOkIe","sEt-coOkIe","SEt-coOkIe","seT-coOkIe","SeT-coOkIe","sET-coOkIe","SET-coOkIe","set-CoOkIe","Set-CoOkIe","sEt-CoOkIe","SEt-CoOkIe","seT-CoOkIe","SeT-CoOkIe","sET-CoOkIe","SET-CoOkIe","set-cOOkIe","Set-cOOkIe","sEt-cOOkIe","SEt-cOOkIe","seT-cOOkIe","SeT-cOOkIe","sET-cOOkIe","SET-cOOkIe","set-COOkIe","Set-COOkIe","sEt-COOkIe","SEt-COOkIe","seT-COOkIe","SeT-COOkIe","sET-COOkIe","SET-COOkIe","set-cooKIe","Set-cooKIe","sEt-cooKIe","SEt-cooKIe","seT-cooKIe","SeT-cooKIe","sET-cooKIe","SET-cooKIe","set-CooKIe","Set-CooKIe","sEt-CooKIe","SEt-CooKIe","seT-CooKIe","SeT-CooKIe","sET-CooKIe","SET-CooKIe","set-cOoKIe","Set-cOoKIe","sEt-cOoKIe","SEt-cOoKIe","seT-cOoKIe","SeT-cOoKIe","sET-cOoKIe","SET-cOoKIe","set-COoKIe","Set-COoKIe","sEt-COoKIe","SEt-COoKIe","seT-COoKIe","SeT-COoKIe","sET-COoKIe","SET-COoKIe","set-coOKIe","Set-coOKIe","sEt-coOKIe","SEt-coOKIe","seT-coOKIe","SeT-coOKIe","sET-coOKIe","SET-coOKIe","set-CoOKIe","Set-CoOKIe","sEt-CoOKIe","SEt-CoOKIe","seT-CoOKIe","SeT-CoOKIe","sET-CoOKIe","SET-CoOKIe","set-cOOKIe","Set-cOOKIe","sEt-cOOKIe","SEt-cOOKIe","seT-cOOKIe","SeT-cOOKIe","sET-cOOKIe","SET-cOOKIe","set-COOKIe","Set-COOKIe","sEt-COOKIe","SEt-COOKIe","seT-COOKIe","SeT-COOKIe","sET-COOKIe","SET-COOKIe","set-cookiE","Set-cookiE","sEt-cookiE","SEt-cookiE","seT-cookiE","SeT-cookiE","sET-cookiE","SET-cookiE","set-CookiE","Set-CookiE","sEt-CookiE","SEt-CookiE","seT-CookiE","SeT-CookiE","sET-CookiE","SET-CookiE","set
-cOokiE","Set-cOokiE","sEt-cOokiE","SEt-cOokiE","seT-cOokiE","SeT-cOokiE","sET-cOokiE","SET-cOokiE","set-COokiE","Set-COokiE","sEt-COokiE","SEt-COokiE","seT-COokiE","SeT-COokiE","sET-COokiE","SET-COokiE","set-coOkiE","Set-coOkiE","sEt-coOkiE","SEt-coOkiE","seT-coOkiE","SeT-coOkiE","sET-coOkiE","SET-coOkiE","set-CoOkiE","Set-CoOkiE","sEt-CoOkiE","SEt-CoOkiE","seT-CoOkiE","SeT-CoOkiE","sET-CoOkiE","SET-CoOkiE","set-cOOkiE","Set-cOOkiE","sEt-cOOkiE","SEt-cOOkiE","seT-cOOkiE","SeT-cOOkiE","sET-cOOkiE","SET-cOOkiE","set-COOkiE","Set-COOkiE","sEt-COOkiE","SEt-COOkiE","seT-COOkiE","SeT-COOkiE","sET-COOkiE","SET-COOkiE","set-cooKiE","Set-cooKiE","sEt-cooKiE","SEt-cooKiE","seT-cooKiE","SeT-cooKiE","sET-cooKiE","SET-cooKiE","set-CooKiE","Set-CooKiE","sEt-CooKiE","SEt-CooKiE","seT-CooKiE","SeT-CooKiE","sET-CooKiE","SET-CooKiE","set-cOoKiE","Set-cOoKiE","sEt-cOoKiE","SEt-cOoKiE","seT-cOoKiE","SeT-cOoKiE","sET-cOoKiE","SET-cOoKiE","set-COoKiE","Set-COoKiE","sEt-COoKiE","SEt-COoKiE","seT-COoKiE","SeT-COoKiE","sET-COoKiE","SET-COoKiE","set-coOKiE","Set-coOKiE","sEt-coOKiE","SEt-coOKiE","seT-coOKiE","SeT-coOKiE","sET-coOKiE","SET-coOKiE","set-CoOKiE","Set-CoOKiE","sEt-CoOKiE","SEt-CoOKiE","seT-CoOKiE","SeT-CoOKiE","sET-CoOKiE","SET-CoOKiE","set-cOOKiE","Set-cOOKiE","sEt-cOOKiE","SEt-cOOKiE","seT-cOOKiE","SeT-cOOKiE","sET-cOOKiE","SET-cOOKiE","set-COOKiE","Set-COOKiE","sEt-COOKiE","SEt-COOKiE","seT-COOKiE","SeT-COOKiE","sET-COOKiE","SET-COOKiE","set-cookIE","Set-cookIE","sEt-cookIE","SEt-cookIE","seT-cookIE","SeT-cookIE","sET-cookIE","SET-cookIE","set-CookIE","Set-CookIE","sEt-CookIE","SEt-CookIE","seT-CookIE","SeT-CookIE","sET-CookIE","SET-CookIE","set-cOokIE","Set-cOokIE","sEt-cOokIE","SEt-cOokIE","seT-cOokIE","SeT-cOokIE","sET-cOokIE","SET-cOokIE","set-COokIE","Set-COokIE","sEt-COokIE","SEt-COokIE","seT-COokIE","SeT-COokIE","sET-COokIE","SET-COokIE","set-coOkIE","Set-coOkIE","sEt-coOkIE","SEt-coOkIE","seT-coOkIE","SeT-coOkIE","sET-coOkIE","SET-coOkIE","set-CoOkIE","Set-CoOkIE","sEt-CoOkIE","SEt-CoOkIE","seT-CoOkIE","SeT-CoOkIE","sET-CoOkIE","SET-CoOkIE","set-cOOkIE","Set-cOOkIE","sEt-cOOkIE","SEt-cOOkIE","seT-cOOkIE","SeT-cOOkIE","sET-cOOkIE","SET-cOOkIE","set-COOkIE","Set-COOkIE","sEt-COOkIE","SEt-COOkIE","seT-COOkIE","SeT-COOkIE","sET-COOkIE","SET-COOkIE","set-cooKIE","Set-cooKIE","sEt-cooKIE","SEt-cooKIE","seT-cooKIE","SeT-cooKIE","sET-cooKIE","SET-cooKIE","set-CooKIE","Set-CooKIE","sEt-CooKIE","SEt-CooKIE","seT-CooKIE","SeT-CooKIE","sET-CooKIE","SET-CooKIE","set-cOoKIE","Set-cOoKIE","sEt-cOoKIE","SEt-cOoKIE","seT-cOoKIE","SeT-cOoKIE","sET-cOoKIE","SET-cOoKIE","set-COoKIE","Set-COoKIE","sEt-COoKIE","SEt-COoKIE","seT-COoKIE","SeT-COoKIE","sET-COoKIE","SET-COoKIE","set-coOKIE","Set-coOKIE","sEt-coOKIE","SEt-coOKIE","seT-coOKIE","SeT-coOKIE","sET-coOKIE","SET-coOKIE","set-CoOKIE","Set-CoOKIE","sEt-CoOKIE","SEt-CoOKIE","seT-CoOKIE","SeT-CoOKIE","sET-CoOKIE","SET-CoOKIE","set-cOOKIE","Set-cOOKIE","sEt-cOOKIE","SEt-cOOKIE","seT-cOOKIE","SeT-cOOKIE","sET-cOOKIE","SET-cOOKIE","set-COOKIE","Set-COOKIE","sEt-COOKIE","SEt-COOKIE","seT-COOKIE","SeT-COOKIE","sET-COOKIE","SET-COOKIE"]}
2 changes: 1 addition & 1 deletion test/clean-up-event.js
@@ -1,6 +1,6 @@
'use strict';

-const cleanUpEvent = require('../lib/provider/aws/clean-up-event.js');
+const cleanUpEvent = require('../lib/provider/aws//api-gw/clean-up-event.js');
const expect = require('chai').expect;

describe('clean up event', () => {
2 changes: 1 addition & 1 deletion test/format-response.js
@@ -1,6 +1,6 @@
'use strict';

-const formatResponse = require('../lib/provider/aws/format-response');
+const formatResponse = require('../lib/provider/aws/api-gw/format-response');
const Response = require('../lib/response');
const expect = require('chai').expect;

2 changes: 1 addition & 1 deletion test/is-binary.js
@@ -1,6 +1,6 @@
'use strict';

-const isBinary = require('../lib/provider/aws/is-binary');
+const isBinary = require('../lib/provider/aws/api-gw/is-binary');
const expect = require('chai').expect;
const sinon = require('sinon');
