diff --git a/.eslintignore b/.eslintignore index 1083980c..f6fbb297 100644 --- a/.eslintignore +++ b/.eslintignore @@ -4,3 +4,4 @@ coverage/ dist.js scratch/ src/http/get-index +types/ diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8c946a71..dcfa02b8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -15,7 +15,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - node-version: [ 14.x, 16.x, 18.x ] + node-version: [ 16.x, 18.x, 20.x ] os: [ windows-latest, ubuntu-latest, macOS-latest ] # Go @@ -53,7 +53,7 @@ jobs: - name: Notify uses: sarisia/actions-status-discord@v1 # Only fire alert once - if: github.ref == 'refs/heads/main' && failure() && matrix.node-version == '14.x' && matrix.os == 'ubuntu-latest' + if: github.ref == 'refs/heads/main' && failure() && matrix.node-version == '20.x' && matrix.os == 'ubuntu-latest' with: webhook: ${{ secrets.DISCORD_WEBHOOK }} title: "build and test" diff --git a/_changelog.md b/_changelog.md index 17399cb4..bb6f289a 100644 --- a/_changelog.md +++ b/_changelog.md @@ -6,6 +6,32 @@ Also see: [Architect changelog](https://github.com/architect/architect/blob/main --- +## [8.0.0] 2023-10-17 + +Architect Functions just got a lot faster. Gone are the days of 500-1000ms cold starts due to instantiating the AWS SDK – Functions v8 is now between 2-5x faster, and uses 2-4x less memory, courtesy of [aws-lite](https://aws-lite.org)! + + +### Added + +- `arc.tables()` now includes a new DynamoDB client: `_client`, an instantiation of [`@aws-lite/dynamodb`](https://github.com/architect/aws-lite/tree/main/plugins/dynamodb) + - `_client` is largely functionally similar to the AWS SDK's DocumentClient, but a bit less fiddly (we think) + - `arc.tables()` methods should be functionally the same, including key error properties + + +### Changed + +- Breaking change: AWS SDK v2 + v3 DynamoDB client + DocumentClient instantiation is now opt-in + - Code depending on `data._db` or `data._doc` must now instantiate with the `awsSdkClient` boolean option, like so: `await arc.tables({ awsSdkClient: true })` + - If you only rely on the DocumentClient (`_doc`), you may want to just try using the new [`@aws-lite/dynamodb`](https://aws-lite.org/services/dynamodb)-based `_client` +- Breaking change: while we've made efforts to ensure the maximum degree of compatibility with AWS SDK v2 and v3 errors, the errors returned in Arc Functions 8.0 (using `aws-lite`) may still vary slightly + - This only really applies if your error handling relies on specific properties or values + - If you just `console.log()` your errors, you will be totally fine, and the quality of the errors you get via `aws-lite` will most likely improve with this change + - Note: if you're an AWS SDK v2 user considering migrating to v3, error incompatibility will apply even more so; v3 errors are incompatible with v2, whereas `aws-lite` errors attempt to be compatible with both SDK v2 + v3 +- Added Node.js 20.x to test matrix +- Breaking change: removed support for Node.js 14.x (now EOL, and no longer available to be created in AWS Lambda) + +--- + ## [7.0.0] 2023-07-10 ### Added diff --git a/package.json b/package.json index 5447f340..e2fbd0e7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@architect/functions", - "version": "7.0.0", + "version": "8.0.0-RC.3", "description": "Runtime utility library for Functional Web Apps (FWAs) built with Architect (https://arc.codes)", "homepage": "https://github.com/architect/functions", "repository": {
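For readers upgrading, here is a minimal sketch of what the table access described in the 8.0.0 changelog above looks like in practice. It assumes a hypothetical `notes` table with a `noteID` partition key; neither the table nor the handler is part of this changeset.

```js
let arc = require('@architect/functions')

exports.handler = async function handler () {
  // Default 8.0 behavior: generated table methods plus the aws-lite-based `_client`
  let data = await arc.tables()
  await data.notes.put({ noteID: 'a1', body: 'hi' })

  // `_client` speaks the plain DynamoDB API (PutItem, GetItem, Query, etc.)
  // against the physical table name returned by `data.name()`
  let { Item } = await data._client.GetItem({
    TableName: data.name('notes'),
    Key: { noteID: 'a1' },
  })

  // The AWS SDK clients are now opt-in; `_db` and `_doc` only exist when the flag is passed
  let legacy = await arc.tables({ awsSdkClient: true })
  console.log(typeof legacy._doc.get) // 'function'

  return { statusCode: 200, body: JSON.stringify(Item) }
}
```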
@@ -15,7 +15,7 @@ "test:unit": "cross-env tape 'test/unit/**/*-test.js' | tap-arc", "test:integration": "cross-env tape 'test/integration/**/*-test.js' | tap-arc", "coverage": "nyc --reporter=lcov --reporter=text npm run test:unit", - "test": "npm run lint && npm run test:integration && npm run coverage", + "test": "npm run lint && npm run test:integration && npm run coverage && npm run test:types", "test:types": "tsd --files types/*.test-d.ts", "rc": "npm version prerelease --preid RC" }, @@ -25,7 +25,13 @@ "author": "Brian LeRoux ", "license": "Apache-2.0", "dependencies": { - "cookie": "^0.5.0", + "@aws-lite/apigatewaymanagementapi": "^0.0.7", + "@aws-lite/client": "^0.16.0", + "@aws-lite/dynamodb": "^0.3.3", + "@aws-lite/sns": "^0.0.4", + "@aws-lite/sqs": "^0.2.0", + "@aws-lite/ssm": "^0.2.2", + "cookie": "^0.6.0", "cookie-signature": "^1.2.1", "csrf": "^3.1.0", "node-webtokens": "^1.0.4", @@ -34,29 +40,25 @@ "uid-safe": "^2.1.5" }, "devDependencies": { - "@architect/asap": "^6.0.3", + "@architect/asap": "^7.0.3", "@architect/eslint-config": "2.1.1", "@architect/req-res-fixtures": "git+https://github.com/architect/req-res-fixtures.git", - "@architect/sandbox": "^5.7.4", - "@aws-sdk/client-apigatewaymanagementapi": "^3.319.0", - "@aws-sdk/client-dynamodb": "^3.319.0", - "@aws-sdk/client-sns": "^3.319.0", - "@aws-sdk/client-sqs": "^3.319.0", - "@aws-sdk/client-ssm": "^3.319.0", - "@aws-sdk/lib-dynamodb": "^3.319.0", - "@aws-sdk/node-http-handler": "^3.347.0", - "@types/aws-lambda": "^8.10.119", + "@architect/sandbox": "^6.0.0", + "@aws-lite/apigatewaymanagementapi-types": "^0.0.9", + "@aws-lite/dynamodb-types": "^0.3.4", + "@aws-lite/sns-types": "^0.0.5", + "@aws-lite/sqs-types": "^0.2.1", + "@types/aws-lambda": "^8.10.133", "@types/node": "18", - "aws-sdk": "^2.1364.0", "cross-env": "~7.0.3", - "eslint": "^8.47.0", + "eslint": "^8.56.0", "nyc": "~15.1.0", "proxyquire": "~2.1.3", - "sinon": "^15.2.0", - "tap-arc": "~1.0.0", - "tape": "^5.6.6", + "sinon": "^17.0.1", + "tap-arc": "^1.2.2", + "tape": "^5.7.4", "tiny-json-http": "^7.5.1", - "tsd": "^0.28.1" + "tsd": "^0.30.4" }, "files": [ "types/*", diff --git a/src/discovery/index.js b/src/discovery/index.js index a2c2bef6..b6f39f91 100644 --- a/src/discovery/index.js +++ b/src/discovery/index.js @@ -1,5 +1,4 @@ -let { isNode18, useAWS } = require('../lib') -let ssm, ssmClient +let { getAwsClient, useAWS } = require('../lib') /** * @param {string} type - events, queues, or tables @@ -7,7 +6,7 @@ let ssm, ssmClient */ module.exports = function lookup (callback) { - let { ARC_APP_NAME: app, ARC_ENV: env, ARC_SANDBOX, ARC_STACK_NAME: stack, AWS_REGION } = process.env + let { ARC_APP_NAME: app, ARC_ENV: env, ARC_SANDBOX, ARC_STACK_NAME: stack } = process.env let local = !useAWS() @@ -19,11 +18,8 @@ module.exports = function lookup (callback) { app = 'arc-app' } - let Path = `/${stack || toLogicalID(`${app}-${env}`)}` - let Recursive = true - let values = [] - let config - + let plugins = [ '@aws-lite/ssm' ] + let config = { plugins } if (local) { let port = 2222 if (ARC_SANDBOX) { @@ -35,70 +31,49 @@ module.exports = function lookup (callback) { } config = { endpoint: `http://localhost:${port}/_arc/ssm`, - region: AWS_REGION || 'us-west-2', + plugins, } } - // shim v2 and v3 - if (!ssmClient) { - if (isNode18) { - let { SSMClient: SSM, GetParametersByPathCommand: cmd } = require('@aws-sdk/client-ssm') - let GetParametersByPathCommand = cmd - ssm = new SSM(config) - ssmClient = (params, callback) => { - let command = new 
GetParametersByPathCommand(params) - return ssm.send(command, callback) - } - } + getAwsClient(config, (err, client) => { + if (err) callback(err) else { - let SSM = require('aws-sdk/clients/ssm') - ssm = new SSM(config) - ssmClient = (params, callback) => { - return ssm.getParametersByPath(params, callback) - } - } - } - - function getParams (params) { - ssmClient(params, function done (err, result) { - if (err && local && - err.message.includes('Inaccessible host') && - err.message.includes('localhost')) { - let msg = 'Sandbox internal services are unavailable, please ensure Sandbox is running' - callback(ReferenceError(msg)) - } - else if (err) { - callback(err) - } - else if (result.NextToken) { - values = values.concat(result.Parameters) - getParams({ Path, Recursive, NextToken: result.NextToken }) - } - else { - values = values.concat(result.Parameters) - let services = values.reduce((a, b) => { - let hierarchy = b.Name.split('/') - hierarchy.shift() // leading slash - hierarchy.shift() // stack name - let type = hierarchy.shift() // i.e. tables, events, queues, plugins - if (!a[type]) a[type] = {} - let parent = a[type] - let child, lastChild, lastParent - /* eslint-disable-next-line */ - while (child = hierarchy.shift()) { - if (!parent[child]) parent[child] = {} - lastParent = parent - parent = parent[child] - lastChild = child + let Path = `/${stack || toLogicalID(`${app}-${env}`)}` + client.ssm.GetParametersByPath({ Path, Recursive: true, paginate: true }) + .then(result => { + let services = result.Parameters.reduce((a, b) => { + let hierarchy = b.Name.split('/') + hierarchy.shift() // leading slash + hierarchy.shift() // stack name + let type = hierarchy.shift() // i.e. tables, events, queues, plugins + if (!a[type]) a[type] = {} + let parent = a[type] + let child, lastChild, lastParent + /* eslint-disable-next-line */ + while (child = hierarchy.shift()) { + if (!parent[child]) parent[child] = {} + lastParent = parent + parent = parent[child] + lastChild = child + } + lastParent[lastChild] = b.Value + return a + }, {}) + callback(null, services) + }) + .catch(err => { + if (err && local && + err.message.includes('Inaccessible host') && + err.message.includes('localhost')) { + let msg = 'Sandbox internal services are unavailable, please ensure Sandbox is running' + callback(ReferenceError(msg)) } - lastParent[lastChild] = b.Value - return a - }, {}) - callback(null, services) - } - }) - } - getParams({ Path, Recursive }) + else { + callback(err) + } + }) + } + }) } function toLogicalID (str) { diff --git a/src/events/index.js b/src/events/index.js index c949ea89..c3c2e48d 100644 --- a/src/events/index.js +++ b/src/events/index.js @@ -4,26 +4,5 @@ let subFactory = require('./subscribe') module.exports = function eventsAndQueuesFactory (arc, type) { let publish = pubFactory(arc, type) let subscribe = subFactory(type) - return { - /** - * `arc.events|queues.publish` - * publish events and queues - * - * @param {Object} params - * @param {String} params.name - the event name (required) - * @param {Object} params.payload - a json event payload (required) - * @param {Function} callback - a node style errback (optional) - * @returns {Promise} - returned if no callback is supplied - */ - publish, - - /** - * `arc.events|queues.subscribe` - * listen for events and queues - * - * @param {Function} handler - a single event handler function - * @returns {Lambda} - a Lambda function sig - */ - subscribe - } + return { publish, subscribe } } diff --git a/src/events/publish.js 
b/src/events/publish.js index b8e7e228..53256543 100644 --- a/src/events/publish.js +++ b/src/events/publish.js @@ -1,7 +1,7 @@ let http = require('http') -let { getPorts, isNode18, useAWS } = require('../lib') +let { getAwsClient, getPorts, useAWS } = require('../lib') let ledger = { events: {}, queues: {} } -let sns, snsClient, sqs, sqsClient, port +let client, port /** * Invoke @@ -10,7 +10,6 @@ let sns, snsClient, sqs, sqsClient, port */ module.exports = function publishFactory (arc, type) { let factory = type === 'events' ? eventFactory : queueFactory - let publishAWS = factory(arc) return function publish (params, callback) { if (!params.name) { throw ReferenceError('missing params.name') @@ -44,9 +43,22 @@ module.exports = function publishFactory (arc, type) { } }) } - else { + else if (client) { + let publishAWS = factory(arc) publishAWS(params, callback) } + else { + getAwsClient({ + plugins: [ '@aws-lite/sns', '@aws-lite/sqs' ], + }, (err, _client) => { + if (err) callback(err) + else { + client = _client + let publishAWS = factory(arc) + publishAWS(params, callback) + } + }) + } return promise } @@ -75,25 +87,14 @@ function _publishSandbox (type, params, callback) { function eventFactory (arc) { return function live ({ name, payload }, callback) { - if (!snsClient) { - if (isNode18) { - let { SNS } = require('@aws-sdk/client-sns') - sns = new SNS - } - else { - let SNS = require('aws-sdk/clients/sns') - sns = new SNS - } - } - snsClient = (params, callback) => { - return sns.publish(params, callback) - } function publish (arn, payload, callback) { - snsClient({ + client.sns.Publish({ TopicArn: arn, Message: JSON.stringify(payload) - }, callback) + }) + .then(result => callback(null, result)) + .catch(callback) } function cacheLedgerAndPublish (serviceMap) { @@ -115,42 +116,31 @@ function eventFactory (arc) { function queueFactory (arc) { return function live ({ name, payload, delaySeconds, groupID }, callback) { - if (!sqsClient) { - if (isNode18) { - let { SQS } = require('@aws-sdk/client-sqs') - sqs = new SQS - } - else { - let SQS = require('aws-sdk/clients/sqs') - sqs = new SQS - } - } - sqsClient = (params, callback) => { - return sqs.sendMessage(params, callback) - } - function publish (arn, payload, callback) { + function publish (url, payload, callback) { let params = { - QueueUrl: arn, + QueueUrl: url, DelaySeconds: delaySeconds || 0, MessageBody: JSON.stringify(payload) } - if (arn.endsWith('.fifo')) { + if (url.endsWith('.fifo')) { params.MessageGroupId = groupID || name } - sqsClient(params, callback) + client.sqs.SendMessage(params) + .then(result => callback(null, result)) + .catch(callback) } function cacheLedgerAndPublish (serviceMap) { ledger.queues = serviceMap.queues - arn = ledger.queues[name] - if (!arn) callback(ReferenceError(`${name} queue not found`)) - else publish(arn, payload, callback) + url = ledger.queues[name] + if (!url) callback(ReferenceError(`${name} queue not found`)) + else publish(url, payload, callback) } - let arn = ledger.queues[name] - if (arn) { - publish(arn, payload, callback) + let url = ledger.queues[name] + if (url) { + publish(url, payload, callback) } else { arc.services().then(cacheLedgerAndPublish).catch(callback) diff --git a/src/http/session/providers/ddb/create.js b/src/http/session/providers/ddb/create.js index 8f92f75d..46a62456 100644 --- a/src/http/session/providers/ddb/create.js +++ b/src/http/session/providers/ddb/create.js @@ -1,10 +1,11 @@ let uid = require('uid-safe') let week = require('./_week-from-now') -let { 
doc: dynamo } = require('../../../../tables/dynamo') let crsf = require('csrf') let parallel = require('run-parallel') module.exports = function _create (name, payload, callback) { + let { tables } = require('../../../../') + parallel([ function _key (callback) { uid(18, function _uid (err, val) { @@ -24,18 +25,12 @@ module.exports = function _create (name, payload, callback) { results.push({ _ttl: week() }) let keys = results.reduce((a, b) => Object.assign(a, b)) let session = Object.assign(payload, keys) - dynamo(function _gotDB (err, db) { + tables({}, (err, data) => { if (err) callback(err) - else { - db.put({ - TableName: name, - Item: session - }, - function _create (err) { - if (err) callback(err) - else callback(null, session) - }) - } + else data._client.PutItem({ + TableName: name, + Item: session + }).then(() => callback(null, session)).catch(callback) }) }) } diff --git a/src/http/session/providers/ddb/find.js b/src/http/session/providers/ddb/find.js index ae69eb3c..0c64f3e6 100644 --- a/src/http/session/providers/ddb/find.js +++ b/src/http/session/providers/ddb/find.js @@ -1,27 +1,24 @@ -let { doc: dynamo } = require('../../../../tables/dynamo') let create = require('./create') module.exports = function _find (name, _idx, callback) { - dynamo(function _gotDB (err, db) { + let { tables } = require('../../../../') + + tables({}, (err, data) => { if (err) callback(err) - else { - db.get({ - TableName: name, - ConsistentRead: true, - Key: { _idx } - }, - function _get (err, data) { - if (err) callback(err) + else data._client.GetItem({ + TableName: name, + ConsistentRead: true, + Key: { _idx } + }) + .then(item => { + let result = typeof item === 'undefined' ? false : item.Item + if (result?._secret) { + callback(null, result) + } else { - let result = typeof data === 'undefined' ? 
false : data.Item - if (result?._secret) { - callback(null, result) - } - else { - create(name, {}, callback) - } + create(name, {}, callback) } }) - } + .catch(callback) }) } diff --git a/src/http/session/providers/ddb/update.js b/src/http/session/providers/ddb/update.js index f3b2c969..b08930ff 100644 --- a/src/http/session/providers/ddb/update.js +++ b/src/http/session/providers/ddb/update.js @@ -1,20 +1,14 @@ -let { doc: dynamo } = require('../../../../tables/dynamo') let week = require('./_week-from-now') module.exports = function _update (name, payload, callback) { + let { tables } = require('../../../../') let _ttl = week() let session = Object.assign(payload, { _ttl }) - dynamo(function _gotDB (err, db) { + tables({}, (err, data) => { if (err) callback(err) - else { - db.put({ - TableName: name, - Item: session - }, - function _create (err) { - if (err) callback(err) - else callback(null, session) - }) - } + else data._client.PutItem({ + TableName: name, + Item: session + }).then(() => callback(null, session)).catch(callback) }) } diff --git a/src/lib/_get-ports.js b/src/lib/_get-ports.js index f5bfa6b9..bf5906ee 100644 --- a/src/lib/_get-ports.js +++ b/src/lib/_get-ports.js @@ -1,12 +1,17 @@ +let cache module.exports = function getPorts (callback) { let { ARC_SANDBOX } = process.env let notFound = ReferenceError('Sandbox internal port not found') + if (cache) { + return callback(null, cache) + } // Sandbox env var is the happy path for Lambda runs if (ARC_SANDBOX) { let { ports } = JSON.parse(ARC_SANDBOX) if (!ports) { return callback(notFound) } + cache = ports callback(null, ports) } // Fall back to an internal SSM query in case Functions is running as a bare module @@ -21,6 +26,7 @@ module.exports = function getPorts (callback) { return callback(notFound) } let ports = JSON.parse(services.ARC_SANDBOX.ports) + cache = ports callback(null, ports) } }) diff --git a/src/lib/index.js b/src/lib/index.js index 15dc901a..e551e558 100644 --- a/src/lib/index.js +++ b/src/lib/index.js @@ -4,17 +4,37 @@ let sandboxVersionAtLeast = require('./_sandbox-version') let isNode18 = Number(process.version.replace('v', '').split('.')[0]) >= 18 let nonLocalEnvs = [ 'staging', 'production' ] + +function getAwsClient (params, callback) { + let awsLite = require('@aws-lite/client') + params.autoloadPlugins = false + params.region = process.env.AWS_REGION || 'us-west-2' + awsLite(params) + .then(client => callback(null, client)) + .catch(err => { + if (err.message.includes('AWS credentials') && !useAWS()) { + let accessKeyId = 'arc_dummy_access_key' + let secretAccessKey = 'arc_dummy_secret_key' + awsLite({ ...params, accessKeyId, secretAccessKey }) + .then(client => callback(null, client)) + .catch(callback) + } + else callback(err) + }) +} + function useAWS () { let { ARC_ENV, ARC_LOCAL, ARC_SANDBOX } = process.env // Testing is always local if (ARC_ENV === 'testing') return false - // Local, but using AWS resources + // Local, but using !testing environments if (nonLocalEnvs.includes(ARC_ENV) && ARC_SANDBOX && !ARC_LOCAL) return false // Assumed to be AWS return true } module.exports = { + getAwsClient, getPorts, isNode18, sandboxVersionAtLeast, diff --git a/src/static/index.js b/src/static/index.js index 7e57bc87..e25387e2 100644 --- a/src/static/index.js +++ b/src/static/index.js @@ -8,7 +8,7 @@ let { join } = require('path') * In order to keep this method sync, it does not use reflection to get fingerprint status * - Not checking @static fingerprint true (which we used to read from the .arc file) 
is possibly dangerous, so ensure asset path is valid * - ? TODO: add fingerprint state to env vars in Arc 6 to restore config safety? - * @param {string} path - the path to the asset (eg. /index.js) + * @param {string} asset - the path to the asset (eg. /index.js) * @returns {string} path - the resolved asset path (eg. /_static/index-xxx.js) */ module.exports = function _static (asset, options = {}) { diff --git a/src/tables/dynamo.js b/src/tables/dynamo.js deleted file mode 100644 index adfe2e3b..00000000 --- a/src/tables/dynamo.js +++ /dev/null @@ -1,86 +0,0 @@ -let { getPorts, isNode18, useAWS } = require('../lib') - -/** - * Instantiates Dynamo service interfaces - */ -let db, doc - -function getDynamo (type, callback) { - - if (!type) - throw ReferenceError('Must supply Dynamo service interface type') - - let { AWS_REGION } = process.env - - if (db && type === 'db') { - return callback(null, db) - } - - if (doc && type === 'doc') { - return callback(null, doc) - } - - let DB, Doc - if (isNode18) { - let dynamo = require('@aws-sdk/client-dynamodb') - let docclient = require('@aws-sdk/lib-dynamodb') - DB = dynamo.DynamoDB - Doc = docclient.DynamoDBDocument - } - else { - let dynamo = require('aws-sdk/clients/dynamodb') - DB = dynamo - Doc = dynamo.DocumentClient - } - - if (useAWS()) { - let config - // SDK v2 (Node <=16) does not have keep-alive enabled by default, whereas v3 (>=18) does - if (!isNode18) { - let https = require('https') - config = { - httpOptions: { - agent: new https.Agent({ - keepAlive: true, - maxSockets: 50, // Node can set to Infinity; AWS maxes at 50 - rejectUnauthorized: true, - }) - } - } - } - db = new DB(config) - doc = isNode18 ? Doc.from(db) : new Doc(config) - return callback(null, type === 'db' ? db : doc) - } - else { - getPorts((err, ports) => { - if (err) callback(err) - else { - let port = ports.tables - if (!port) { - return callback(ReferenceError('Sandbox tables port not found')) - } - let config = { - endpoint: `http://localhost:${port}`, - region: AWS_REGION || 'us-west-2' // Do not assume region is set! - } - if (isNode18) { - // Disable keep-alive locally (or wait Node's default 5s for sockets to time out) - let http = require('http') - let { NodeHttpHandler } = require('@aws-sdk/node-http-handler') - config.requestHandler = new NodeHttpHandler({ - httpAgent: new http.Agent({ keepAlive: false }) - }) - } - db = new DB(config) - doc = isNode18 ? Doc.from(db) : new Doc(config) - return callback(null, type === 'db' ? db : doc) - } - }) - } -} - -module.exports = { - doc: getDynamo.bind({}, 'doc'), - db: getDynamo.bind({}, 'db'), -} diff --git a/src/tables/factory.js b/src/tables/factory.js index fc235d14..39f04680 100644 --- a/src/tables/factory.js +++ b/src/tables/factory.js @@ -1,124 +1,133 @@ -let dynamo = require('./dynamo') -let parallel = require('run-parallel') +let { getAwsClient, getPorts, useAWS } = require('../lib') +let getAwsSdkClient = require('./legacy') +let enumerable = false +let paginate = true /** * returns a data client */ -module.exports = function reflectFactory (tables, callback) { - let local = process.env.ARC_ENV === 'testing' - - parallel(dynamo, function done (err, { db, doc }) { - if (err) return callback(err) - - let data = Object.keys(tables) - .filter(name => { - if (local && !name.includes('-production-')) return name - return name - }) - .reduce((client, fullName) => { - let name = local ? 
fullName.replace(/.+-staging-/, '') : fullName - client[name] = factory(tables[name]) - return client - }, {}) - - let enumerable = false +module.exports = function factory ({ services, options = {} }, callback) { + let { tables } = services + let { ARC_ENV, AWS_REGION } = process.env + let local = ARC_ENV === 'testing' + let region = AWS_REGION || 'us-west-2' + let plugins = [ '@aws-lite/dynamodb' ] + + if (useAWS()) { + getAwsClient({ region, plugins }, (err, aws) => { + if (err) callback(err) + else dynamoConstructor({ aws, local, options, tables }, callback) + }) + } + else { + getPorts((err, ports) => { + if (err) callback(err) + else { + let port = ports.tables + if (!port) { + return callback(ReferenceError('Sandbox tables port not found')) + } + let config = { + endpoint: `http://localhost:${port}`, + region, + plugins, + } + getAwsClient(config, (err, aws) => { + if (err) callback(err) + else dynamoConstructor({ aws, local, options, tables }, callback) + }) + } + }) + } +} + +function dynamoConstructor (params, callback) { + let { aws, local, options, tables } = params + let data = Object.keys(tables) + .filter(name => { + if (local && !name.includes('-production-')) return name + return name + }) + .reduce((client, fullName) => { + let name = local ? fullName.replace(/.+-staging-/, '') : fullName + client[name] = factory(tables[name]) + return client + }, {}) + + data.reflect = async () => tables + let _name = name => tables[name] + data.name = _name + data._name = _name + + Object.defineProperty(data, '_client', { enumerable, value: aws.dynamodb }) + + if (options.awsSdkClient) { + let { db, doc } = getAwsSdkClient(params) Object.defineProperty(data, '_db', { enumerable, value: db }) Object.defineProperty(data, '_doc', { enumerable, value: doc }) + } - // async jic for later - // eslint-disable-next-line - data.reflect = async () => tables - - let _name = name => tables[name] - data.name = _name - data._name = _name - - function factory (TableName) { - return promisify({ - delete (key, callback) { - let params = {} - params.TableName = TableName - params.Key = key - doc.delete(params, callback) - }, - get (key, callback) { - let params = {} - params.TableName = TableName - params.Key = key - doc.get(params, function _get (err, result) { - if (err) callback(err) - else callback(null, result.Item) - }) - }, - put (item, callback) { - let params = {} - params.TableName = TableName - params.Item = item - doc.put(params, function _put (err) { - if (err) callback(err) - else callback(null, item) - }) - }, - query (params, callback) { - params.TableName = TableName - doc.query(params, callback) - }, - scan (params = {}, callback) { - params.TableName = TableName - doc.scan(params, callback) - }, - scanAll (params = {}, callback) { - let records = [] - params.TableName = TableName - function getRecords () { - db.scan(params, (err, data) => { - if (err) callback(err) - else { - data.Items.forEach(d => records.push(d)) - if (data.LastEvaluatedKey) { - params.ExclusiveStartKey = data.LastEvaluatedKey - getRecords() - } - else { - callback(null, records) - } - } - }) - } - getRecords() - }, - update (params, callback) { - params.TableName = TableName - doc.update(params, callback) - } - }) - } + function go (method, params, callback) { + if (callback) method(params) + .then(result => callback(null, result)) + .catch(err => callback(err)) + else return method(params) + } - callback(null, data) - }) -} + function factory (TableName) { + return { + delete (Key, callback) { + if (callback) 
aws.dynamodb.DeleteItem({ TableName, Key }) + .then(result => callback(null, result)) + .catch(err => callback(err)) -// accepts an object and promisifies all keys -function promisify (obj) { - let copy = {} - Object.keys(obj).forEach(k => { - copy[k] = promised(obj[k]) - }) - return copy -} + else return new Promise((res, rej) => { + aws.dynamodb.DeleteItem({ TableName, Key }) + .then(result => res(result)) + .catch(rej) + }) + }, -// Accepts an errback style fn and returns a promisified fn -function promised (fn) { - return function _promisified (params, callback) { - if (!callback) { - return new Promise(function (res, rej) { - fn(params, function (err, result) { - err ? rej(err) : res(result) + get (Key, callback) { + if (callback) aws.dynamodb.GetItem({ TableName, Key }) + .then(({ Item }) => callback(null, Item)) + .catch(err => callback(err)) + + else return new Promise((res, rej) => { + aws.dynamodb.GetItem({ TableName, Key }) + .then(({ Item }) => res(Item)) + .catch(rej) }) - }) - } - else { - fn(params, callback) + }, + + put (Item, callback) { + return go(aws.dynamodb.PutItem, { TableName, Item }, callback) + }, + + query (params = {}, callback) { + return go(aws.dynamodb.Query, { ...params, TableName }, callback) + }, + + scan (params = {}, callback) { + return go(aws.dynamodb.Scan, { ...params, TableName }, callback) + }, + + scanAll (params = {}, callback) { + if (callback) aws.dynamodb.Scan({ ...params, TableName, paginate }) + .then(({ Items }) => callback(null, Items)) + .catch(err => callback(err)) + + else return new Promise((res, rej) => { + aws.dynamodb.Scan({ ...params, TableName, paginate }) + .then(({ Items }) => res(Items)) + .catch(rej) + }) + }, + + update (params, callback) { + return go(aws.dynamodb.UpdateItem, { ...params, TableName }, callback) + } } } + callback(null, data) } diff --git a/src/tables/index.js b/src/tables/index.js index ca422380..38d90979 100644 --- a/src/tables/index.js +++ b/src/tables/index.js @@ -18,7 +18,7 @@ let client = false */ module.exports = function tables (arc) { - function api (callback) { + function api (options = {}, callback) { let promise if (!callback) { promise = new Promise(function ugh (res, rej) { @@ -36,12 +36,8 @@ module.exports = function tables (arc) { waterfall([ function (callback) { arc.services() - .then(serviceMap => { - callback(null, serviceMap.tables) - }) - .catch(err => { - callback(err) - }) + .then(services => callback(null, { services, options })) + .catch(callback) }, factory, function (created, callback) { diff --git a/src/tables/legacy.js b/src/tables/legacy.js new file mode 100644 index 00000000..a115185e --- /dev/null +++ b/src/tables/legacy.js @@ -0,0 +1,66 @@ +let { isNode18, useAWS } = require('../lib') + +let client = {} + +/** + * Instantiates legacy AWS SDK DynamoDB service interfaces + */ +module.exports = function getLegacyDynamoClients ({ port, region }) { + + if (client.db && client.doc) return client + + let DB, Doc + + if (isNode18) { + // eslint-disable-next-line + let dynamo = require('@aws-sdk/client-dynamodb') + // eslint-disable-next-line + let docclient = require('@aws-sdk/lib-dynamodb') + DB = dynamo.DynamoDB + Doc = docclient.DynamoDBDocument + } + else { + // eslint-disable-next-line + let dynamo = require('aws-sdk/clients/dynamodb') + DB = dynamo + Doc = dynamo.DocumentClient + } + + if (useAWS()) { + let config + // SDK v2 (Node <=16) does not have keep-alive enabled by default, whereas v3 (>=18) does + if (!isNode18) { + let https = require('https') + config = { + 
httpOptions: { + agent: new https.Agent({ + keepAlive: true, + maxSockets: 50, // Node can set to Infinity; AWS maxes at 50 + rejectUnauthorized: true, + }) + } + } + } + client.db = new DB(config) + client.doc = isNode18 ? Doc.from(client.db) : new Doc(config) + return client + } + else { + let config = { + endpoint: `http://localhost:${port}`, + region, + } + if (isNode18) { + // Disable keep-alive locally (or wait Node's default 5s for sockets to time out) + let http = require('http') + // eslint-disable-next-line + let { NodeHttpHandler } = require('@smithy/node-http-handler') + config.requestHandler = new NodeHttpHandler({ + httpAgent: new http.Agent({ keepAlive: false }) + }) + } + client.db = new DB(config) + client.doc = isNode18 ? Doc.from(client.db) : new Doc(config) + return client + } +} diff --git a/src/ws/index.js b/src/ws/index.js index d018a21f..80be0b52 100644 --- a/src/ws/index.js +++ b/src/ws/index.js @@ -1,139 +1,128 @@ -let { isNode18, useAWS } = require('../lib') -let _api, _send, _close, _info +let { getAwsClient, useAWS } = require('../lib') +let client, ApiUrl function instantiateAPI () { - if (_api) return + return new Promise((res, rej) => { + if (client) res(client) - let { ARC_WSS_URL, AWS_REGION, ARC_SANDBOX } = process.env + getAwsClient({ + plugins: [ '@aws-lite/apigatewaymanagementapi' ] + }, (err, _client) => { + if (err) rej(err) + else { + client = _client + let { ARC_WSS_URL, ARC_SANDBOX } = process.env - if (isNode18) { - var { - ApiGatewayManagementApi, - PostToConnectionCommand, - DeleteConnectionCommand, - GetConnectionCommand - } = require('@aws-sdk/client-apigatewaymanagementapi') - } - else { - var ApiGatewayManagementApi = require('aws-sdk/clients/apigatewaymanagementapi') - } - - if (useAWS()) { - _api = new ApiGatewayManagementApi({ - apiVersion: '2018-11-29', - endpoint: `${ARC_WSS_URL.replace(/^ws/, 'http')}`, - }) - } - else { - let { ports } = JSON.parse(ARC_SANDBOX) - let port = ports._arc - if (!port) - throw ReferenceError('Architect internal port not found') - _api = new ApiGatewayManagementApi({ - apiVersion: '2018-11-29', - endpoint: `http://localhost:${port}/_arc/ws`, - region: AWS_REGION || 'us-west-2', + if (useAWS()) { + ApiUrl = ARC_WSS_URL + } + else { + let { ports } = JSON.parse(ARC_SANDBOX) + let port = ports._arc + if (!port) throw ReferenceError('Architect internal port not found') + ApiUrl = `http://localhost:${port}/_arc/ws` + } + res(client) + } }) - } - - - /** idk.. **/ - _send = (params, callback) => { - if (isNode18) { - let cmd = new PostToConnectionCommand(params) - return _api.send(cmd, callback) - } - else { - return callback ? _api.postToConnection(params, callback) : _api.postToConnection(params).promise() - } - } + }) +} - /** idk.. **/ - _close = (params, callback) => { - if (isNode18) { - let cmd = new DeleteConnectionCommand(params) - return _api.send(cmd, callback) - } - else { - return callback ? _api.deleteConnection(params, callback) : _api.deleteConnection(params).promise() - } - } +function _api (callback) { + if (callback) instantiateAPI() + .then(client => callback(null, client.ApiGatewayManagementApi)) + .catch(callback) - /** idk.. **/ - _info = (params, callback) => { - if (isNode18) { - let cmd = new GetConnectionCommand(params) - return _api.send(cmd, callback) - } - else { - return callback ? 
_api.getConnection(params, callback) : _api.getConnection(params).promise() - } - } + else return new Promise((res, rej) => { + instantiateAPI() + .then(client => res(client.ApiGatewayManagementApi)) + .catch(rej) + }) } -/** - * arc.ws.send - * - * publish web socket events - * - * @param {Object} params - * @param {String} params.id - the ws connection id (required) - * @param {Object} params.payload - an event payload (required) - * @param {Function} callback - a node style errback (optional) - * @returns {Promise} - returned if no callback is supplied - */ function send ({ id, payload }, callback) { - instantiateAPI() - return _send({ - ConnectionId: id, - Data: JSON.stringify(payload) - }, callback) + if (callback) instantiateAPI() + .then(client => { + client.ApiGatewayManagementApi.PostToConnection({ + ApiUrl, + ConnectionId: id, + Data: payload, + }) + .then(result => callback(null, result)) + .catch(callback) + }) + .catch(callback) + + else return new Promise((res, rej) => { + instantiateAPI() + .then(client => { + client.ApiGatewayManagementApi.PostToConnection({ + ApiUrl, + ConnectionId: id, + Data: payload, + }) + .then(result => res(result)) + .catch(rej) + }) + .catch(rej) + }) } -/** - * arc.ws.close - * - * publish web socket events - * - * @param {Object} params - * @param {String} params.id - the ws connection id (required) - * @param {Function} callback - a node style errback (optional) - * @returns {Promise} - returned if no callback is supplied - */ function close ({ id }, callback) { - instantiateAPI() - return _close({ - ConnectionId: id, - }, callback) + if (callback) instantiateAPI() + .then(client => { + client.ApiGatewayManagementApi.DeleteConnection({ + ApiUrl, + ConnectionId: id, + }) + .then(result => callback(null, result)) + .catch(callback) + }) + .catch(callback) + + else return new Promise((res, rej) => { + instantiateAPI() + .then(client => { + client.ApiGatewayManagementApi.DeleteConnection({ + ApiUrl, + ConnectionId: id, + }) + .then(result => res(result)) + .catch(rej) + }) + .catch(rej) + }) } -/** - * arc.ws.info - * - * publish web socket events - * - * @param {Object} params - * @param {String} params.id - the ws connection id (required) - * @param {Function} callback - a node style errback (optional) - * @returns {Promise} - returned if no callback is supplied - */ function info ({ id }, callback) { - instantiateAPI() - return _info({ - ConnectionId: id, - }, callback) + if (callback) instantiateAPI() + .then(client => { + client.ApiGatewayManagementApi.GetConnection({ + ApiUrl, + ConnectionId: id, + }) + .then(result => callback(null, result)) + .catch(callback) + }) + .catch(callback) + + else return new Promise((res, rej) => { + instantiateAPI() + .then(client => { + client.ApiGatewayManagementApi.GetConnection({ + ApiUrl, + ConnectionId: id, + }) + .then(result => res(result)) + .catch(rej) + }) + .catch(rej) + }) } module.exports = { + _api, send, close, info, } - -Object.defineProperty(module.exports, '_api', { - enumerable: true, - get () { - instantiateAPI() - return _api - } -}) diff --git a/test/integration/discovery-test.js b/test/integration/discovery-test.js new file mode 100644 index 00000000..9fe08858 --- /dev/null +++ b/test/integration/discovery-test.js @@ -0,0 +1,32 @@ +let { join } = require('path') +let test = require('tape') +let sandbox = require('@architect/sandbox') +let cwd = process.cwd() +let mock = join(cwd, 'test', 'mock', 'project') +let discovery = require('../../src/discovery') + +test('Set up env', async t => { + 
t.plan(1) + await sandbox.start({ cwd: mock, quiet: true }) + t.pass('Sandbox started') +}) + +test('discovery should parse hierarchical SSM parameters into a service map object', t => { + t.plan(6) + discovery((err, services) => { + t.notOk(err, 'No error passed to callback') + t.equal(services.tables['arc-sessions'], 'test-only-staging-arc-sessions', 'Table value set up in correct place of service map') + t.equal(services.tables.things, 'test-only-staging-things', 'Table value set up in correct place of service map') + + // Check deeper depths + t.ok(services.services.cloudwatch.metrics, 'Deeply nested object exists') + t.equal(services.services.cloudwatch.metrics.foo, 'bar', 'variable has correct value') + t.ok(services.services.cloudwatch.metrics.fiz, 'buz', 'variable has correct value') + }) +}) + +test('Teardown', async t => { + t.plan(1) + await sandbox.end() + t.pass('Sandbox ended') +}) diff --git a/test/integration/tables-test.js b/test/integration/tables-test.js index d9978c41..ed6bd0f2 100644 --- a/test/integration/tables-test.js +++ b/test/integration/tables-test.js @@ -11,10 +11,9 @@ let mock = join(__dirname, '..', 'mock') let tmp = join(mock, 'tmp') let shared = join(tmp, 'node_modules', '@architect', 'shared') - test('Set up mocked files', t => { t.plan(3) - process.env.ARC_APP_NAME = 'test' + process.env.ARC_APP_NAME = 'test-app-name' mkdir(shared, { recursive: true }) copyFileSync(join(mock, 'mock-arc'), join(shared, '.arc')) copyFileSync(join(mock, 'mock-arc'), join(tmp, '.arc')) @@ -89,7 +88,6 @@ test('tables get()', async t => { t.ok(result, 'got accounts table result') t.ok(result.baz.doe, 'result.baz.doe deserialized') result = null - console.log(data['accounts-messages'].get) result = await data['accounts-messages'].get({ accountID: 'fake', msgID: 'alsofake' @@ -107,7 +105,7 @@ test('tables delete()', async t => { let result = await data.accounts.get({ accountID: 'fake' }) - t.equals(result, undefined, 'could not get deleted accounts item') + t.equal(result, undefined, 'could not get deleted accounts item') await data['accounts-messages'].delete({ accountID: 'fake', msgID: 'alsofake' @@ -117,7 +115,7 @@ test('tables delete()', async t => { accountID: 'fake', msgID: 'alsofake' }) - t.equals(otherResult, undefined, 'could not get deleted accounts-messages item') + t.equal(otherResult, undefined, 'could not get deleted accounts-messages item') }) test('tables query()', async t => { @@ -138,7 +136,7 @@ test('tables query()', async t => { }) t.ok(result, 'got a result') - t.equals(result.Count, 1, 'got count of one') + t.equal(result.Count, 1, 'got count of one') }) test('tables scan()', async t => { @@ -181,7 +179,7 @@ test('tables update()', async t => { }) t.ok(result, 'got result') - t.equals(result.hits, 20, 'property updated') + t.equal(result.hits, 20, 'property updated') }) test('server closes', t => { diff --git a/test/integration/ws-test.js b/test/integration/ws-test.js index 4e071357..841e48fb 100644 --- a/test/integration/ws-test.js +++ b/test/integration/ws-test.js @@ -20,11 +20,20 @@ test('Connect, get message, send message, get message, send disconnect, be disco await new Promise(resolve => ws.once('open', resolve)) ws.send(JSON.stringify({ message: 'hi' })) + let infoMessage = await new Promise(resolve => ws.once('message', data => resolve(JSON.parse(data.toString('utf8'))))) + t.equal(infoMessage.message, 'hi back') t.equal(typeof infoMessage.info.ConnectedAt, 'string') + ws.send(JSON.stringify({ message: 'disconnect me' })) + await new Promise(resolve 
=> ws.once('close', resolve)) + + // At this point, it may be normal to see Sandbox errors in the console, like 'WebSocket is not open: readyState 3 (CLOSED)' + // At this point in the test the @ws disconnect Lambda is just firing up, but we're about to shut down Sandbox, thereby creating a Lambda execution race condition + // We'll have to fix that at some point in the future by ensuring Sandbox shuts down invocations before terminating + t.pass('Disconnected') }) diff --git a/test/mock/project/app.arc b/test/mock/project/app.arc index c1eefdad..c3a456a0 100644 --- a/test/mock/project/app.arc +++ b/test/mock/project/app.arc @@ -1,6 +1,9 @@ @app test-only +@plugins +services + @http get /http-session get /http-async-session @@ -22,3 +25,5 @@ async-queue arc-sessions _idx *String _ttl TTL +things + thing *String diff --git a/test/mock/project/src/plugins/services.js b/test/mock/project/src/plugins/services.js new file mode 100644 index 00000000..4b22ad6f --- /dev/null +++ b/test/mock/project/src/plugins/services.js @@ -0,0 +1,15 @@ +module.exports = { + deploy: { + services: async () => { + return { + tables: { + stuff: 'so-very-much-stuff' + }, + 'cloudwatch/metrics': { + foo: 'bar', + fiz: 'buz', + } + } + } + } +} diff --git a/test/unit/src/discovery/index-test.js b/test/unit/src/discovery/index-test.js deleted file mode 100644 index a537bc30..00000000 --- a/test/unit/src/discovery/index-test.js +++ /dev/null @@ -1,100 +0,0 @@ -let { isNode18 } = require('../../../../src/lib') -let test = require('tape') -let proxyquire = require('proxyquire') - -let err, page1, page2, ssmCounter = 0 -let ssm = class { - constructor () { - this.getParametersByPath = (params, callback) => { - let result = ssmCounter === 0 ? page1 : page2 - ssmCounter++ - callback(err, result) - } - } -} -let discovery = proxyquire('../../../../src/discovery', { - 'aws-sdk/clients/ssm': ssm -}) -let reset = () => { - err = page1 = page2 = undefined - ssmCounter = 0 -} - -if (!isNode18) { - test('Set up env', t => { - t.plan(1) - process.env.ARC_APP_NAME = 'test' - t.pass('Set up ARC_APP_NAME env var') - }) - - test('discovery should callback with error if SSM errors', t => { - t.plan(1) - err = true - discovery(err => { - t.ok(err, 'error passed into discovery callback') - reset() - }) - }) - - test('discovery should parse hierarchical SSM parameters into a service map object', t => { - t.plan(3) - page1 = { - Parameters: [ - { Name: '/app/tables/cats', Value: 'tableofcats' }, - { Name: '/app/events/walkthedog', Value: 'timetowalkthedog' } - ] - } - discovery((err, services) => { - t.notOk(err, 'no error passed to callback') - t.equals(services.tables.cats, 'tableofcats', 'cat table value set up in correct place of service map') - t.equals(services.events.walkthedog, 'timetowalkthedog', 'dogwalking event value set up in correct place of service map') - reset() - }) - }) - - test('discovery should parse hierarchical SSM parameters, even ones of different depths, into a service map object', t => { - t.plan(6) - page1 = { - Parameters: [ - { Name: '/app/tables/cats', Value: 'tableofcats' }, - { Name: '/app/cloudwatch/metrics/catbarf', Value: 'somuchbarf' }, - { Name: '/app/cloudwatch/metrics/chill', Value: 'quite' } - ] - } - discovery((err, services) => { - t.notOk(err, 'no error passed to callback') - t.equals(services.tables.cats, 'tableofcats', 'cat table value set up in correct place of service map') - t.ok(services.cloudwatch, 'cloudwatch object exists') - t.ok(services.cloudwatch.metrics, 'cloudwatch.metrics object 
exists') - t.equals(services.cloudwatch.metrics.catbarf, 'somuchbarf', 'cloudwatch.metrics.catbarf variable has correct value') - t.ok(services.cloudwatch.metrics.chill, 'quite', 'cloudwatch.metrics.child variable has correct value') - reset() - }) - }) - - test('discovery should parse several pages of hierarchical SSM parameters into a service map object', t => { - t.plan(5) - page1 = { NextToken: 'yes', Parameters: [ - { Name: '/app/tables/cats', Value: 'tableofcats' }, - { Name: '/app/events/walkthedog', Value: 'timetowalkthedog' } - ] } - page2 = { NextToken: null, Parameters: [ - { Name: '/app/queues/breadline', Value: 'favouritebakery' }, - { Name: '/app/tables/ofcontents', Value: 'chapters' } - ] } - discovery((err, services) => { - t.notOk(err, 'no error passed to callback') - t.equals(services.tables.cats, 'tableofcats', 'cat table value set up in correct place of service map') - t.equals(services.events.walkthedog, 'timetowalkthedog', 'dogwalking event value set up in correct place of service map') - t.equals(services.tables.ofcontents, 'chapters', 'ofcontents table value set up in correct place of service map') - t.equals(services.queues.breadline, 'favouritebakery', 'breadline queue value set up in correct place of service map') - reset() - }) - }) - - test('Teardown', t => { - t.plan(1) - delete process.env.ARC_APP_NAME - t.pass('Done!') - }) -} diff --git a/test/unit/src/tables/dynamo-test.js b/test/unit/src/tables/dynamo-test.js index e65ef405..7e71bc37 100644 --- a/test/unit/src/tables/dynamo-test.js +++ b/test/unit/src/tables/dynamo-test.js @@ -1,4 +1,4 @@ -let { isNode18 } = require('../../../../src/lib') +/* let test = require('tape') let file = '../../../../src/tables/dynamo' let dynamo @@ -18,149 +18,148 @@ function reset (t) { if (dynamo) t.fail('Did not unset module') } -if (!isNode18) { - test('Set up env', t => { - t.plan(2) - process.env.ARC_ENV = 'testing' - process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: 5555 } }) +test('Set up env', t => { + t.plan(2) + process.env.ARC_ENV = 'testing' + process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: 5555 } }) - // eslint-disable-next-line - dynamo = require(file) + // eslint-disable-next-line + dynamo = require(file) - // DB x callback - dynamo.db((err, db) => { - if (err) t.fail(err) - t.ok(db, 'Got DynamoDB object (callback)') - }) + // DB x callback + dynamo.db((err, db) => { + if (err) t.fail(err) + t.ok(db, 'Got DynamoDB object (callback)') + }) - // Doc x callback - dynamo.doc((err, doc) => { - if (err) t.fail(err) - t.ok(doc, 'Got DynamoDB document object (callback)') - }) + // Doc x callback + dynamo.doc((err, doc) => { + if (err) t.fail(err) + t.ok(doc, 'Got DynamoDB document object (callback)') + }) - reset(t) + reset(t) +}) + +test('Local port + region configuration', t => { + t.plan(20) + + process.env.ARC_ENV = 'testing' + process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: 5555 } }) + let localhost = 'localhost' + let defaultPort = 5555 + let defaultRegion = 'us-west-2' + let host = `${localhost}:${defaultPort}` + + // eslint-disable-next-line + dynamo = require(file) + + // DB x callback + dynamo.db(async (err, db) => { + if (err) t.fail(err) + t.equal(db.endpoint.host, host, `DB configured 'host' property is ${host}`) + t.equal(db.endpoint.hostname, localhost, `DB configured 'hostname' property is ${localhost}`) + t.equal(db.endpoint.href, `http://${host}/`, `DB configured 'href' property is http://${host}/`) + t.equal(db.endpoint.port, defaultPort, `DB configured 'port' property 
is ${defaultPort}`) + t.equal(db.config.region, defaultRegion, `DB configured 'region' property is ${defaultRegion}`) }) - test('Local port + region configuration', t => { - t.plan(20) + // Doc x callback + // For whatever mysterious reason(s), docs configure their endpoint under doc.service.endpoint, not doc.endpoint + dynamo.doc((err, doc) => { + if (err) t.fail(err) + t.equal(doc.service.endpoint.host, host, `Doc configured 'host' property is ${host}`) + t.equal(doc.service.endpoint.hostname, localhost, `Doc configured 'hostname' property is ${localhost}`) + t.equal(doc.service.endpoint.href, `http://${host}/`, `Doc configured 'href' property is http://${host}/`) + t.equal(doc.service.endpoint.port, defaultPort, `Doc configured 'port' property is ${defaultPort}`) + t.equal(doc.service.config.region, defaultRegion, `Doc configured 'region' property is ${defaultRegion}`) + }) - process.env.ARC_ENV = 'testing' - process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: 5555 } }) - let localhost = 'localhost' - let defaultPort = 5555 - let defaultRegion = 'us-west-2' - let host = `${localhost}:${defaultPort}` + reset(t) - // eslint-disable-next-line - dynamo = require(file) + let customPort = 5666 + let customRegion = 'us-east-1' + process.env.ARC_ENV = 'testing' + process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: customPort } }) + process.env.AWS_REGION = customRegion + host = `${localhost}:${customPort}` - // DB x callback - dynamo.db(async (err, db) => { - if (err) t.fail(err) - t.equal(db.endpoint.host, host, `DB configured 'host' property is ${host}`) - t.equal(db.endpoint.hostname, localhost, `DB configured 'hostname' property is ${localhost}`) - t.equal(db.endpoint.href, `http://${host}/`, `DB configured 'href' property is http://${host}/`) - t.equal(db.endpoint.port, defaultPort, `DB configured 'port' property is ${defaultPort}`) - t.equal(db.config.region, defaultRegion, `DB configured 'region' property is ${defaultRegion}`) - }) - - // Doc x callback - // For whatever mysterious reason(s), docs configure their endpoint under doc.service.endpoint, not doc.endpoint - dynamo.doc((err, doc) => { - if (err) t.fail(err) - t.equal(doc.service.endpoint.host, host, `Doc configured 'host' property is ${host}`) - t.equal(doc.service.endpoint.hostname, localhost, `Doc configured 'hostname' property is ${localhost}`) - t.equal(doc.service.endpoint.href, `http://${host}/`, `Doc configured 'href' property is http://${host}/`) - t.equal(doc.service.endpoint.port, defaultPort, `Doc configured 'port' property is ${defaultPort}`) - t.equal(doc.service.config.region, defaultRegion, `Doc configured 'region' property is ${defaultRegion}`) - }) - - reset(t) - - let customPort = 5666 - let customRegion = 'us-east-1' - process.env.ARC_ENV = 'testing' - process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: customPort } }) - process.env.AWS_REGION = customRegion - host = `${localhost}:${customPort}` - - // eslint-disable-next-line + // eslint-disable-next-line dynamo = require(file) - // DB x callback - dynamo.db((err, db) => { - if (err) t.fail(err) - t.equal(db.endpoint.host, host, `DB configured 'host' property is ${host}`) - t.equal(db.endpoint.hostname, localhost, `DB configured 'hostname' property is ${localhost}`) - t.equal(db.endpoint.href, `http://${host}/`, `DB configured 'href' property is http://${host}/`) - t.equal(db.endpoint.port, customPort, `DB configured 'port' property is ${customPort}`) - t.equal(db.config.region, customRegion, `DB configured 'region' property is 
${customRegion}`) - }) - - // Doc x callback - // For whatever mysterious reason(s), docs configure their endpoint under doc.service.endpoint, not doc.endpoint - dynamo.doc((err, doc) => { - if (err) t.fail(err) - t.equal(doc.service.endpoint.host, host, `Doc configured 'host' property is ${host}`) - t.equal(doc.service.endpoint.hostname, localhost, `Doc configured 'hostname' property is ${localhost}`) - t.equal(doc.service.endpoint.href, `http://${host}/`, `Doc configured 'href' property is http://${host}/`) - t.equal(doc.service.endpoint.port, customPort, `Doc configured 'port' property is ${customPort}`) - t.equal(doc.service.config.region, customRegion, `Doc configured 'region' property is ${customRegion}`) - }) - - reset(t) + // DB x callback + dynamo.db((err, db) => { + if (err) t.fail(err) + t.equal(db.endpoint.host, host, `DB configured 'host' property is ${host}`) + t.equal(db.endpoint.hostname, localhost, `DB configured 'hostname' property is ${localhost}`) + t.equal(db.endpoint.href, `http://${host}/`, `DB configured 'href' property is http://${host}/`) + t.equal(db.endpoint.port, customPort, `DB configured 'port' property is ${customPort}`) + t.equal(db.config.region, customRegion, `DB configured 'region' property is ${customRegion}`) }) - test('Live AWS infra config', t => { - t.plan(4) + // Doc x callback + // For whatever mysterious reason(s), docs configure their endpoint under doc.service.endpoint, not doc.endpoint + dynamo.doc((err, doc) => { + if (err) t.fail(err) + t.equal(doc.service.endpoint.host, host, `Doc configured 'host' property is ${host}`) + t.equal(doc.service.endpoint.hostname, localhost, `Doc configured 'hostname' property is ${localhost}`) + t.equal(doc.service.endpoint.href, `http://${host}/`, `Doc configured 'href' property is http://${host}/`) + t.equal(doc.service.endpoint.port, customPort, `Doc configured 'port' property is ${customPort}`) + t.equal(doc.service.config.region, customRegion, `Doc configured 'region' property is ${customRegion}`) + }) - // Defaults - process.env.ARC_ENV = 'testing' - process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: 5555 } }) + reset(t) +}) - // eslint-disable-next-line - dynamo = require(file) +test('Live AWS infra config', t => { + t.plan(4) - // DB x callback - dynamo.db((err, db) => { - if (err) t.fail(err) - t.notOk(db.config.httpOptions.agent, 'DB HTTP agent options not set') - }) + // Defaults + process.env.ARC_ENV = 'testing' + process.env.ARC_SANDBOX = JSON.stringify({ ports: { tables: 5555 } }) - // Doc x callback - dynamo.doc((err, doc) => { - if (err) t.fail(err) - t.notOk(doc.service.config.httpOptions.agent, 'Doc HTTP agent options not set') - }) + // eslint-disable-next-line + dynamo = require(file) - reset(t) + // DB x callback + dynamo.db((err, db) => { + if (err) t.fail(err) + t.notOk(db.config.httpOptions.agent, 'DB HTTP agent options not set') + }) - // Defaults - process.env.ARC_ENV = 'staging' - process.env.AWS_REGION = 'us-west-1' + // Doc x callback + dynamo.doc((err, doc) => { + if (err) t.fail(err) + t.notOk(doc.service.config.httpOptions.agent, 'Doc HTTP agent options not set') + }) - // eslint-disable-next-line - dynamo = require(file) + reset(t) - // DB x callback - dynamo.db((err, db) => { - if (err) t.fail(err) - t.ok(db.config.httpOptions.agent.options, 'DB HTTP agent options set') - }) + // Defaults + process.env.ARC_ENV = 'staging' + process.env.AWS_REGION = 'us-west-1' - // Doc x callback - dynamo.doc((err, doc) => { - if (err) t.fail(err) - 
t.ok(doc.service.config.httpOptions.agent.options, 'Doc HTTP agent options set') - }) + // eslint-disable-next-line + dynamo = require(file) - reset(t) + // DB x callback + dynamo.db((err, db) => { + if (err) t.fail(err) + t.ok(db.config.httpOptions.agent.options, 'DB HTTP agent options set') }) - test('Tear down env', t => { - t.plan(1) - reset(t) - t.pass('Tore down env') + // Doc x callback + dynamo.doc((err, doc) => { + if (err) t.fail(err) + t.ok(doc.service.config.httpOptions.agent.options, 'Doc HTTP agent options set') }) -} + + reset(t) +}) + +test('Tear down env', t => { + t.plan(1) + reset(t) + t.pass('Tore down env') +}) +*/ diff --git a/test/unit/src/tables/factory-test.js b/test/unit/src/tables/factory-test.js index 891e27a5..5b3713f8 100644 --- a/test/unit/src/tables/factory-test.js +++ b/test/unit/src/tables/factory-test.js @@ -1,36 +1,60 @@ +let { join } = require('path') let test = require('tape') let proxyquire = require('proxyquire') +let sandbox = require('@architect/sandbox') +let cwd = process.cwd() +let mock = join(cwd, 'test', 'mock', 'project') -let fakeDb = {} -let fakeDoc = {} -let factory +let noop = () => {} +let factory = proxyquire('../../../../src/tables/factory', { + './legacy': () => ({ db: noop, doc: noop }) +}) -test('Set up env', t => { - t.plan(1) - factory = proxyquire('../../../../src/tables/factory', { - './dynamo': { db: {}, doc: {} }, - 'run-parallel': (_, cb) => cb(null, { doc: fakeDoc, db: fakeDb }) - }) +let services = { tables: { hi: 'there' } } + +test('Set up env', async t => { + t.plan(2) + await sandbox.start({ cwd: mock, quiet: true }) + t.pass('Sandbox started') t.ok(factory, 'Tables factory ready') }) -test('tables.factory client properties', t => { - t.plan(3) - let tables = { bat: 'country' } - factory(tables, (err, client) => { + +test('tables.factory main client', t => { + t.plan(4) + factory({ services }, (err, client) => { if (err) t.fail(err) - t.ok(client._db === fakeDb, '_db property assigned') - t.ok(client._doc === fakeDoc, '_doc property assigned') - t.ok(client.bat, 'table name assigned') + t.ok(client._client, '_client property assigned') + t.notOk(client._db, '_db property not assigned') + t.notOk(client._doc, '_doc property not assigned') + t.ok(client.hi, 'table name assigned') + }) +}) + +test('tables.factory AWS SDK properties', t => { + t.plan(4) + factory({ services, options: { awsSdkClient: true } }, (err, client) => { + if (err) t.fail(err) + t.ok(client._client, '_client property assigned') + t.ok(client._db, '_db property assigned') + t.ok(client._doc, '_doc property assigned') + t.ok(client.hi, 'table name assigned') }) }) test('tables.factory client static methods', t => { t.plan(2) - let tables = { quart: 'tequila' } - factory(tables, async (err, client) => { + let services = { tables: { quart: 'tequila' } } + factory({ services }, async (err, client) => { if (err) t.fail(err) - t.equals(await client.reflect(), tables, 'reflect() returns tables object') + t.equals(await client.reflect(), services.tables, 'reflect() returns tables object') t.equals(client._name('quart'), 'tequila', '_name() returns tables value') }) }) + +test('Teardown', async t => { + t.plan(1) + delete process.env.ARC_ENV + await sandbox.end() + t.pass('Sandbox ended') +}) diff --git a/types/events.d.ts b/types/events.d.ts index 62a0f1ba..5c5fada3 100644 --- a/types/events.d.ts +++ b/types/events.d.ts @@ -1,5 +1,6 @@ -import { SNS, SQS } from "aws-sdk"; import { Callback } from "./util"; +import type { PublishResponse as SnsPublishResponse 
} from "@aws-lite/sns-types" +import type { SendMessageResponse as SqsPublishResponse } from "@aws-lite/sqs-types" // Turn off automatic exporting export { }; @@ -26,5 +27,5 @@ interface EventsOrQueues { ): LambdaFunction; } -export type ArcEvents = EventsOrQueues; -export type ArcQueues = EventsOrQueues; +export type ArcEvents = EventsOrQueues; +export type ArcQueues = EventsOrQueues; diff --git a/types/http.d.ts b/types/http.d.ts index f3f2f3a2..473f085d 100644 --- a/types/http.d.ts +++ b/types/http.d.ts @@ -2,7 +2,7 @@ import { APIGatewayProxyEvent, Context, APIGatewayProxyResult, -} from "aws-lambda"; +} from "aws-lambda"; // from @types/aws-lambda import { Callback } from "./util"; // Turn off automatic exporting diff --git a/types/index.d.ts b/types/index.d.ts index 88ddfe6b..21131dca 100644 --- a/types/index.d.ts +++ b/types/index.d.ts @@ -1,18 +1,18 @@ /// +import { ArcEvents, ArcQueues } from "./events"; import { ArcHTTP, HttpHandler, HttpAsyncHandler } from "./http"; import { ArcStatic } from "./static"; -import { ArcWebSocket } from "./ws"; -import { ArcEvents, ArcQueues } from "./events"; import { ArcTables } from "./tables"; +import { ArcWebSocket } from "./ws"; export type { HttpHandler, HttpAsyncHandler }; export type ArcServices = () => Promise>; -export const http: ArcHTTP; -export const static: ArcStatic; -export const ws: ArcWebSocket; -export const services: ArcServices; export const events: ArcEvents; +export const http: ArcHTTP; export const queues: ArcQueues; +export const services: ArcServices; +export const static: ArcStatic; export const tables: ArcTables; +export const ws: ArcWebSocket; diff --git a/types/index.test-d.ts b/types/index.test-d.ts index 0449c6ac..7e5b8e57 100644 --- a/types/index.test-d.ts +++ b/types/index.test-d.ts @@ -1,19 +1,26 @@ -import { ApiGatewayManagementApi, DynamoDB, SNS, SQS } from "aws-sdk"; -import { Context } from "aws-lambda"; +import type { AwsLiteClient } from "@aws-lite/client" +import type { GetConnectionResponse } from "@aws-lite/apigatewaymanagementapi-types"; +import type { PublishResponse } from "@aws-lite/sns-types" +import type { SendMessageResponse } from "@aws-lite/sqs-types" +import type { Context } from "aws-lambda"; import { expectType, expectAssignable, expectNotAssignable } from "tsd"; import arc from "../"; import type { HttpHandler, HttpAsyncHandler } from "../" import type { HttpMethods, HttpRequest, HttpResponse } from "./http"; +// SERVICES +const servicesResult = await arc.services(); +expectType>(servicesResult); + // EVENTS const eventsPublishArg = { name: "test", payload: { foo: "bar" } }; const eventsPublishResult = await arc.events.publish(eventsPublishArg); -expectType(eventsPublishResult); +expectType(eventsPublishResult); // QUEUES const queuesPublishArg = { name: "test", payload: { foo: "bar" } }; const queuesPublishResult = await arc.queues.publish(queuesPublishArg); -expectType(queuesPublishResult); +expectType(queuesPublishResult); // HTTP const middleware: HttpHandler = (req, res, next) => { @@ -94,8 +101,7 @@ arc.static("/", { stagePath: false }); // TABLES const dbClient = await arc.tables() -expectType(dbClient._db) -expectType(dbClient._doc) +expectType(dbClient._client) expectType(dbClient.name('widgets')) expectType>(dbClient.reflect()) const myTable = dbClient.foobar @@ -122,9 +128,9 @@ await myTable.scanAll({ }) // WS -expectType(arc.ws._api); +expectType(await arc.ws._api()); expectType(await arc.ws.send({ id: "foo", payload: { bar: "baz" } })); expectType(await arc.ws.close({ id: "foo" 
-expectType(
+expectType(
   await arc.ws.info({ id: "foo" }),
 );
diff --git a/types/tables.d.ts b/types/tables.d.ts
index 2eed8247..2bf50b96 100644
--- a/types/tables.d.ts
+++ b/types/tables.d.ts
@@ -1,4 +1,5 @@
-import type { DynamoDB } from "aws-sdk";
+import type { AwsLiteClient } from "@aws-lite/client"
+import type { QueryResponse, ScanResponse, UpdateItemResponse } from "@aws-lite/dynamodb-types"
 import { Callback } from "./util";

 // Turn off automatic exporting
@@ -17,17 +18,16 @@ type ItemsOutput = Omit & {
   Items: Item[];
 };

-type QueryParams = Params;
-type QueryOutput = ItemsOutput;
+type QueryParams = Params[0]>;
+type QueryOutput = ItemsOutput;

-type ScanParams = Params;
-type ScanOutput = ItemsOutput;
+type ScanParams = Params[0]>;
+type ScanOutput = ItemsOutput;

 type UpdateParams = ParamsWithKey<
-  DynamoDB.DocumentClient.UpdateItemInput,
+  Parameters[0],
   Item
 >;
-type UpdateOutput = DynamoDB.DocumentClient.UpdateItemOutput;

 // Depending on the operation, the key attributes may be mandatory, but we don't
 // know what the key attributes are, so Partial is the best we can do.
@@ -51,8 +51,8 @@ export interface ArcTable {
   scanAll(params: ScanParams): Promise;

-  update(params: UpdateParams): Promise;
-  update(params: UpdateParams, callback: Callback): void;
+  update(params: UpdateParams): Promise;
+  update(params: UpdateParams, callback: Callback): void;
 }

 type ArcDBWith = {
@@ -64,8 +64,9 @@ export type ArcDB = ArcDBWith & {
   reflect(): {
     [tableName in keyof Tables]: string;
   };
-  _db: DynamoDB;
-  _doc: DynamoDB.DocumentClient;
+  _client: AwsLiteClient["DynamoDB"];
+  // _db: DynamoDB;
+  // _doc: DynamoDB.DocumentClient;
 };

 // Permissive by default: allows any table, any inputs, any outputs.
diff --git a/types/ws.d.ts b/types/ws.d.ts
index a5f38e59..bef3fdb9 100644
--- a/types/ws.d.ts
+++ b/types/ws.d.ts
@@ -1,4 +1,5 @@
-import { ApiGatewayManagementApi } from "aws-sdk";
+import type { AwsLiteClient } from "@aws-lite/client"
+import type { GetConnectionResponse } from "@aws-lite/apigatewaymanagementapi-types";
 import { Callback } from "./util";

 // Turn off automatic exporting
@@ -7,10 +8,10 @@ export { };
 type SendParams = { id: string; payload: any };
 type CloseParams = { id: string };
 type InfoParams = { id: string };
-type InfoResponse = ApiGatewayManagementApi.Types.GetConnectionResponse;

 export interface ArcWebSocket {
-  _api: ApiGatewayManagementApi;
+  _api(): Promise;
+  _api(callback: Callback): void;

   send(params: SendParams): Promise;
   send(params: SendParams, callback: Callback): void;
@@ -18,6 +19,6 @@ export interface ArcWebSocket {
   close(params: CloseParams): Promise;
   close(params: CloseParams, callback: Callback): void;

-  info(params: InfoParams): Promise;
-  info(params: InfoParams, callback: Callback): void;
+  info(params: InfoParams): Promise;
+  info(params: InfoParams, callback: Callback): void;
 }
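// ---------------------------------------------------------------------------
// Not part of the diff above: a minimal, hand-written sketch of how the v8
// surface described by these type changes is meant to be used. The `notes`
// table, its attribute names, and the handler shape are hypothetical; only
// APIs covered by this changeset and the changelog are referenced.
// ---------------------------------------------------------------------------
import arc from "@architect/functions";

export async function handler() {
  const data = await arc.tables();

  // Per-table methods are unchanged from v7
  await data.notes.update({
    Key: { noteId: "abc" },
    UpdateExpression: "SET body = :body",
    ExpressionAttributeValues: { ":body": "hello" },
  });

  // New in v8: `_client` is an @aws-lite/dynamodb client (DocumentClient-style
  // marshalling); `_db` / `_doc` are no longer attached (or typed) by default.
  // Opting back in to the AWS SDK clients is a runtime flag (see changelog):
  //   await arc.tables({ awsSdkClient: true })
  const scanned = await data._client.Scan({ TableName: data.name("notes") });

  // `ws._api` is now an async getter returning the API Gateway management client
  const mgmt = await arc.ws._api();

  return { statusCode: 200, body: JSON.stringify(scanned.Items ?? []) };
}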