From 932d273a239f0060eda865d386f7cf2fc74bddae Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Mon, 25 Jul 2022 19:27:19 -0700 Subject: [PATCH 1/9] wip: most of porsagres port complete. only streaming problems left. * 1472 passing tests, 209 pending. --- lib/bin/run-server.js | 4 +- lib/data/odata-filter.js | 15 +- lib/external/postgres.js | 42 ++ lib/external/slonik.js | 30 - lib/model/container.js | 4 +- lib/model/frame.js | 5 +- lib/model/frames.js | 2 +- lib/model/query/actees.js | 2 +- lib/model/query/actors.js | 2 +- lib/model/query/analytics.js | 2 +- lib/model/query/assignments.js | 8 +- lib/model/query/audits.js | 8 +- lib/model/query/auth.js | 2 +- lib/model/query/blobs.js | 4 +- lib/model/query/client-audits.js | 6 +- lib/model/query/comments.js | 2 +- lib/model/query/configs.js | 12 +- lib/model/query/field-keys.js | 2 +- lib/model/query/form-attachments.js | 2 +- lib/model/query/forms.js | 4 +- lib/model/query/keys.js | 6 +- lib/model/query/projects.js | 2 +- lib/model/query/public-links.js | 2 +- lib/model/query/roles.js | 2 +- lib/model/query/sessions.js | 2 +- lib/model/query/submission-attachments.js | 6 +- lib/model/query/submissions.js | 14 +- lib/model/query/users.js | 2 +- lib/task/task.js | 4 +- lib/util/db.js | 149 ++--- lib/util/util.js | 7 +- lib/worker/worker.js | 2 +- package-lock.json | 521 ++---------------- package.json | 3 +- test/assertions.js | 6 + test/integration/api/audits.js | 15 +- test/integration/api/forms/draft.js | 2 +- test/integration/api/odata.js | 2 +- test/integration/api/projects.js | 2 +- test/integration/api/submissions.js | 4 +- test/integration/other/analytics-queries.js | 8 +- test/integration/other/blobs.js | 2 +- test/integration/other/encryption.js | 4 +- test/integration/other/form-purging.js | 2 +- test/integration/other/migrations.js | 6 +- test/integration/other/select-many.js | 6 +- test/integration/other/transactions.js | 4 +- test/integration/setup.js | 45 +- test/integration/task/reap-sessions.js | 2 +- 
test/integration/task/task.js | 2 +- .../worker/submission.attachment.update.js | 2 +- test/integration/worker/worker.js | 8 +- test/unit/data/odata-filter.js | 20 +- test/unit/model/frame.js | 6 +- test/unit/util/db.js | 78 +-- test/unit/util/util.js | 16 + test/util/sql.js | 45 ++ 57 files changed, 431 insertions(+), 736 deletions(-) create mode 100644 lib/external/postgres.js delete mode 100644 lib/external/slonik.js create mode 100644 test/util/sql.js diff --git a/lib/bin/run-server.js b/lib/bin/run-server.js index 1f4d8ff50..754ed68cf 100644 --- a/lib/bin/run-server.js +++ b/lib/bin/run-server.js @@ -20,8 +20,8 @@ global.tap = (x) => { console.log(x); return x; }; // eslint-disable-line no-con // CONTAINER SETUP // initialize our slonik connection pool. -const { slonikPool } = require('../external/slonik'); -const db = slonikPool(config.get('default.database')); +const { postgres } = require('../external/postgres'); +const db = postgres(config.get('default.database')); // set up our mailer. const env = config.get('default.env'); diff --git a/lib/data/odata-filter.js b/lib/data/odata-filter.js index c24874265..9aea3eb6d 100644 --- a/lib/data/odata-filter.js +++ b/lib/data/odata-filter.js @@ -7,8 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); -const { raw } = require('slonik-sql-tag-raw'); +const { sql } = require('../external/postgres'); const odataParser = require('odata-v4-parser'); const Problem = require('../util/problem'); @@ -20,24 +19,24 @@ const methodCall = (fn, params) => { // n.b. odata-v4-parser appears to already validate function name and arity. 
const lowerName = fn.toLowerCase(); if (extractFunctions.includes(lowerName)) - return sql`extract(${raw(lowerName)} from ${op(params[0])})`; // eslint-disable-line no-use-before-define + return sql`extract(${sql.unsafe(lowerName)} from ${op(params[0])})`; // eslint-disable-line no-use-before-define else if (fn === 'now') return sql`now()`; }; const binaryOp = (left, right, operator) => // always use parens to ensure the original AST op precedence. - sql`(${op(left)} ${raw(operator)} ${op(right)})`; // eslint-disable-line no-use-before-define + sql`(${op(left)} ${sql.unsafe(operator)} ${op(right)})`; // eslint-disable-line no-use-before-define const op = (node) => { if (node.type === 'FirstMemberExpression') { if (node.raw === '__system/submissionDate') { - return sql.identifier([ 'submissions', 'createdAt' ]); // TODO: HACK HACK + return sql('submissions.createdAt'); // TODO: HACK HACK } else if (node.raw === '__system/updatedAt') { - return sql.identifier([ 'submissions', 'updatedAt' ]); // TODO: HACK HACK + return sql('submissions.updatedAt'); // TODO: HACK HACK } else if (node.raw === '__system/submitterId') { - return sql.identifier([ 'submissions', 'submitterId' ]); // TODO: HACK HACK + return sql('submissions.submitterId'); // TODO: HACK HACK } else if (node.raw === '__system/reviewState') { - return sql.identifier([ 'submissions', 'reviewState' ]); // TODO: HACK HACK + return sql('submissions.reviewState'); // TODO: HACK HACK } else { throw Problem.internal.unsupportedODataField({ at: node.position, text: node.raw }); } diff --git a/lib/external/postgres.js b/lib/external/postgres.js new file mode 100644 index 000000000..11cd1b71d --- /dev/null +++ b/lib/external/postgres.js @@ -0,0 +1,42 @@ +// Copyright 2022 ODK Central Developers +// See the NOTICE file at the top-level directory of this distribution and at +// https://github.com/getodk/central-backend/blob/master/NOTICE. +// This file is part of ODK Central. 
It is subject to the license terms in +// the LICENSE file found in the top-level directory of this distribution and at +// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central, +// including this file, may be copied, modified, propagated, or distributed +// except according to the terms contained in the LICENSE file. + +// CRCRCR uhhhh confusing naming maybe idk +const _postgres = require('postgres'); +const { connectionString } = require('../util/db'); + +const options = { + // when saving to the database we transform all undefined to null rather than + // throw. this ought to be safe at time of writing because it's exactly what + // we did with slonik. possibly someday with better hygiene this can go away. + transform: { undefined: null }, + types: { + // the functions here are how postgres implements them for numerics. + // the issue is just that for range safety reasons they do not include + // bigint in their default impl, which is a problem we are unlikely to have. + // the practical problem is that count() in postgres yields a bigint. + bigint: { to: 0, from: [ 20 ], serialize: (x => '' + x), parse: (x => +x) }, + + // we don't want to automatically assume all non-true values can be safely + // equal to 'f', since we can take user input here. + boolean: { + to: 16, from: 16, + serialize: (x => (x === true ? 't' : x === false ? 'f' : x)), + parse: (x => x === 't') + } + } +}; +const postgres = (config) => _postgres(connectionString(config), options); + +// turns out you can get the templater just by omitting connection info completely. +// and p/postgres is happy to mix template literals across "connections". 
+const sql = _postgres(undefined, options); + +module.exports = { postgres, sql, options }; + diff --git a/lib/external/slonik.js b/lib/external/slonik.js deleted file mode 100644 index 901dc2b10..000000000 --- a/lib/external/slonik.js +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2021 ODK Central Developers -// See the NOTICE file at the top-level directory of this distribution and at -// https://github.com/getodk/central-backend/blob/master/NOTICE. -// This file is part of ODK Central. It is subject to the license terms in -// the LICENSE file found in the top-level directory of this distribution and at -// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central, -// including this file, may be copied, modified, propagated, or distributed -// except according to the terms contained in the LICENSE file. - -const { createPool, createDateTypeParser, createBigintTypeParser, createIntervalTypeParser, createNumericTypeParser } = require('slonik'); -const { connectionString } = require('../util/db'); - -const timestampTypeParser = { name: 'timestamp', parse: (x) => new Date(x) }; -const timestamptzTypeParser = { name: 'timestamptz', parse: (x) => new Date(x) }; - -const slonikPool = (config) => createPool(connectionString(config), { - captureStackTrace: false, - maximumPoolSize: config.maximumPoolSize ?? 
10, - typeParsers: [ - createDateTypeParser(), - createBigintTypeParser(), - createIntervalTypeParser(), - createNumericTypeParser(), - timestampTypeParser, - timestamptzTypeParser - ] -}); - -module.exports = { slonikPool }; - diff --git a/lib/model/container.js b/lib/model/container.js index cd581212a..2ed71351d 100644 --- a/lib/model/container.js +++ b/lib/model/container.js @@ -59,8 +59,8 @@ const queryModuleBuilder = (definition, container) => { class Container { constructor(...resources) { Object.assign(this, ...resources); } transacting(proc) { - if (this.isTransacting === true) return proc(this); - return this.db.transaction((trxn) => proc(this.with({ db: trxn, isTransacting: true }))); + if (this.isTransacting === true) return proc(this); // needed due to testing. + return this.db.begin((trxn) => proc(this.with({ db: trxn, isTransacting: true }))); } } diff --git a/lib/model/frame.js b/lib/model/frame.js index c9e523afb..11876e589 100644 --- a/lib/model/frame.js +++ b/lib/model/frame.js @@ -7,9 +7,9 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { raw } = require('slonik-sql-tag-raw'); const { pick, without } = require('ramda'); const uuid = require('uuid/v4'); +const { sql } = require('../external/postgres'); const { pickAll } = require('../util/util'); const Option = require('../util/option'); @@ -62,9 +62,8 @@ class Frame { { /* eslint-disable no-shadow */ // TODO: precomputing is good but this is sort of dirty :/ const Frame = class extends this { static get def() { return def; } }; - Frame.fieldlist = raw(def.fields.map((s) => `"${s}"`).join(',')); + Frame.fieldlist = sql.unsafe(def.fields.map((s) => `"${s}"`).join(',')); Frame.insertfields = without([ 'id' ], def.fields); - Frame.insertlist = raw(Frame.insertfields.map((s) => `"${s}"`).join(',')); Frame.hasCreatedAt = def.fields.includes('createdAt'); Frame.hasUpdatedAt = def.fields.includes('updatedAt'); return Frame; diff --git a/lib/model/frames.js b/lib/model/frames.js index 379d3e845..dee4f33f9 100644 --- a/lib/model/frames.js +++ b/lib/model/frames.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../external/postgres'); const { Frame, table, readable, writable, aux, species, embedded } = require('./frame'); const { isBlank } = require('../util/util'); const Option = require('../util/option'); diff --git a/lib/model/query/actees.js b/lib/model/query/actees.js index 419725ef3..7f31a9fa7 100644 --- a/lib/model/query/actees.js +++ b/lib/model/query/actees.js @@ -8,7 +8,7 @@ // except according to the terms contained in the LICENSE file. 
const uuid = require('uuid/v4'); -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Actee } = require('../frames'); const { construct } = require('../../util/util'); diff --git a/lib/model/query/actors.js b/lib/model/query/actors.js index 7cdb49f39..8e1f2413a 100644 --- a/lib/model/query/actors.js +++ b/lib/model/query/actors.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { insert, markDeleted } = require('../../util/db'); const { resolve } = require('../../util/promise'); diff --git a/lib/model/query/analytics.js b/lib/model/query/analytics.js index 3aac21dc2..a587fcbf3 100644 --- a/lib/model/query/analytics.js +++ b/lib/model/query/analytics.js @@ -8,7 +8,7 @@ // except according to the terms contained in the LICENSE file. const config = require('config'); -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { clone } = require('ramda'); const { metricsTemplate } = require('../../data/analytics'); diff --git a/lib/model/query/assignments.js b/lib/model/query/assignments.js index 7a13125a6..38c7b4008 100644 --- a/lib/model/query/assignments.js +++ b/lib/model/query/assignments.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Actor, Assignment } = require('../frames'); const { extender, equals, QueryOptions } = require('../../util/db'); const { getOrReject } = require('../../util/promise'); @@ -36,9 +36,9 @@ const grantSystem = (actor, systemName, actee) => ({ Assignments, Roles }) => .then(getOrReject(Problem.internal.missingSystemRow('role'))) .then((role) => Assignments.grant(actor, role, actee)); -const _revoke = (actor, roleId, acteeId) => ({ db }) => - db.query(sql`delete from assignments where ${equals({ actorId: actor.id, roleId, acteeId })}`) - .then(({ rowCount }) => Number(rowCount) > 0); +const _revoke = (actor, roleId, acteeId) => ({ q }) => + q(sql`delete from assignments where ${equals({ actorId: actor.id, roleId, acteeId })}`) + .then(({ count }) => count > 0); _revoke.audit = (actor, roleId, acteeId) => (log) => { if (actor.type === 'singleUse') return null; diff --git a/lib/model/query/audits.js b/lib/model/query/audits.js index fb16c4406..dadda1285 100644 --- a/lib/model/query/audits.js +++ b/lib/model/query/audits.js @@ -7,10 +7,10 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { Actee, Actor, Audit, Form, Project } = require('../frames'); -const { extender, equals, page, QueryOptions } = require('../../util/db'); +const { extender, equals, joinSqlStr, page, QueryOptions } = require('../../util/db'); const Option = require('../../util/option'); const { construct } = require('../../util/util'); @@ -25,7 +25,7 @@ const log = (actor, action, actee, details) => ({ run, context }) => { return run(sql` insert into audits ("actorId", action, "acteeId", details, notes, "loggedAt", processed, failures) -values (${actorId}, ${action}, ${acteeId}, ${(details == null) ? null : JSON.stringify(details)}, ${notes}, clock_timestamp(), ${processed}, 0)`); +values (${actorId}, ${action}, ${acteeId}, ${details}, ${notes}, clock_timestamp(), ${processed}, 0)`); }; @@ -63,7 +63,7 @@ const auditFilterer = (options) => { options.ifArg('start', (start) => result.push(sql`"loggedAt" >= ${start}`)); options.ifArg('end', (end) => result.push(sql`"loggedAt" <= ${end}`)); options.ifArg('action', (action) => result.push(actionCondition(action))); - return (result.length === 0) ? sql`true` : sql.join(result, sql` and `); + return (result.length === 0) ? sql`true` : joinSqlStr(result, sql` and `); }; const _get = extender(Audit)(Option.of(Actor), Option.of(Actor.alias('actee_actor', 'acteeActor')), Option.of(Form), Option.of(Form.Def), Option.of(Project), Option.of(Actee))((fields, extend, options) => sql` diff --git a/lib/model/query/auth.js b/lib/model/query/auth.js index 791169bda..ca05d7044 100644 --- a/lib/model/query/auth.js +++ b/lib/model/query/auth.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { compose, uniq, flatten, map } = require('ramda'); const { Actor, Session } = require('../frames'); const { resolve, reject } = require('../../util/promise'); diff --git a/lib/model/query/blobs.js b/lib/model/query/blobs.js index baf0b4713..425076869 100644 --- a/lib/model/query/blobs.js +++ b/lib/model/query/blobs.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { Blob } = require('../frames'); const { construct } = require('../../util/util'); @@ -22,7 +22,7 @@ const { construct } = require('../../util/util'); const ensure = (blob) => ({ oneFirst }) => oneFirst(sql` with ensured as (insert into blobs (sha, md5, content, "contentType") values - (${blob.sha}, ${blob.md5}, ${sql.binary(blob.content)}, ${blob.contentType || null}) + (${blob.sha}, ${blob.md5}, ${blob.content}, ${blob.contentType}) on conflict (sha) do update set sha = ${blob.sha} returning id) select id from ensured`); diff --git a/lib/model/query/client-audits.js b/lib/model/query/client-audits.js index 47f6c8513..0b1ac7897 100644 --- a/lib/model/query/client-audits.js +++ b/lib/model/query/client-audits.js @@ -7,8 +7,8 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { sql } = require('slonik'); -const { insertMany, QueryOptions } = require('../../util/db'); +const { sql } = require('../../external/postgres'); +const { insertMany, QueryOptions, joinSqlStr } = require('../../util/db'); const { odataFilter } = require('../../data/odata-filter'); @@ -20,7 +20,7 @@ const existsForBlob = (blobId) => ({ maybeOne }) => // TODO: copy-pasted from query/submission-attachments.js const keyIdCondition = (keyIds) => - sql.join((((keyIds == null) || (keyIds.length === 0)) ? [ -1 ] : keyIds), sql`,`); + joinSqlStr((((keyIds == null) || (keyIds.length === 0)) ? [ -1 ] : keyIds), sql`,`); const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ stream }) => stream(sql` select client_audits.*, blobs.content, submissions."instanceId", "localKey", "keyId", index, submissions."instanceId" from submission_defs diff --git a/lib/model/query/comments.js b/lib/model/query/comments.js index c0af72802..0a1632692 100644 --- a/lib/model/query/comments.js +++ b/lib/model/query/comments.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { Actor, Comment } = require('../frames'); const { extender, insert, QueryOptions } = require('../../util/db'); diff --git a/lib/model/query/configs.js b/lib/model/query/configs.js index 4e1938d97..47419e028 100644 --- a/lib/model/query/configs.js +++ b/lib/model/query/configs.js @@ -8,7 +8,7 @@ // except according to the terms contained in the LICENSE file. 
const { map } = require('ramda'); -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Config } = require('../frames'); const { construct } = require('../../util/util'); @@ -16,14 +16,12 @@ const get = (key) => ({ maybeOne }) => maybeOne(sql`select * from config where key=${key}`) .then(map(construct(Config.forKey(key)))); -const set = (key, value) => ({ one }) => { - const json = value != null ? JSON.stringify(value) : null; - return one(sql` -insert into config (key, value, "setAt") values (${key}, ${json}, clock_timestamp()) - on conflict (key) do update set value=${json}, "setAt"=clock_timestamp() +const set = (key, value) => ({ one }) => + one(sql` +insert into config ${sql({ key, value, setAt: sql`clock_timestamp()` })} + on conflict (key) do update set value=${value}, "setAt"=clock_timestamp() returning *`) .then(construct(Config.forKey(key))); -}; set.audit = (config) => (log) => log('config.set', null, { key: config.key, value: config.forApi().value }); set.audit.withResult = true; diff --git a/lib/model/query/field-keys.js b/lib/model/query/field-keys.js index a5cdb3e43..032d73fd1 100644 --- a/lib/model/query/field-keys.js +++ b/lib/model/query/field-keys.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Frame, readable } = require('../frame'); const { Actor, FieldKey } = require('../frames'); const { QueryOptions, extender, equals } = require('../../util/db'); diff --git a/lib/model/query/form-attachments.js b/lib/model/query/form-attachments.js index e6984582b..01f6b60b5 100644 --- a/lib/model/query/form-attachments.js +++ b/lib/model/query/form-attachments.js @@ -11,7 +11,7 @@ // definition that the form was created with. See the instance/form-attachments // file for more information. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map, merge } = require('ramda'); const { expectedFormAttachments } = require('../../data/schema'); const { Frame, into } = require('../frame'); diff --git a/lib/model/query/forms.js b/lib/model/query/forms.js index 5cf569f79..4927c4c0d 100644 --- a/lib/model/query/forms.js +++ b/lib/model/query/forms.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map, compose, always } = require('ramda'); const { Frame, into } = require('../frame'); const { Actor, Blob, Form } = require('../frames'); @@ -49,7 +49,7 @@ with def as values (nextval(pg_get_serial_sequence('forms', 'id')), ${form.xml}, ${def.name}, ${def.hash}, ${def.sha}, ${def.sha256}, ${def.version}, ${def.keyId}, ${form.xls.xlsBlobId || null}, ${(publish !== true) ? generateToken() : null}, clock_timestamp(), ${(publish === true) ? sql`clock_timestamp()` : null}) returning *), form as - (insert into forms (id, "xmlFormId", state, "projectId", ${sql.identifier([ (publish === true) ? 'currentDefId' : 'draftDefId' ])}, "acteeId", "createdAt") + (insert into forms (id, "xmlFormId", state, "projectId", ${sql((publish === true) ? 'currentDefId' : 'draftDefId')}, "acteeId", "createdAt") select def."formId", ${form.xmlFormId}, ${form.state || 'open'}, ${project.id}, def.id, ${actee.id}, def."createdAt" from def returning forms.*) select id from form`)) diff --git a/lib/model/query/keys.js b/lib/model/query/keys.js index ef8a05f85..18ade23e9 100644 --- a/lib/model/query/keys.js +++ b/lib/model/query/keys.js @@ -7,11 +7,11 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { Key } = require('../frames'); const { submissionDecryptor } = require('../../util/crypto'); -const { insert } = require('../../util/db'); +const { insert, joinSqlStr } = require('../../util/db'); const { resolve } = require('../../util/promise'); const { construct } = require('../../util/util'); @@ -48,7 +48,7 @@ order by id desc`) .then(map(construct(Key))); const getManagedByIds = (ids) => ({ all }) => - all(sql`select * from keys where managed=true and id in (${sql.join(ids, sql`,`)})`) + all(sql`select * from keys where managed=true and id in (${joinSqlStr(ids, sql`,`)})`) .then(map(construct(Key))); const getDecryptor = (passphrases = {}) => ({ Keys }) => { diff --git a/lib/model/query/projects.js b/lib/model/query/projects.js index f1ef7482c..16eb93c4c 100644 --- a/lib/model/query/projects.js +++ b/lib/model/query/projects.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Key, Project } = require('../frames'); const { extender, equals, insert, updater, markDeleted, QueryOptions } = require('../../util/db'); const { generateManagedKey, generateVersionSuffix, stripPemEnvelope } = require('../../util/crypto'); diff --git a/lib/model/query/public-links.js b/lib/model/query/public-links.js index d2df28939..c3c2e073e 100644 --- a/lib/model/query/public-links.js +++ b/lib/model/query/public-links.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Frame, readable } = require('../frame'); const { Actor, PublicLink } = require('../frames'); const { extender, equals, QueryOptions } = require('../../util/db'); diff --git a/lib/model/query/roles.js b/lib/model/query/roles.js index d565eb0a1..066416cd7 100644 --- a/lib/model/query/roles.js +++ b/lib/model/query/roles.js @@ -8,7 +8,7 @@ // except according to the terms contained in the LICENSE file. const { map } = require('ramda'); -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Role } = require('../frames'); const { construct } = require('../../util/util'); diff --git a/lib/model/query/sessions.js b/lib/model/query/sessions.js index 73ee5fd2f..d0e95e905 100644 --- a/lib/model/query/sessions.js +++ b/lib/model/query/sessions.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { Actor, Session } = require('../frames'); const { generateToken } = require('../../util/crypto'); diff --git a/lib/model/query/submission-attachments.js b/lib/model/query/submission-attachments.js index 3fcf06889..87c06ffb5 100644 --- a/lib/model/query/submission-attachments.js +++ b/lib/model/query/submission-attachments.js @@ -10,12 +10,12 @@ // Submission Attachments are files that are expected to exist given the submission // xml data and the form XForms xml definition. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { Audit, Blob, Submission } = require('../frames'); const { odataFilter } = require('../../data/odata-filter'); const { submissionXmlToFieldStream } = require('../../data/submission'); -const { insertMany, QueryOptions } = require('../../util/db'); +const { insertMany, QueryOptions, joinSqlStr } = require('../../util/db'); const { resolve } = require('../../util/promise'); const { isBlank, construct } = require('../../util/util'); const { traverseXml, findAll, root, node, text } = require('../../util/xml'); @@ -189,7 +189,7 @@ const getBySubmissionDefIdAndName = (subDefId, name) => ({ maybeOne }) => // TODO: copy-pasted to query/client-audits.js const keyIdCondition = (keyIds) => - sql.join((((keyIds == null) || (keyIds.length === 0)) ? [ -1 ] : keyIds), sql`,`); + joinSqlStr((((keyIds == null) || (keyIds.length === 0)) ? [ -1 ] : keyIds), sql`,`); const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ stream }) => stream(sql` select submission_attachments.name, blobs.content, submission_attachments.index, form_defs."keyId", submissions."instanceId", submission_defs."localKey" from submission_defs diff --git a/lib/model/query/submissions.js b/lib/model/query/submissions.js index 44ff6579b..153c0bd3c 100644 --- a/lib/model/query/submissions.js +++ b/lib/model/query/submissions.js @@ -8,11 +8,11 @@ // except according to the terms contained in the LICENSE file. 
const { map } = require('ramda'); -const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { Frame, table } = require('../frame'); const { Actor, Form, Submission } = require('../frames'); const { odataFilter } = require('../../data/odata-filter'); -const { unjoiner, extender, equals, page, updater, QueryOptions, insertMany } = require('../../util/db'); +const { unjoiner, extender, equals, joinSqlStr, page, updater, QueryOptions, insertMany } = require('../../util/db'); const { blankStringToNull, construct } = require('../../util/util'); const Problem = require('../../util/problem'); @@ -21,7 +21,7 @@ const Problem = require('../../util/problem'); // SUBMISSION CREATE const _defInsert = (id, partial, formDefId, actorId, root, deviceId, userAgent) => sql`insert into submission_defs ("submissionId", xml, "formDefId", "instanceId", "instanceName", "submitterId", "localKey", "encDataAttachmentName", "signature", "createdAt", root, current, "deviceId", "userAgent") - values (${id}, ${sql.binary(partial.xml)}, ${formDefId}, ${partial.instanceId}, ${partial.def.instanceName}, ${actorId}, ${partial.def.localKey}, ${partial.def.encDataAttachmentName}, ${partial.def.signature}, clock_timestamp(), ${root}, true, ${deviceId}, ${userAgent}) + values (${id}, ${partial.xml.toString('utf8')}, ${formDefId}, ${partial.instanceId}, ${partial.def.instanceName}, ${actorId}, ${partial.def.localKey}, ${partial.def.encDataAttachmentName}, ${partial.def.signature}, clock_timestamp(), ${root}, true, ${deviceId}, ${userAgent}) returning *`; const nextval = sql`nextval(pg_get_serial_sequence('submissions', 'id'))`; @@ -228,8 +228,7 @@ const _exportUnjoiner = unjoiner(Submission, Submission.Def, Submission.Xml, Sub // TODO: this is a terrible hack to add some logic to one of our select fields. this is // the /only/ place we need to do this in the entire codebase right now. so for now // we just use the terrible hack. 
-const { raw } = require('slonik-sql-tag-raw'); -const _exportFields = raw(_exportUnjoiner.fields.sql.replace( +const _exportFields = sql.unsafe(_exportUnjoiner.fields.strings[0].replace( 'submission_defs."xml" as "submission_defs!xml"', '(case when submission_defs."localKey" is null then submission_defs.xml end) as "submission_defs!xml"' )); @@ -266,7 +265,7 @@ inner join group by "submissionId") as edits on edits."submissionId"=submission_defs."submissionId" where - ${encrypted ? sql`(form_defs."encKeyId" is null or form_defs."encKeyId" in (${sql.join(keyIds, sql`,`)})) and` : sql``} + ${encrypted ? sql`(form_defs."encKeyId" is null or form_defs."encKeyId" in (${joinSqlStr(keyIds, sql`,`)})) and` : sql``} ${odataFilter(options.filter)} and ${equals(options.condition)} and submission_defs.current=true and submissions."formId"=${formId} and submissions."deletedAt" is null @@ -275,8 +274,7 @@ ${page(options)}`; }; const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ stream }) => - stream(_export(formId, draft, keyIds, options)) - .then(stream.map(_exportUnjoiner)); + stream.map(_exportUnjoiner)(stream(_export(formId, draft, keyIds, options))); const getForExport = (formId, instanceId, draft, options = QueryOptions.none) => ({ maybeOne }) => maybeOne(_export(formId, draft, [], options.withCondition({ 'submissions.instanceId': instanceId }))) diff --git a/lib/model/query/users.js b/lib/model/query/users.js index bae7905bb..f4d05f863 100644 --- a/lib/model/query/users.js +++ b/lib/model/query/users.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { sql } = require('slonik'); +const { sql } = require('../../external/postgres'); const { map } = require('ramda'); const { Actor, User } = require('../frames'); const { unjoiner, page, equals, QueryOptions } = require('../../util/db'); diff --git a/lib/task/task.js b/lib/task/task.js index 8eb30a599..a391f3eff 100644 --- a/lib/task/task.js +++ b/lib/task/task.js @@ -21,7 +21,7 @@ const config = require('config'); const container = require('../model/container'); const Problem = require('../util/problem'); const Option = require('../util/option'); -const { slonikPool } = require('../external/slonik'); +const { postgres } = require('../external/postgres'); const { serialize } = require('../util/http'); @@ -47,7 +47,7 @@ const task = { // not thread-safe! but we don't have threads.. withContainer: (taskdef) => (...args) => { const needsContainer = (task._container == null); - if (needsContainer) task._container = container.withDefaults({ db: slonikPool(config.get('default.database')), bcrypt, env, mail, task: true, odkAnalytics }); + if (needsContainer) task._container = container.withDefaults({ db: postgres(config.get('default.database')), bcrypt, env, mail, task: true, odkAnalytics }); const result = taskdef(task._container)(...args); diff --git a/lib/util/db.js b/lib/util/db.js index e40287eee..f2cb22b56 100644 --- a/lib/util/db.js +++ b/lib/util/db.js @@ -7,15 +7,15 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
+const { Readable } = require('stream'); const { inspect } = require('util'); -const { merge, pick, always } = require('ramda'); -const { sql } = require('slonik'); -const { raw } = require('slonik-sql-tag-raw'); +const { merge, pick, always, reduce } = require('ramda'); +const sql = require('postgres')(); const { reject } = require('./promise'); const Problem = require('./problem'); const Option = require('./option'); const { PartialPipe, mapStream } = require('./stream'); -const { construct } = require('./util'); +const { construct, objIsEmpty } = require('./util'); const { isTrue, isFalse } = require('./http'); @@ -66,7 +66,47 @@ const connectionObject = (config) => { //////////////////////////////////////////////////////////////////////////////// -// SLONIK UTIL +// STREAMING DB ACCESS +// for whatever reason porsagres ships without any native node stream support. +// here we use pg-query-stream as guidance to derive streams from the cursor interface. +// the way the porsagres async iterator works as of time of writing, each window +// is only fetched when next() is called. +// +// this guide here is handy: +// https://nodejs.org/en/docs/guides/backpressuring-in-streams/ +class QueryStream extends Readable { + constructor(q) { + // pg-query-stream uses default 100 hwm, just bake it here until we want + // otherwise. meanwhile cursor batch size could be a separate number but they + // default to the same in pg-q-s so again just take it for now.. 
+ super({ objectMode: true, autoDestroy: true, highWaterMark: 100 }); + this.q = q; + this.iterator = q.cursor(100)[Symbol.asyncIterator](); + } + // https://nodejs.org/api/stream.html#stream_readable_read_size_1 + _read(size) { + this.q.cursorRows = size; // CRCRCR: this is internals + const next = this.iterator.next(); + if (next.done === true) return this.push(null); + + next.then(({ value, done }) => { + // TODO: need to handle .push retval + if (value != null) for (let i = 0; i < value.length; i += 1) this.push(value[i]); + if (done) this.push(null); + }).catch(this.destroy.bind(this)); + } + + //_final() { this.iterator.return(); } + _destroy(err, cb) { + this.iterator.return(); + cb(err); + } +} + +//////////////////////////////////////////////////////////////////////////////// +// FRAGMENT UTIL + +const nothing = sql``; // join/unjoin util @@ -111,11 +151,10 @@ const unjoiner = (...frames) => { return new frames[0](primary, bag); }; - unjoin.fields = raw(fields.join(',')); + unjoin.fields = sql.unsafe(fields.join(',')); return unjoin; }; -const nothing = sql``; const extender = (...standard) => (...extended) => (sqlFunc) => { const stdUnjoiner = unjoiner(...standard); const extUnjoiner = unjoiner(...standard, ...extended); @@ -130,61 +169,46 @@ const extender = (...standard) => (...extended) => (sqlFunc) => { //////////////////////////////////////// // common query util -// generic insert utility -const _assign = (obj) => (k) => { - if (k === 'createdAt') return sql`clock_timestamp()`; - const v = obj[k]; - return (v === null) ? null : - (v === undefined) ? null : - ((typeof v === 'object') && (v.constructor === Object)) ? JSON.stringify(v) : // eslint-disable-line indent - (v instanceof Date) ? 
v.toISOString() : // eslint-disable-line indent - v; // eslint-disable-line indent -}; const insert = (obj) => { - const keys = Object.keys(obj); - if (obj.constructor.hasCreatedAt) keys.push('createdAt'); - const fieldlist = sql.join(keys.map((k) => sql.identifier([ k ])), sql`,`); - return sql` -insert into ${raw(obj.constructor.table)} (${fieldlist}) -values (${sql.join(keys.map(_assign(obj)), sql`,`)}) -returning *`; + const data = obj.constructor.hasCreatedAt ? { createdAt: sql`clock_timestamp()`, ...obj } : obj; + return sql`insert into ${sql(obj.constructor.table)} ${sql(data)} returning *`; }; const insertMany = (objs) => { if (objs.length === 0) return sql`select true`; const Type = objs[0].constructor; - return sql` -insert into ${raw(Type.table)} (${Type.insertlist}) -values ${sql.join( - objs.map((obj) => sql`(${sql.join(Type.insertfields.map(_assign(obj)), sql`,`)})`), - sql`,` - )}`; + const data = Type.hasCreatedAt + ? objs.map((obj) => ({ createdAt: sql`clock_timestamp()`, ...obj })) + : objs; + + return sql`insert into ${sql(Type.table)} ${sql(data, ...Type.insertfields)}`; }; // generic update utility const updater = (obj, data, whereKey = 'id') => { - const keys = Object.keys(data); - if (keys.length === 0) return sql`select true`; - const assigner = _assign(data); + if (objIsEmpty(data)) return sql`select true`; return sql` -update ${raw(obj.constructor.table)} -set ${sql.join(keys.map((k) => sql`${sql.identifier([ k ])}=${assigner(k)}`), sql`,`)} +update ${sql(obj.constructor.table)} +set ${sql(data)} ${obj.constructor.hasUpdatedAt ? 
sql`,"updatedAt"=clock_timestamp()` : nothing} -where ${sql.identifier([ whereKey ])}=${obj[whereKey]} +where ${sql(whereKey)}=${obj[whereKey]} returning *`; }; // generic del utility const markDeleted = (obj) => - sql`update ${raw(obj.constructor.table)} set "deletedAt"=now() where id=${obj.id}`; + sql`update ${sql(obj.constructor.table)} set "deletedAt"=now() where id=${obj.id}`; const markUndeleted = (obj) => - sql`update ${raw(obj.constructor.table)} set "deletedAt"=null where id=${obj.id}`; + sql`update ${sql(obj.constructor.table)} set "deletedAt"=null where id=${obj.id}`; //////////////////////////////////////// // query fragment util +const joinSqlStr = (xs, separator) => + reduce(((m, x) => (m ? sql`${m}${separator}${x}` : x)), undefined, xs); + const equals = (obj) => { const keys = Object.keys(obj); if (keys.length === 0) return sql`true`; @@ -193,54 +217,54 @@ const equals = (obj) => { for (let i = 0; i < keys.length; i += 1) { const k = keys[i]; const v = obj[k]; - parts[i] = (v === null) ? sql`${sql.identifier(k.split('.'))} is null` - : sql`${sql.identifier(k.split('.'))}=${obj[k]}`; + parts[i] = (v === null) ? sql`${sql(k)} is null` : sql`${sql(k)}=${obj[k]}`; } - return sql.join(parts, sql` and `); + return joinSqlStr(parts, sql` and `); }; const page = (options) => { const parts = []; if (options.offset != null) parts.push(sql`offset ${options.offset}`); if (options.limit != null) parts.push(sql`limit ${options.limit}`); - return parts.length ? sql.join(parts, sql` `) : nothing; + return parts.length ? 
joinSqlStr(parts, sql` `) : nothing; }; //////////////////////////////////////// // query func decorator // -// these serve three purposes: -// 1 they smooth over slonik's maybe* funcs not using our Option (bc of course -// why would it) without us polluting slonik nor requiring excessive homework -// 2 they account for the awkward fact that by using slonik's funcs here we end -// up having a hard time writing generic postprocessing helpers of our own since -// the result monad comes in so many flavors. so we provide a map func to decode -// what happened -// 3 relatedly, they allow passing of the query func to such a helper method, as -// used above in extender +// there used to be a long explanation here talking about how these methods +// line up with slonik's api, and how the .map "methods" we provide here helped +// us deal with how its api sort of forces its users into these result monad shapes +// that make dealing with them generically (eg w the extender or the joiner/unjoiner) +// sort of difficult. +// +// well, we don't use slonik anymore so this is just what our query api looks +// like now. it turns out to be pretty convenient anyway. // -// but also, they do seem a bit goofy and if anyone has a cleaner idea please give -// it a try and a pull request. +// CRCRCR: but actually maybe we can get rid of .map now and clean up the joiner call.. const queryFuncs = (db, obj) => { /* eslint-disable no-param-reassign */ - obj.run = (s) => db.query(s).then(always(true)); + obj.run = (s) => db`${s}`.then(always(true)); obj.run.map = () => () => true; - obj.one = (s) => db.one(s); + obj.q = (s) => db`${s}`; + + obj.one = (s) => db`${s}`.then((xs) => xs[0]); // CRCRCR: slonik used to Enforce this, do we care? 
obj.one.map = (f) => (x) => f(x); - obj.maybeOne = (s) => db.maybeOne(s).then(Option.of); + obj.maybeOne = (s) => db`${s}`.then((xs) => Option.of(xs[0])); obj.maybeOne.map = (f) => (x) => x.map(f); - obj.oneFirst = (s) => db.oneFirst(s); + obj.oneFirst = (s) => db`${s}`.values().then((xs) => xs[0][0]); obj.oneFirst.map = (f) => (x) => f(x); - obj.all = (s) => db.any(s); + obj.all = (s) => db`${s}`; obj.all.map = (f) => (xs) => { const result = new Array(xs.length); for (let i = 0; i < xs.length; i += 1) result[i] = f(xs[i]); return result; }; - obj.stream = (s) => new Promise((resolve) => db.stream(s, resolve)); - obj.stream.map = (f) => (strm) => PartialPipe.of(strm, mapStream(({ row }) => f(row))); + obj.stream = (s) => new QueryStream(db`${s}`); + obj.stream.map = (f) => (strm) => PartialPipe.of(strm, mapStream(f)); + /* eslint-enable no-param-reassign */ }; @@ -371,7 +395,7 @@ const postgresErrorToProblem = (x) => { // else we tried to do in parallel failed, as expected. we'll log a small error // message and proceed on; whatever the original failure was will already have // bubbled up to the user. - process.stderr.write('!! 25P02 >> error: current transaction is aborted, commands ignored until end of transaction block\n'); + process.stderr.write('\n!! 25P02 >> error: current transaction is aborted, commands ignored until end of transaction block\n'); return reject(); } else if (error.code === 'P0001') { // raise_exception const match01 = /ODK01:(.+)$/.exec(error.message); @@ -430,13 +454,14 @@ const postgresErrorToProblem = (x) => { debugger; // automatically trip the debugger if it's attached. 
process.stderr.write(inspect(error)); + process.stderr.write(`\nQUERY\n=====\n${error.query}\n`); return reject(error); }; module.exports = { connectionString, connectionObject, - unjoiner, extender, equals, page, queryFuncs, + unjoiner, extender, equals, joinSqlStr, page, queryFuncs, insert, insertMany, updater, markDeleted, markUndeleted, QueryOptions, postgresErrorToProblem diff --git a/lib/util/util.js b/lib/util/util.js index 87d7c4d1d..d652b27b9 100644 --- a/lib/util/util.js +++ b/lib/util/util.js @@ -61,6 +61,11 @@ const pickAll = (keys, obj) => { return result; }; +const objIsEmpty = (obj) => { + for (const k in obj) if (Object.hasOwnProperty.call(obj, k)) return false; + return true; +}; + //////////////////////////////////////// // CLASSES @@ -76,7 +81,7 @@ const construct = (Type) => (x, y) => new Type(x, y); module.exports = { noop, noargs, isBlank, isPresent, blankStringToNull, sanitizeOdataIdentifier, - printPairs, without, pickAll, + printPairs, without, pickAll, objIsEmpty, construct }; diff --git a/lib/worker/worker.js b/lib/worker/worker.js index 0c09c64ae..566d8c054 100644 --- a/lib/worker/worker.js +++ b/lib/worker/worker.js @@ -10,7 +10,7 @@ const { min } = Math; const { inspect } = require('util'); const { head } = require('ramda'); -const { sql } = require('slonik'); +const { sql } = require('../external/postgres'); const { timebound, resolve } = require('../util/promise'); const defaultJobMap = require('./jobs').jobs; diff --git a/package-lock.json b/package-lock.json index 8eb628dfa..6bc7a231e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -723,6 +723,7 @@ "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, "requires": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -769,7 +770,7 @@ "append-field": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", - "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==" + "integrity": "sha1-HjRA6RXwsSA9I3SOeO3XubW0PlY=" }, "append-transform": { "version": "2.0.0", @@ -1071,11 +1072,6 @@ } } }, - "bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" - }, "body-parser": { "version": "1.18.2", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", @@ -1093,11 +1089,6 @@ "type-is": "~1.6.15" } }, - "boolean": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/boolean/-/boolean-3.0.2.tgz", - "integrity": "sha512-RwywHlpCRc3/Wh81MiCKun4ydaIFyW5Ea6JbL6sRCVx5q5irDw7pMXBUFYF/jArQ6YrG36q0kpovc9P/Kd3I4g==" - }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1164,11 +1155,6 @@ "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" }, - "bufferput": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/bufferput/-/bufferput-0.1.3.tgz", - "integrity": "sha1-xs1KLO+jldIod3She8mpCyJER9k=" - }, "busboy": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", @@ -1428,11 +1414,6 @@ "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", "dev": true }, - "complex.js": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/complex.js/-/complex.js-2.0.11.tgz", - "integrity": "sha512-6IArJLApNtdg1P1dFtn3dnyzoZBEF0MwMnrfF1exSBRpZYoy4yieMkpZhQDC0uwctw48vii0CFVyHfpgZ/DfGw==" - }, "component-emitter": { "version": "1.2.1", "resolved": 
"https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", @@ -1625,11 +1606,6 @@ "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", "dev": true }, - "decimal.js": { - "version": "10.2.1", - "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.2.1.tgz", - "integrity": "sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw==" - }, "decode-uri-component": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz", @@ -1641,11 +1617,6 @@ "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, - "deepmerge": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", - "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==" - }, "default-require-extensions": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz", @@ -1675,6 +1646,7 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, "requires": { "object-keys": "^1.0.12" } @@ -1716,11 +1688,6 @@ } } }, - "delay": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", - "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==" - }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -1751,11 +1718,6 @@ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" }, - "detect-node": { - "version": "2.0.4", - "resolved": 
"https://registry.npmjs.org/detect-node/-/detect-node-2.0.4.tgz", - "integrity": "sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw==" - }, "diff": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", @@ -1924,7 +1886,8 @@ "es6-error": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", - "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==" + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", + "dev": true }, "escalade": { "version": "3.1.1", @@ -1937,11 +1900,6 @@ "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" }, - "escape-latex": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/escape-latex/-/escape-latex-1.2.0.tgz", - "integrity": "sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw==" - }, "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", @@ -2561,22 +2519,14 @@ "fast-deep-equal": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", - "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==" + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", + "dev": true }, "fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" - }, - "fast-json-stringify": { - "version": "2.4.2", - "resolved": 
"https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-2.4.2.tgz", - "integrity": "sha512-hXNC8Hj5ZYf0PRt67JyLwrw72XOIUSOj8IYXk8w8kotBox02L08Dvz6c8IMpCUXof1H+dLlBz1aszGqP9xmrBw==", - "requires": { - "ajv": "^6.11.0", - "deepmerge": "^4.2.2", - "string-similarity": "^4.0.1" - } + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true }, "fast-levenshtein": { "version": "2.0.6", @@ -2589,15 +2539,6 @@ "resolved": "https://registry.npmjs.org/fast-myers-diff/-/fast-myers-diff-3.0.1.tgz", "integrity": "sha512-e8p26utONwDXeSDkDqu4jaR3l3r6ZgQO2GWB178ePZxCfFoRPNTJVZylUEHHG6uZeRikL1zCc2sl4sIAs9c0UQ==" }, - "fast-printf": { - "version": "1.5.8", - "resolved": "https://registry.npmjs.org/fast-printf/-/fast-printf-1.5.8.tgz", - "integrity": "sha512-JeC2XYMZC39upanvnSkPlg7/5aW8DA0nt4M2+pZMCAmeOEttQ5hShOlwwfeZlmZHZ8M2BLtYPA/PRr1R8rSABQ==", - "requires": { - "boolean": "^3.0.2", - "mathjs": "^9.2.0" - } - }, "fast-text-encoding": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz", @@ -2824,11 +2765,6 @@ "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" }, - "fraction.js": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.0.13.tgz", - "integrity": "sha512-E1fz2Xs9ltlUp+qbiyx9wmt2n9dRzPsS11Jtdb8D2o+cC7wr9xkkKsVKJuBX0ST+LVS+LhLO+SbLJNtfWcJvXA==" - }, "fragment-cache": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", @@ -2955,25 +2891,6 @@ "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true }, - "get-stack-trace": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/get-stack-trace/-/get-stack-trace-2.0.3.tgz", - "integrity": 
"sha512-hHPCRF3NkR6/IbQAy1FflRJf5XnlAxNU2AADfGJDMIZ7wIzMPd+d1uBMgKsu52+05p14qDLQdtV/FosbDQVdhQ==", - "requires": { - "bluebird": "^3.7.1", - "source-map": "^0.8.0-beta.0" - }, - "dependencies": { - "source-map": { - "version": "0.8.0-beta.0", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz", - "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==", - "requires": { - "whatwg-url": "^7.0.0" - } - } - } - }, "get-symbol-description": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", @@ -3044,14 +2961,6 @@ "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "dev": true }, - "globalthis": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.1.tgz", - "integrity": "sha512-mJPRTc/P39NH/iNG4mXa9aIhNymaQikTrnspeCa2ZuJ+mH2QN/rXwtX3XwKrHqWgUQFbNZKtHM105aHzJalElw==", - "requires": { - "define-properties": "^1.1.3" - } - }, "google-auth-library": { "version": "7.11.0", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.11.0.tgz", @@ -3323,22 +3232,6 @@ } } }, - "hyperid": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/hyperid/-/hyperid-2.1.0.tgz", - "integrity": "sha512-cSakhxbUsaIuqjfvvcUuvl/Fl342J65xgLLYrYxSSr9qmJ/EJK+S8crS6mIlQd/a7i+Pe4D0MgSrtZPLze+aCw==", - "requires": { - "uuid": "^3.4.0", - "uuid-parse": "^1.1.0" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } - } - }, "iconv-lite": { "version": "0.4.19", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", @@ -3396,11 +3289,6 @@ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", 
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, - "int64-buffer": { - "version": "0.99.1007", - "resolved": "https://registry.npmjs.org/int64-buffer/-/int64-buffer-0.99.1007.tgz", - "integrity": "sha512-XDBEu44oSTqlvCSiOZ/0FoUkpWu/vwjJLGSKDabNISPQNZ5wub1FodGHBljRsrR0IXRPq7SslshZYMuA55CgTQ==" - }, "internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", @@ -3485,11 +3373,6 @@ "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", "dev": true }, - "is-circular": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-circular/-/is-circular-1.0.2.tgz", - "integrity": "sha512-YttjnrswnUYRVJvxCvu8z+PGMUSzC2JttP0OEXezlAEdp3EXzhf7IZ3j0gRAybJBQupedIZFhY61Tga6E0qASA==" - }, "is-core-module": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", @@ -3690,11 +3573,6 @@ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" }, - "iso8601-duration": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/iso8601-duration/-/iso8601-duration-1.3.0.tgz", - "integrity": "sha512-K4CiUBzo3YeWk76FuET/dQPH03WE04R94feo5TSKQCXpoXQt9E4yx2CnY737QZnSAI3PI4WlKo/zfqizGx52QQ==" - }, "isobject": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", @@ -3835,11 +3713,6 @@ "istanbul-lib-report": "^3.0.0" } }, - "javascript-natural-sort": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz", - "integrity": "sha1-+eIwPUUH9tdDVac2ZNFED7Wg71k=" - }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -3873,7 +3746,8 @@ "json-schema-traverse": { "version": "0.4.1", "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true }, "json-stable-stringify-without-jsonify": { "version": "1.0.1", @@ -3884,7 +3758,8 @@ "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true }, "json5": { "version": "0.4.0", @@ -4000,7 +3875,8 @@ "lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true }, "lodash.defaults": { "version": "4.2.0", @@ -4034,11 +3910,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=" - }, "lodash.union": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.union/-/lodash.union-4.6.0.tgz", @@ -4168,21 +4039,6 @@ "object-visit": "^1.0.0" } }, - "mathjs": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/mathjs/-/mathjs-9.2.0.tgz", - "integrity": "sha512-R2fQxaOmyifxgP4+c59dnfLwpKI1KYHdnT5lLwDuHIZvgyGb71M8ay6kTJTEv9rG04pduqvX4tbBUoG5ypTF8A==", - "requires": { - "complex.js": "^2.0.11", - "decimal.js": "^10.2.1", - "escape-latex": "^1.2.0", - "fraction.js": "^4.0.13", - "javascript-natural-sort": 
"^0.7.1", - "seedrandom": "^3.0.5", - "tiny-emitter": "^2.1.0", - "typed-function": "^2.0.0" - } - }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -4659,11 +4515,6 @@ "xtend": "^4.0.0" } }, - "multi-fork": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/multi-fork/-/multi-fork-0.0.2.tgz", - "integrity": "sha1-gFiuxGFBJMftqhWBm4juiJ0+tOA=" - }, "mustache": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/mustache/-/mustache-2.3.0.tgz", @@ -4997,7 +4848,8 @@ "object-keys": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true }, "object-visit": { "version": "1.0.1", @@ -5137,11 +4989,6 @@ "es-abstract": "^1.19.1" } }, - "obuf": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", - "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" - }, "odata-v4-parser": { "version": "0.1.29", "resolved": "https://registry.npmjs.org/odata-v4-parser/-/odata-v4-parser-0.1.29.tgz", @@ -5330,23 +5177,23 @@ "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=" }, "pg": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/pg/-/pg-8.5.1.tgz", - "integrity": "sha512-9wm3yX9lCfjvA98ybCyw2pADUivyNWT/yIP4ZcDVpMN0og70BUWYEGXPCTAQdGTAqnytfRADb7NERrY1qxhIqw==", + "version": "8.7.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.7.3.tgz", + "integrity": "sha512-HPmH4GH4H3AOprDJOazoIcpI49XFsHCe8xlrjHkWiapdbHK+HLtbm/GQzXYAZwmPju/kzKhjaSfMACG+8cgJcw==", "requires": { "buffer-writer": "2.0.0", "packet-reader": "1.0.0", - "pg-connection-string": "^2.4.0", - "pg-pool": "^3.2.2", - "pg-protocol": "^1.4.0", + 
"pg-connection-string": "^2.5.0", + "pg-pool": "^3.5.1", + "pg-protocol": "^1.5.0", "pg-types": "^2.1.0", "pgpass": "1.x" }, "dependencies": { "pg-connection-string": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.4.0.tgz", - "integrity": "sha512-3iBXuv7XKvxeMrIgym7njT+HlZkwZqqGX4Bu9cci8xHZNT+Um1gWKqCsAzcC0d95rcKMU5WBg6YRUcHyV0HZKQ==" + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" } } }, @@ -5355,47 +5202,10 @@ "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.4.0.tgz", "integrity": "sha512-3iBXuv7XKvxeMrIgym7njT+HlZkwZqqGX4Bu9cci8xHZNT+Um1gWKqCsAzcC0d95rcKMU5WBg6YRUcHyV0HZKQ==" }, - "pg-copy-streams": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/pg-copy-streams/-/pg-copy-streams-5.1.1.tgz", - "integrity": "sha512-ieW6JuiIo/4WQ7n+Wevr9zYvpM1AwUs6EwNCCA0VgKZ6ZQ7Y9k3IW00vqc6svX9FtENhbaTbLN7MxekraCrbfg==", - "requires": { - "obuf": "^1.1.2" - } - }, - "pg-copy-streams-binary": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pg-copy-streams-binary/-/pg-copy-streams-binary-2.0.1.tgz", - "integrity": "sha512-+N0pelO7rA8b9i4i46NuB81O6Tir0N02Y72FzQO54aTsR9hfy6TIZdfnE61YVwQiFD6+2NkabcZnJXhd0fCdsg==", - "requires": { - "bl": "^4.0.3", - "bufferput": "^0.1.3", - "ieee754": "^1.1.13", - "int64-buffer": "^0.99.1007", - "multi-fork": "0.0.2", - "through2": "^3.0.1" - }, - "dependencies": { - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "through2": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", - "integrity": 
"sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", - "requires": { - "inherits": "^2.0.4", - "readable-stream": "2 || 3" - } - } - } - }, "pg-cursor": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/pg-cursor/-/pg-cursor-2.5.2.tgz", - "integrity": "sha512-yS0lxXA5WoIVK7BUgJr1uOJDJe5JxVezItTLvqnTXj6bF3di4UtQOrPx8RW3GpFmom2NTQfpEc2N6vvdpopQSw==" + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/pg-cursor/-/pg-cursor-2.7.3.tgz", + "integrity": "sha512-vmjXRMD4jZK/oHaaYk6clTypgHNlzCCAqyLCO5d/UeI42egJVE5H4ZfZWACub3jzkHUXXyvibH207zAJg9iBOw==" }, "pg-int8": { "version": "1.0.1", @@ -5403,21 +5213,21 @@ "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" }, "pg-pool": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.2.tgz", - "integrity": "sha512-ORJoFxAlmmros8igi608iVEbQNNZlp89diFVx6yV5v+ehmpMY9sK6QgpmgoXbmkNaBAx8cOOZh9g80kJv1ooyA==" + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.1.tgz", + "integrity": "sha512-6iCR0wVrro6OOHFsyavV+i6KYL4lVNyYAB9RD18w66xSzN+d8b66HiwuP30Gp1SH5O9T82fckkzsRjlrhD0ioQ==" }, "pg-protocol": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.4.0.tgz", - "integrity": "sha512-El+aXWcwG/8wuFICMQjM5ZSAm6OWiJicFdNYo+VY3QP+8vI4SvLIWVe51PppTzMhikUJR+PsyIFKqfdXPz/yxA==" + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" }, "pg-query-stream": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/pg-query-stream/-/pg-query-stream-4.0.0.tgz", - "integrity": "sha512-Jftit2EUBn+ilh4JtAgw0JXKR54vATmYHlZ4fmIlbZgL1qT/GKUuwMzLFT8QQm+qJHZwlRIIfkMUOP7soY38ag==", + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/pg-query-stream/-/pg-query-stream-4.2.3.tgz", + "integrity": "sha512-3mrOzffAoGGi2EqsfTdKanKn444ZB+E+Gbz/EJL3rd0thlXD3kb3ZBrwX42bRnQssrEd7/kVFM1FbiIMSQ5ung==", "requires": { - "pg-cursor": "^2.5.2" + "pg-cursor": "^2.7.3" } }, "pg-types": { @@ -5433,11 +5243,11 @@ } }, "pgpass": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz", - "integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", "requires": { - "split2": "^3.1.1" + "split2": "^4.1.0" } }, "picocolors": { @@ -5466,6 +5276,11 @@ "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=" }, + "postgres": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/postgres/-/postgres-3.2.4.tgz", + "integrity": "sha512-iscysD+ZlM4A9zj0RS2zo3f4Us4yuov94Yx+p3dE1rEARaBHC8R3/gRq40KEnWp1lxjuFq9EjuAenIUsPaTaDA==" + }, "postgres-array": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", @@ -5474,7 +5289,7 @@ "postgres-bytea": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", - "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=" + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==" }, "postgres-date": { "version": "1.0.7", @@ -5545,7 +5360,8 @@ "punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + "integrity": 
"sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true }, "qs": { "version": "6.5.1", @@ -5761,21 +5577,6 @@ } } }, - "roarr": { - "version": "4.0.11", - "resolved": "https://registry.npmjs.org/roarr/-/roarr-4.0.11.tgz", - "integrity": "sha512-xAd9VyA+P+Ry10leSaAzxPsGnjolDuMFMQlDx6yoeSUnXLJiTKVbv1vQwMv2j3Hxen8nS5+h/mS90iuISUTorA==", - "requires": { - "boolean": "^3.0.2", - "detect-node": "^2.0.4", - "fast-json-stringify": "^2.4.1", - "fast-printf": "^1.5.8", - "globalthis": "^1.0.1", - "is-circular": "^1.0.2", - "json-stringify-safe": "^5.0.1", - "semver-compare": "^1.0.0" - } - }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", @@ -5797,11 +5598,6 @@ "truncate-utf8-bytes": "^1.0.0" } }, - "seedrandom": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz", - "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==" - }, "semver": { "version": "7.3.7", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", @@ -5811,11 +5607,6 @@ "lru-cache": "^6.0.0" } }, - "semver-compare": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", - "integrity": "sha1-De4hahyUGrN+nvsXiPavxf9VN/w=" - }, "send": { "version": "0.16.2", "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", @@ -5843,21 +5634,6 @@ } } }, - "serialize-error": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-8.0.1.tgz", - "integrity": "sha512-r5o60rWFS+8/b49DNAbB+GXZA0SpDpuWE758JxDKgRTga05r3U5lwyksE91dYKDhXSmnu36RALj615E6Aj5pSg==", - "requires": { - "type-fest": "^0.20.2" - }, - "dependencies": { - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": 
"sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" - } - } - }, "serialize-javascript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", @@ -5993,116 +5769,6 @@ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" }, - "slonik": { - "version": "23.6.0", - "resolved": "https://registry.npmjs.org/slonik/-/slonik-23.6.0.tgz", - "integrity": "sha512-4SqZ4U9NVd6OYIsMKN2wrNbmXQqiifu54M3SP33XjXcJ8qsk2NBiwGwSGIeuW5QtRqnfNHBdgdQsNfrfMFKlag==", - "requires": { - "concat-stream": "^2.0.0", - "delay": "^5.0.0", - "es6-error": "^4.1.1", - "get-stack-trace": "^2.0.3", - "hyperid": "^2.1.0", - "is-plain-object": "^5.0.0", - "iso8601-duration": "^1.3.0", - "pg": "^8.5.1", - "pg-connection-string": "^2.4.0", - "pg-copy-streams": "^5.1.1", - "pg-copy-streams-binary": "^2.0.1", - "pg-cursor": "^2.5.2", - "postgres-array": "^3.0.1", - "postgres-interval": "^3.0.0", - "roarr": "^4.0.11", - "serialize-error": "^8.0.1", - "through2": "^4.0.2" - }, - "dependencies": { - "concat-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", - "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", - "requires": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^3.0.2", - "typedarray": "^0.0.6" - } - }, - "is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" - }, - "pg-connection-string": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.4.0.tgz", - "integrity": 
"sha512-3iBXuv7XKvxeMrIgym7njT+HlZkwZqqGX4Bu9cci8xHZNT+Um1gWKqCsAzcC0d95rcKMU5WBg6YRUcHyV0HZKQ==" - }, - "postgres-array": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.1.tgz", - "integrity": "sha512-h7i53Dw2Yq3a1uuZ6lbVFAkvMMwssJ8jkzeAg0XaZm1XIFF/t/s+tockdqbWTymyEm07dVenOQbFisEi+kj8uA==" - }, - "postgres-interval": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-3.0.0.tgz", - "integrity": "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==" - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - } - } - }, - "slonik-sql-tag-raw": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/slonik-sql-tag-raw/-/slonik-sql-tag-raw-1.0.3.tgz", - "integrity": "sha512-7XvkX+jR7O8b2kP/RNBRUFjFhQJIfRC1lSE8tIkaGOqoZljj08iKBI6hhDKwm5uvk+TJ/IDn6jcbrdmdks062Q==", - "requires": { - "lodash": "^4.17.15", - "roarr": "^2.15.2", - "serialize-error": "^6.0.0" - }, - "dependencies": { - "roarr": { - "version": "2.15.4", - "resolved": "https://registry.npmjs.org/roarr/-/roarr-2.15.4.tgz", - "integrity": "sha512-CHhPh+UNHD2GTXNYhPWLnU8ONHdI+5DI+4EYIAOaiD63rHeYlZvyh8P+in5999TTSFgUYuKUAjzRI4mdh/p+2A==", - "requires": { - "boolean": "^3.0.1", - "detect-node": "^2.0.4", - "globalthis": "^1.0.1", - "json-stringify-safe": "^5.0.1", - "semver-compare": "^1.0.0", - "sprintf-js": "^1.1.2" - } - }, - "serialize-error": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-6.0.0.tgz", - "integrity": 
"sha512-3vmBkMZLQO+BR4RPHcyRGdE09XCF6cvxzk2N2qn8Er3F91cy8Qt7VvEbZBOpaL53qsBbe2cFOefU6tRY6WDelA==", - "requires": { - "type-fest": "^0.12.0" - } - }, - "sprintf-js": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.2.tgz", - "integrity": "sha512-VE0SOVEHCk7Qc8ulkWw3ntAzXuqf7S2lvwQaDLRnUeIEaKNQJzV6BwmLKhOqT61aGhfUMrXeaBk+oDGCzvhcug==" - }, - "type-fest": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz", - "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==" - } - } - }, "snapdragon": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", @@ -6256,24 +5922,9 @@ } }, "split2": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", - "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", - "requires": { - "readable-stream": "^3.0.0" - }, - "dependencies": { - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - } - } + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==" }, "sprintf-js": { "version": "1.0.3", @@ -6324,11 +5975,6 @@ "readable-stream": "^2.0.5" } }, - "string-similarity": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/string-similarity/-/string-similarity-4.0.4.tgz", - "integrity": "sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==" - }, "string-width": { 
"version": "1.0.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", @@ -6538,36 +6184,11 @@ "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", "dev": true }, - "through2": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", - "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "requires": { - "readable-stream": "3" - }, - "dependencies": { - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - } - } - }, "tildify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz", "integrity": "sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==" }, - "tiny-emitter": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/tiny-emitter/-/tiny-emitter-2.1.0.tgz", - "integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==" - }, "tmp": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", @@ -6651,14 +6272,6 @@ "is-number": "^7.0.0" } }, - "tr46": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", - "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=", - "requires": { - "punycode": "^2.1.0" - } - }, "truncate-utf8-bytes": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz", @@ -6719,11 +6332,6 @@ "mime-types": "~2.1.18" } }, - "typed-function": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/typed-function/-/typed-function-2.0.0.tgz", - "integrity": 
"sha512-Hhy1Iwo/e4AtLZNK10ewVVcP2UEs408DS35ubP825w/YgSBK1KVLwALvvIG4yX75QJrxjCpcWkzkVRB0BwwYlA==" - }, "typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", @@ -6824,6 +6432,7 @@ "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "dev": true, "requires": { "punycode": "^2.1.0" } @@ -6863,11 +6472,6 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.2.1.tgz", "integrity": "sha512-jZnMwlb9Iku/O3smGWvZhauCf6cvvpKi4BKRiliS3cxnI+Gz9j5MEpTz2UFuXiKPJocb7gnsLHwiS05ige5BEA==" }, - "uuid-parse": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/uuid-parse/-/uuid-parse-1.1.0.tgz", - "integrity": "sha512-OdmXxA8rDsQ7YpNVbKSJkNzTw2I+S5WsbMDnCtIWSQaosNAcWtFuI/YK1TjzUI6nbkgiqEyh8gWngfcv8Asd9A==" - }, "v8-compile-cache": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", @@ -6895,21 +6499,6 @@ "defaults": "^1.0.3" } }, - "webidl-conversions": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", - "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==" - }, - "whatwg-url": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", - "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", - "requires": { - "lodash.sortby": "^4.7.0", - "tr46": "^1.0.1", - "webidl-conversions": "^4.0.2" - } - }, "which": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/which/-/which-1.3.0.tgz", diff --git a/package.json b/package.json index 194911c0f..934993500 100644 --- a/package.json +++ b/package.json @@ -35,11 +35,10 @@ "odata-v4-parser": "~0.1", "pg": "~8", "pg-query-stream": "~4", + 
"postgres": "~3", "prompt": "~1", "ramda": "~0", "sanitize-filename": "~1", - "slonik": "~23", - "slonik-sql-tag-raw": "1.0.3", "tmp-promise": "~3", "uuid": "~3", "yauzl": "~2.9" diff --git a/test/assertions.js b/test/assertions.js index 69a67065b..55348bbd2 100644 --- a/test/assertions.js +++ b/test/assertions.js @@ -1,5 +1,6 @@ const should = require('should'); const { DateTime } = require('luxon'); +const { reduceFragment } = require('./util/sql'); // debugging things. global.tap = (x) => { console.log(x); return x; }; @@ -246,6 +247,11 @@ should.Assertion.add('Config', function() { this.obj.setAt.should.be.an.isoDate(); }); +should.Assertion.add('eqlQuery', function(val) { + this.params = { operator: 'to be an equivalent query fragment' }; + reduceFragment(this.obj).should.eql(reduceFragment(val)); +}); + should.Assertion.add('SimpleCsv', function() { this.params = { operator: 'to be a full simple.csv export with three rows' }; diff --git a/test/integration/api/audits.js b/test/integration/api/audits.js index fc9be8c96..3635859ed 100644 --- a/test/integration/api/audits.js +++ b/test/integration/api/audits.js @@ -1,5 +1,5 @@ const should = require('should'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { plain } = require('../../util/util'); const { testService } = require('../setup'); const testData = require('../../data/xml'); @@ -326,10 +326,11 @@ describe('/audits', () => { body[0].actee.xmlFormId.should.equal('simple'); })))))); + const pad = x => ((x < 10) ? 
`0${x}` : x); it('should filter (inclusively) by start date', testService((service, { run }) => Promise.all( [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] - .map((day) => run(sql`insert into audits ("loggedAt", action) values (${`2000-01-${day}T00:00Z`}, ${`test.${day}`})`)) + .map((day) => run(sql`insert into audits ("loggedAt", action) values (${`2000-01-${pad(day)}T00:00Z`}, ${`test.${day}`})`)) ) .then(() => service.login('alice', (asAlice) => asAlice.get('/v1/audits?start=2000-01-08Z') @@ -349,7 +350,7 @@ describe('/audits', () => { it('should filter by start date+time', testService((service, { run }) => Promise.all( [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] - .map((day) => run(sql`insert into audits ("loggedAt", action) values (${`2000-01-${day}T00:00Z`}, ${`test.${day}`})`)) + .map((day) => run(sql`insert into audits ("loggedAt", action) values (${`2000-01-${pad(day)}T00:00Z`}, ${`test.${day}`})`)) ) .then(() => service.login('alice', (asAlice) => asAlice.get('/v1/audits?start=2000-01-08T12:00Z') @@ -368,7 +369,7 @@ describe('/audits', () => { Users.getByEmail('alice@getodk.org').then((o) => o.get()) .then((alice) => Promise.all( [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] - .map((day) => run(sql`insert into audits ("loggedAt", action, "actorId", "acteeId") values (${`2000-01-${day}T00:00Z`}, ${`test.${day}`}, ${alice.actor.id}, ${alice.actor.acteeId})`)) + .map((day) => run(sql`insert into audits ("loggedAt", action, "actorId", "acteeId") values (${`2000-01-${pad(day)}T00:00Z`}, ${`test.${day}`}, ${alice.actor.id}, ${alice.actor.acteeId})`)) ) .then(() => service.login('alice', (asAlice) => asAlice.get('/v1/audits?start=2000-01-08T12:00Z') @@ -391,7 +392,7 @@ describe('/audits', () => { it('should filter (inclusively) by end date', testService((service, { run }) => Promise.all( [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] - .map((day) => run(sql`insert into audits ("loggedAt", action) values (${`2000-01-${day}T00:00Z`}, ${`test.${day}`})`)) + .map((day) => run(sql`insert into audits ("loggedAt", 
action) values (${`2000-01-${pad(day)}T00:00Z`}, ${`test.${day}`})`)) ) .then(() => service.login('alice', (asAlice) => asAlice.get('/v1/audits?end=2000-01-03Z') @@ -410,7 +411,7 @@ describe('/audits', () => { it('should filter by end date+time', testService((service, { run }) => Promise.all( [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] - .map((day) => run(sql`insert into audits ("loggedAt", action) values (${`2000-01-${day}T00:00Z`}, ${`test.${day}`})`)) + .map((day) => run(sql`insert into audits ("loggedAt", action) values (${`2000-01-${pad(day)}T00:00Z`}, ${`test.${day}`})`)) ) .then(() => service.login('alice', (asAlice) => asAlice.get('/v1/audits?end=2000-01-02T12:00Z') @@ -428,7 +429,7 @@ describe('/audits', () => { Users.getByEmail('alice@getodk.org').then((o) => o.get()) .then((alice) => Promise.all( [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] - .map((day) => run(sql`insert into audits ("loggedAt", action, "actorId", "acteeId") values (${`2000-01-${day}T00:00Z`}, ${`test.${day}`}, ${alice.actor.id}, ${alice.actor.acteeId})`)) + .map((day) => run(sql`insert into audits ("loggedAt", action, "actorId", "acteeId") values (${`2000-01-${pad(day)}T00:00Z`}, ${`test.${day}`}, ${alice.actor.id}, ${alice.actor.acteeId})`)) ) .then(() => service.login('alice', (asAlice) => asAlice.get('/v1/audits?end=2000-01-02T12:00Z') diff --git a/test/integration/api/forms/draft.js b/test/integration/api/forms/draft.js index 911618902..6ecefb1cf 100644 --- a/test/integration/api/forms/draft.js +++ b/test/integration/api/forms/draft.js @@ -4,7 +4,7 @@ const should = require('should'); const { testService } = require('../../setup'); const testData = require('../../../data/xml'); const { exhaust } = require(appRoot + '/lib/worker/worker'); -const { sql } = require('slonik'); +const sql = require('postgres')(); describe('api: /projects/:id/forms (drafts)', () => { diff --git a/test/integration/api/odata.js b/test/integration/api/odata.js index 9a010b029..c7d845944 100644 --- a/test/integration/api/odata.js 
+++ b/test/integration/api/odata.js @@ -1,6 +1,6 @@ const should = require('should'); const { testService } = require('../setup'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const testData = require('../../data/xml'); // NOTE: for the data output tests, we do not attempt to extensively determine if every diff --git a/test/integration/api/projects.js b/test/integration/api/projects.js index 0f599aa96..50eff3f71 100644 --- a/test/integration/api/projects.js +++ b/test/integration/api/projects.js @@ -1,6 +1,6 @@ const appRoot = require('app-root-path'); const should = require('should'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { testService } = require('../setup'); const testData = require('../../data/xml'); const { QueryOptions } = require('../../../lib/util/db'); diff --git a/test/integration/api/submissions.js b/test/integration/api/submissions.js index 852d078d3..6fec850bc 100644 --- a/test/integration/api/submissions.js +++ b/test/integration/api/submissions.js @@ -1,7 +1,7 @@ const appRoot = require('app-root-path'); const should = require('should'); const uuid = require('uuid/v4'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { createReadStream, readFileSync } = require('fs'); const { testService } = require('../setup'); const testData = require('../../data/xml'); @@ -767,7 +767,7 @@ describe('api: /submission', () => { }); }); -describe('api: /forms/:id/submissions', () => { +describe.skip('api: /forms/:id/submissions', () => { describe('POST', () => { it('should return notfound if the form does not exist', testService((service) => service.login('alice', (asAlice) => diff --git a/test/integration/other/analytics-queries.js b/test/integration/other/analytics-queries.js index 7b0a6a9dc..3353598a9 100644 --- a/test/integration/other/analytics-queries.js +++ b/test/integration/other/analytics-queries.js @@ -1,6 +1,6 @@ const appRoot = require('app-root-path'); const 
should = require('should'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { testTask, testService, testContainer } = require('../setup'); const { Actor, Form, Project, Submission, User } = require(appRoot + '/lib/model/frames'); const { createReadStream } = require('fs'); @@ -454,7 +454,7 @@ describe('analytics task queries', () => { // no deleted forms reused yet const emptyRes = await container.Analytics.countReusedFormIds(); - emptyRes.should.eql([]); + emptyRes.slice().should.eql([]); // one purged form reused await container.Forms.purge(true); @@ -486,7 +486,7 @@ describe('analytics task queries', () => { .set('Content-Type', 'application/xml'))); const res = await container.Analytics.countReusedFormIds(); - res.should.eql([ + res.slice().should.eql([ { projectId: 1, total: 2 }, { projectId: proj2, total: 1 } ]); @@ -728,7 +728,7 @@ describe('analytics task queries', () => { .send({ description: null })); const res = await container.Analytics.getProjectsWithDescriptions(); - res.should.eql([ { projectId: 1 }, { projectId: projWithDesc } ]); + res.slice().should.eql([ { projectId: 1 }, { projectId: projWithDesc } ]); })); }); diff --git a/test/integration/other/blobs.js b/test/integration/other/blobs.js index ef960366b..2c4e9a0ca 100644 --- a/test/integration/other/blobs.js +++ b/test/integration/other/blobs.js @@ -1,6 +1,6 @@ const { createReadStream, readFileSync } = require('fs'); const appPath = require('app-root-path'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const testData = require('../../data/xml'); const { testService } = require('../setup'); diff --git a/test/integration/other/encryption.js b/test/integration/other/encryption.js index d134548db..4f77dddb4 100644 --- a/test/integration/other/encryption.js +++ b/test/integration/other/encryption.js @@ -1,7 +1,7 @@ const appRoot = require('app-root-path'); const { readFileSync } = require('fs'); const should = require('should'); -const { sql 
} = require('slonik'); +const sql = require('postgres')(); const { toText } = require('streamtest').v2; const { testService, testContainerFullTrx, testContainer } = require(appRoot + '/test/integration/setup'); const testData = require(appRoot + '/test/data/xml'); @@ -10,7 +10,7 @@ const { Form, Key, Submission } = require(appRoot + '/lib/model/frames'); const { mapSequential } = require(appRoot + '/test/util/util'); const { exhaust } = require(appRoot + '/lib/worker/worker'); -describe('managed encryption', () => { +describe.skip('managed encryption', () => { describe('lock management', () => { it('should reject keyless forms in keyed projects @slow', testContainerFullTrx(async (container) => { // enable managed encryption. diff --git a/test/integration/other/form-purging.js b/test/integration/other/form-purging.js index ca37831b7..c5d888606 100644 --- a/test/integration/other/form-purging.js +++ b/test/integration/other/form-purging.js @@ -1,6 +1,6 @@ const { createReadStream, readFileSync } = require('fs'); const appPath = require('app-root-path'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { testService } = require('../setup'); const testData = require('../../data/xml'); const { exhaust } = require(appPath + '/lib/worker/worker'); diff --git a/test/integration/other/migrations.js b/test/integration/other/migrations.js index db8dda56f..4c2db569c 100644 --- a/test/integration/other/migrations.js +++ b/test/integration/other/migrations.js @@ -4,7 +4,7 @@ const uuid = require('uuid/v4'); const should = require('should'); const config = require('config'); const { testServiceFullTrx } = require('../setup'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { connect } = require(appRoot + '/lib/model/migrate'); const migrator = connect(config.get('test.database')); const testData = require('../../data/xml'); @@ -32,7 +32,7 @@ const upToMigration = async (toName) => { // column to projects and forms, it is 
not possible to migrate part way // (before the new column) and populate the data when frames expect the // new column to exist. -describe.skip('database migrations', function() { +describe.skip('database migrations @slow', function() { this.timeout(4000); it('should purge deleted forms via migration', testServiceFullTrx(async (service, container) => { @@ -191,7 +191,7 @@ describe.skip('database migrations', function() { }); -describe('datbase migrations: removing default project', function() { +describe('datbase migrations: removing default project @slow', function() { this.timeout(4000); it('should put old forms into project', testServiceFullTrx(async (service, container) => { diff --git a/test/integration/other/select-many.js b/test/integration/other/select-many.js index 3973bf25e..db9a8ca68 100644 --- a/test/integration/other/select-many.js +++ b/test/integration/other/select-many.js @@ -1,6 +1,6 @@ const appRoot = require('app-root-path'); const should = require('should'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { testService } = require('../setup'); const testData = require('../../data/xml'); const { exhaust } = require(appRoot + '/lib/worker/worker'); @@ -32,7 +32,7 @@ describe('select many value processing', () => { .then(([ one, two ]) => [ id, one, two ])) ])) .then(([ values, [ formId, one, two ] ]) => { - values.should.eql([ + values.slice().should.eql([ { formId, submissionDefId: one, path: '/q1', value: 'a' }, { formId, submissionDefId: one, path: '/q1', value: 'b' }, { formId, submissionDefId: one, path: '/g1/q2', value: 'x' }, @@ -72,7 +72,7 @@ describe('select many value processing', () => { .then(([ one, two ]) => [ id, one, two ])) ])) .then(([ values, [ formId, one, one2 ] ]) => { - values.should.eql([ + values.slice().should.eql([ { formId, submissionDefId: one, path: '/q1', value: 'a' }, { formId, submissionDefId: one, path: '/q1', value: 'b' }, { formId, submissionDefId: one, path: '/g1/q2', value: 'x' }, 
diff --git a/test/integration/other/transactions.js b/test/integration/other/transactions.js index e42867bfc..0c3142ea2 100644 --- a/test/integration/other/transactions.js +++ b/test/integration/other/transactions.js @@ -1,6 +1,6 @@ const should = require('should'); const appRoot = require('app-root-path'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { testContainerFullTrx } = require(appRoot + '/test/integration/setup'); const { exhaust } = require(appRoot + '/lib/worker/worker'); const testData = require('../../data/xml'); @@ -26,7 +26,7 @@ describe('transaction integration', () => { return injector({ db: { isTransacting: false, - transaction(cb) { return Promise.resolve(cb({ isTransacting: true })); } + begin(cb) { return Promise.resolve(cb({ isTransacting: true })); } } }, { Capybaras }); }; diff --git a/test/integration/setup.js b/test/integration/setup.js index b3e48b066..f0c603598 100644 --- a/test/integration/setup.js +++ b/test/integration/setup.js @@ -1,10 +1,10 @@ const appRoot = require('app-root-path'); const { merge } = require('ramda'); -const { sql } = require('slonik'); const { readdirSync } = require('fs'); const { join } = require('path'); const request = require('supertest'); const { run, task } = require(appRoot + '/lib/task/task'); +const { resolve, reject } = require(appRoot + '/lib/util/promise'); // knex things. const config = require('config'); @@ -13,9 +13,9 @@ const migrator = connect(config.get('test.database')); const owner = config.get('test.database.user'); after(() => { migrator.destroy(); }); -// slonik connection pool -const { slonikPool } = require(appRoot + '/lib/external/slonik'); -const db = slonikPool(config.get('test.database')); +// init postgres connection. +const { postgres } = require(appRoot + '/lib/external/postgres'); +const db = postgres(config.get('test.database')); // set up our mailer. 
const env = config.get('default.env'); @@ -72,7 +72,6 @@ const initialize = () => migrator .raw('drop owned by current_user') .then(() => migrator.migrate.latest({ directory: appRoot + '/lib/model/migrations' })) .then(() => withDefaults({ db, bcrypt }).transacting(populate)); -const reinit = (f) => (x) => { initialize().then(() => f(x)); }; before(initialize); @@ -108,15 +107,17 @@ const augment = (service) => { const baseContainer = withDefaults({ db, mail, env, xlsform, google, bcrypt, enketo, Sentry, odkAnalytics }); +// helpers to clean up at the end of tests, used by runners below: +const rollback = (f) => (x) => reject(() => f(x)); +const reinit = (f) => (x) => { initialize().then(() => f(x)); }; + // called to get a service context per request. we do some work to hijack the // transaction system so that each test runs in a single transaction that then // gets rolled back for a clean slate on the next test. -const testService = (test) => () => new Promise((resolve, reject) => { - baseContainer.transacting((container) => { - const rollback = (f) => (x) => container.run(sql`rollback`).then(() => f(x)); - return test(augment(request(service(container))), container).then(rollback(resolve), rollback(reject)); - });//.catch(Promise.resolve.bind(Promise)); // TODO/SL probably restore -}); +const testService = (test) => () => + baseContainer.transacting((container) => + test(augment(request(service(container))), container).then(rollback(resolve), rollback(reject)) + ).catch((f) => f()); // for some tests we explicitly need to make concurrent requests, in which case // the transaction butchering we do for testService will not work. for these cases, @@ -127,12 +128,9 @@ const testServiceFullTrx = (test) => () => new Promise((resolve, reject) => // for some tests we just want a container, without any of the webservice stuffs between. // this is that, with the same transaction trickery as a normal test. 
-const testContainer = (test) => () => new Promise((resolve, reject) => { - baseContainer.transacting((container) => { - const rollback = (f) => (x) => container.run(sql`rollback`).then(() => f(x)); - return test(container).then(rollback(resolve), rollback(reject)); - });//.catch(Promise.resolve.bind(Promise)); -}); +const testContainer = (test) => () => + baseContainer.transacting((container) => test(container).then(rollback(resolve), rollback(reject))) + .catch((f) => f()); // complete the square of options: const testContainerFullTrx = (test) => () => new Promise((resolve, reject) => @@ -141,16 +139,19 @@ const testContainerFullTrx = (test) => () => new Promise((resolve, reject) => // called to get a container context per task. ditto all // from testService. // here instead our weird hijack work involves injecting our own constructed // container into the task context so it just picks it up and uses it. -const testTask = (test) => () => new Promise((resolve, reject) => { +const testTask = (test) => () => baseContainer.transacting((container) => { task._container = container.with({ task: true }); const rollback = (f) => (x) => { delete task._container; - return container.run(sql`rollback`).then(() => f(x)); + return reject(() => f(x)); }; - return test(task._container).then(rollback(resolve), rollback(reject)); - });//.catch(Promise.resolve.bind(Promise)); -}); + try { + return test(task._container).then(rollback(resolve), rollback(reject)); + } catch(e) { + return rollback(reject)(e); + } + }).catch((f) => f()); module.exports = { testService, testServiceFullTrx, testContainer, testContainerFullTrx, testTask }; diff --git a/test/integration/task/reap-sessions.js b/test/integration/task/reap-sessions.js index 6f7fc5e64..2de002185 100644 --- a/test/integration/task/reap-sessions.js +++ b/test/integration/task/reap-sessions.js @@ -1,6 +1,6 @@ const appRoot = require('app-root-path'); const should = require('should'); -const { sql } = require('slonik'); +const sql = 
require('postgres')(); const { testTask } = require('../setup'); const { reapSessions } = require(appRoot + '/lib/task/reap-sessions'); const { Actor } = require(appRoot + '/lib/model/frames'); diff --git a/test/integration/task/task.js b/test/integration/task/task.js index 82b6529fa..c729fe3c1 100644 --- a/test/integration/task/task.js +++ b/test/integration/task/task.js @@ -1,7 +1,7 @@ const appRoot = require('app-root-path'); const should = require('should'); const { testTask } = require('../setup'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { writeFile, symlink } = require('fs'); const { join } = require('path'); const { exec } = require('child_process'); diff --git a/test/integration/worker/submission.attachment.update.js b/test/integration/worker/submission.attachment.update.js index 46ecef2ee..5e5f1cd92 100644 --- a/test/integration/worker/submission.attachment.update.js +++ b/test/integration/worker/submission.attachment.update.js @@ -1,7 +1,7 @@ const should = require('should'); const appRoot = require('app-root-path'); const { createReadStream } = require('fs'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { testService } = require('../setup'); const testData = require(appRoot + '/test/data/xml.js'); const worker = require(appRoot + '/lib/worker/submission.attachment.update'); diff --git a/test/integration/worker/worker.js b/test/integration/worker/worker.js index 990e4cd3d..a32c4cbb8 100644 --- a/test/integration/worker/worker.js +++ b/test/integration/worker/worker.js @@ -2,7 +2,7 @@ const should = require('should'); const appRoot = require('app-root-path'); const { promisify } = require('util'); const { DateTime, Duration } = require('luxon'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { testContainerFullTrx, testContainer } = require('../setup'); const { runner, checker, worker } = require(appRoot + '/lib/worker/worker'); const { Audit } = 
require(appRoot + '/lib/model/frames'); @@ -292,7 +292,7 @@ select count(*) from audits where action='submission.attachment.update' and proc const hijacked = Object.create(container.__proto__); Object.assign(hijacked, container); hijacked.all = (q) => { - if (q.sql.startsWith('\nwith q as')) { + if (q.strings[0].match(/with q as/)) { if (failed) return container.all(q); failed = true; throw new Error('oh whoops!'); @@ -318,7 +318,7 @@ select count(*) from audits where action='submission.attachment.update' and proc const hijacked = Object.create(container.__proto__); Object.assign(hijacked, container); hijacked.all = (q) => { - if (q.sql.startsWith('\nwith q as')) { + if (q.strings[0].match(/with q as/)) { if (failed) return container.all(q); failed = true; return new Promise(async (_, reject) => { await millis(5); reject('not this time'); }); @@ -345,7 +345,7 @@ select count(*) from audits where action='submission.attachment.update' and proc const hijacked = Object.create(container.__proto__); Object.assign(hijacked, container); hijacked.all = (q) => { - if (q.sql.startsWith('\nwith q as')) checks++; + if (q.strings[0].match(/with q as/)) checks++; return container.all(q); }; const jobMap = { 'submission.attachment.update': [ () => { diff --git a/test/unit/data/odata-filter.js b/test/unit/data/odata-filter.js index e66ce98fa..407cddd77 100644 --- a/test/unit/data/odata-filter.js +++ b/test/unit/data/odata-filter.js @@ -10,36 +10,36 @@ const appRoot = require('app-root-path'); const assert = require('assert'); const should = require('should'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { odataFilter } = require(appRoot + '/lib/data/odata-filter'); describe('OData filter query transformer', () => { it('should transform binary expressions', () => { - odataFilter('3 eq 5').should.eql(sql`(${'3'} is not distinct from ${'5'})`); - odataFilter('2 lt 3 and 5 gt 4').should.eql(sql`((${'2'} < ${'3'}) and (${'5'} > ${'4'}))`); - 
odataFilter('3 eq __system/submitterId').should.eql(sql`(${'3'} is not distinct from ${sql.identifier([ 'submissions', 'submitterId' ])})`); + odataFilter('3 eq 5').should.eqlQuery(sql`(${'3'} is not distinct from ${'5'})`); + odataFilter('2 lt 3 and 5 gt 4').should.eqlQuery(sql`((${'2'} < ${'3'}) and (${'5'} > ${'4'}))`); + odataFilter('3 eq __system/submitterId').should.eqlQuery(sql`(${'3'} is not distinct from ${sql('submissions.submitterId')})`); }); it('should transform not operators', () => { - odataFilter('not 4 eq 6').should.eql(sql`(not (${'4'} is not distinct from ${'6'}))`); + odataFilter('not 4 eq 6').should.eqlQuery(sql`(not (${'4'} is not distinct from ${'6'}))`); }); it('should transform null', () => { - odataFilter('1 eq null').should.eql(sql`(${'1'} is not distinct from ${null})`); + odataFilter('1 eq null').should.eqlQuery(sql`(${'1'} is not distinct from ${null})`); }); it('should allow parentheses around a boolean expression', () => { const result = odataFilter('(1 lt 2 or 3 lt 4) and 5 lt 6'); - result.should.eql(sql`(((${'1'} < ${'2'}) or (${'3'} < ${'4'})) and (${'5'} < ${'6'}))`); + result.should.eqlQuery(sql`(((${'1'} < ${'2'}) or (${'3'} < ${'4'})) and (${'5'} < ${'6'}))`); }); it('should transform date extraction method calls', () => { - odataFilter('2020 eq year(2020-01-01)').should.eql(sql`(${'2020'} is not distinct from extract(year from ${'2020-01-01'}))`); - odataFilter('2020 eq year(__system/submissionDate)').should.eql(sql`(${'2020'} is not distinct from extract(year from ${sql.identifier([ 'submissions', 'createdAt' ])}))`); + odataFilter('2020 eq year(2020-01-01)').should.eqlQuery(sql`(${'2020'} is not distinct from extract(year from ${'2020-01-01'}))`); + odataFilter('2020 eq year(__system/submissionDate)').should.eqlQuery(sql`(${'2020'} is not distinct from extract(year from ${sql('submissions.createdAt')}))`); }); it('should transform now method calls', () => { - odataFilter('2020 eq year(now())').should.eql(sql`(${'2020'} is 
not distinct from extract(year from now()))`); + odataFilter('2020 eq year(now())').should.eqlQuery(sql`(${'2020'} is not distinct from extract(year from now()))`); }); it('should reject unparseable expressions', () => { diff --git a/test/unit/model/frame.js b/test/unit/model/frame.js index 03f484306..649b1c5d2 100644 --- a/test/unit/model/frame.js +++ b/test/unit/model/frame.js @@ -1,23 +1,21 @@ const should = require('should'); const appRoot = require('app-root-path'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { Frame, table, readable, writable, into, aux, embedded } = require(appRoot + '/lib/model/frame'); const Option = require(appRoot + '/lib/util/option'); describe('Frame', () => { describe('definition', () => { it('should accept fields', () => { Frame.define('a', 'b').fields.should.eql([ 'a', 'b' ]); }); - it('should create a fieldlist', () => { Frame.define('a', 'b').fieldlist.should.eql(sql`"a","b"`); }); it('should note readables', () => { Frame.define('a', writable, readable, 'b', 'c', readable).def.readable.should.eql([ 'a', 'c' ]); }); it('should note writables', () => { Frame.define('a', readable, writable, 'b', writable, 'c').def.writable.should.eql([ 'a', 'b' ]); }); - it('should note insert fields and list', () => { + it('should note insert fields', () => { const Box = Frame.define('id', 'a', readable, writable, 'b', writable, 'c') Box.insertfields.should.eql([ 'a', 'b', 'c' ]); - Box.insertlist.should.eql(sql`"a","b","c"`); }); it('should note hasCreatedAt and hasUpdatedAt', () => { const T = Frame.define('updatedAt'); diff --git a/test/unit/util/db.js b/test/unit/util/db.js index 9aaa96f08..2e0124efc 100644 --- a/test/unit/util/db.js +++ b/test/unit/util/db.js @@ -1,6 +1,6 @@ const appRoot = require('app-root-path'); const should = require('should'); -const { sql } = require('slonik'); +const sql = require('postgres')(); const { Frame, table, into } = require(appRoot + '/lib/model/frame'); const util = 
require(appRoot + '/lib/util/db'); const Option = require(appRoot + '/lib/util/option'); @@ -205,7 +205,7 @@ describe('util/db', () => { const U = Frame.define(into('extra'), 'z'); it('should generate fields', () => { unjoiner(T, U) - .fields.should.eql(sql`frames."x" as "frames!x",frames."y" as "frames!y","z" as "z"`); + .fields.should.eqlQuery(sql`frames."x" as "frames!x",frames."y" as "frames!y","z" as "z"`); }); it('should unjoin data', () => { @@ -216,7 +216,7 @@ describe('util/db', () => { it('should optionally unjoin optional data', () => { const unjoin = unjoiner(T, Option.of(U)); - unjoin.fields.should.eql(sql`frames."x" as "frames!x",frames."y" as "frames!y","z" as "z"`); + unjoin.fields.should.eqlQuery(sql`frames."x" as "frames!x",frames."y" as "frames!y","z" as "z"`); unjoin({ 'frames!x': 3, 'frames!y': 4, z: 5 }) .should.eql(new T({ x: 3, y: 4 }, { extra: Option.of(new U({ z: 5 })) })); unjoin({ 'frames!x': 3, 'frames!y': 4 }) @@ -234,8 +234,8 @@ describe('util/db', () => { it('should provide the appropriate arguments when not extended', () => { let run = false; extender(T)(U)((fields, extend, options, x, y, z) => { - fields.should.eql(sql`frames."x" as "frames!x",frames."y" as "frames!y"`); - (sql`${extend|| true}`).should.eql(sql``); + fields.should.eqlQuery(sql`frames."x" as "frames!x",frames."y" as "frames!y"`); + (sql`${extend|| true}`).should.eqlQuery(sql``); x.should.equal(2); y.should.equal(3); z.should.equal(4); @@ -247,8 +247,8 @@ describe('util/db', () => { it('should provide the appropriate arguments when extended', () => { let run = false; extender(T)(U)((fields, extend, options, x, y, z) => { - fields.should.eql(sql`frames."x" as "frames!x",frames."y" as "frames!y","a" as "a","b" as "b"`); - (sql`${extend|| true}`).should.eql(sql`${true}`); + fields.should.eqlQuery(sql`frames."x" as "frames!x",frames."y" as "frames!y","a" as "a","b" as "b"`); + (sql`${extend|| true}`).should.eqlQuery(sql`${true}`); x.should.equal(2); y.should.equal(3); 
z.should.equal(4); @@ -277,26 +277,20 @@ describe('util/db', () => { const T = Frame.define(table('frames')); it('should formulate a basic response based on data', () => { - insert(new T({ x: 2, y: 3 })).should.eql(sql` -insert into frames ("x","y") -values (${2},${3}) -returning *`); + insert(new T({ x: 2, y: 3 })).should.eqlQuery( + sql`insert into "frames" ${sql({ x: 2, y: 3 })} returning *`); }); it('should deal with strange data input types', () => { insert(new T({ x: { test: true }, y: undefined, z: new Date('2000-01-01') })) - .should.eql(sql` -insert into frames ("x","y","z") -values (${'{"test":true}'},${null},${'2000-01-01T00:00:00.000Z'}) -returning *`); + .should.eqlQuery( + sql`insert into "frames" ("x","y","z")values(${{ test: true }},${null},${new Date('2000-01-01T00:00:00.000Z')}) returning *`); }); it('should automatically insert into createdAt if expected', () => { const U = Frame.define(table('cats'), 'createdAt', 'updatedAt'); - insert(new U()).should.eql(sql` -insert into cats ("createdAt") -values (${sql`clock_timestamp()`}) -returning *`); + insert(new U()).should.eqlQuery( + sql`insert into "cats" ("createdAt")values(${sql`clock_timestamp()`}) returning *`); }); }); @@ -305,20 +299,18 @@ returning *`); const T = Frame.define(table('dogs'), 'x', 'y'); it('should do nothing if given no data', () => { - insertMany([]).should.eql(sql`select true`); + insertMany([]).should.eqlQuery(sql`select true`); }); it('should insert all data', () => { - insertMany([ new T({ x: 2 }), new T({ y: 3 }) ]).should.eql(sql` -insert into dogs ("x","y") -values (${2},${null}),(${null},${3})`); + insertMany([ new T({ x: 2 }), new T({ y: 3 }) ]).should.eqlQuery( + sql`insert into "dogs" ("x","y")values(${2},${null}),(${null},${3})`); }); it('should insert createdAt and strange values', () => { const U = Frame.define(table('dogs'), 'x', 'createdAt'); - insertMany([ new U({ x: new Date('2000-01-01') }), new U() ]).should.eql(sql` -insert into dogs ("x","createdAt") 
-values (${'2000-01-01T00:00:00.000Z'},${sql`clock_timestamp()`}),(${null},${sql`clock_timestamp()`})`); + insertMany([ new U({ x: new Date('2000-01-01') }), new U() ]).should.eqlQuery( + sql`insert into "dogs" ("x","createdAt")values(${new Date('2000-01-01T00:00:00.000Z')},${sql`clock_timestamp()`}),(${null},${sql`clock_timestamp()`})`); }); }); @@ -327,18 +319,18 @@ values (${'2000-01-01T00:00:00.000Z'},${sql`clock_timestamp()`}),(${null},${sql` const T = Frame.define(table('rabbits')); it('should update the given data', () => { - updater(new T({ id: 1, x: 2 }), new T({ y: 3 })).should.eql(sql` -update rabbits + updater(new T({ id: 1, x: 2 }), new T({ y: 3 })).should.eqlQuery(sql` +update "rabbits" set "y"=${3} -where ${sql.identifier([ 'id' ])}=${1} +where ${sql('id')}=${1} returning *`); }); it('should set updatedAt if present', () => { const U = Frame.define(table('rabbits'), 'createdAt', 'updatedAt'); - updater(new U({ id: 1, x: 2 }), new U({ y: 3 })).should.eql(sql` -update rabbits + updater(new U({ id: 1, x: 2 }), new U({ y: 3 })).should.eqlQuery(sql` +update "rabbits" set "y"=${3} ,"updatedAt"=clock_timestamp() where "id"=${1} @@ -346,8 +338,8 @@ returning *`); }); it('should use a different id key if given', () => { - updater(new T({ otherId: 0, x: 2 }), new T({ y: 3 }), 'otherId').should.eql(sql` -update rabbits + updater(new T({ otherId: 0, x: 2 }), new T({ y: 3 }), 'otherId').should.eqlQuery(sql` +update "rabbits" set "y"=${3} where "otherId"=${0} @@ -355,20 +347,32 @@ returning *`); }); }); + describe('joinSqlStr', () => { + const { joinSqlStr } = util; + it('should join sql fragments', () => { + joinSqlStr([ sql`select`, sql`*`, sql`from`, sql('cats') ], sql` `) + .should.eqlQuery(sql`select * from "cats"`); + }); + it('should join sql fragments with values', () => { + joinSqlStr([ sql`select`, sql`*`, sql`from`, sql('cats'), sql`where ${sql('col')}=${42}` ], sql` `) + .should.eqlQuery(sql`select * from "cats" where "col"=${42}`); + }); + }); + 
describe('equals', () => { const { equals } = util; it('should do nothing if given no conditions', () => { - equals({}).should.eql(sql`true`); + equals({}).should.eqlQuery(sql`true`); }); it('should match k/v pairs', () => { equals({ x: 2, y: 3 }) - .should.eql(sql.join([ sql`"x"=${2}`, sql`"y"=${3}` ], sql` and `)); + .should.eqlQuery(util.joinSqlStr([ sql`"x"=${2}`, sql`"y"=${3}` ], sql` and `)); }); it('should split compound keys', () => { equals({ 'x.y': 2 }) - .should.eql(sql.join([ sql`"x"."y"=${2}` ], sql` and `)); + .should.eqlQuery(util.joinSqlStr([ sql`"x"."y"=${2}` ], sql` and `)); }); }); @@ -419,7 +423,7 @@ returning *`); }); it('should return blank if the arg is not present', () => { - QueryOptions.none.ifArg('z', () => {}).should.eql(sql``); + QueryOptions.none.ifArg('z', () => {}).should.eqlQuery(sql``); }); }); }); diff --git a/test/unit/util/util.js b/test/unit/util/util.js index 2444a6f8c..336378248 100644 --- a/test/unit/util/util.js +++ b/test/unit/util/util.js @@ -50,6 +50,22 @@ describe('util/util', () => { }); }); + describe('objIsEmpty', () => { + const { objIsEmpty } = util; + it('should return true if the object is empty', () => { + objIsEmpty({}).should.equal(true); + class X { x() {} } + objIsEmpty(new X()).should.equal(true); + }); + it('should return false if the object has things', () => { + objIsEmpty({ x: 42 }).should.equal(false); + class X {} + const x = new X(); + x.y = 42; + objIsEmpty(x).should.equal(false); + }); + }); + describe('blankStringToNull', () => { const { blankStringToNull } = util; it('should crush blank strings', () => { diff --git a/test/util/sql.js b/test/util/sql.js new file mode 100644 index 000000000..9e955f419 --- /dev/null +++ b/test/util/sql.js @@ -0,0 +1,45 @@ +const appRoot = require('app-root-path'); +const { sql } = require(appRoot + '/lib/external/postgres'); + +// used by .should.eqlQuery() +// +// simplifies a built porsagres sql`` fragment so that it can be reasonably compared +// with a different 
fragment of a different construction but the same result. eg, +// sql`x and y` vs sql`x ${sql`and`} y` + +const Query = sql``.constructor; +const Builder = sql({}).constructor; +const Identifier = sql('identifier').constructor; + +const options = { transform: { undefined: null, column: {} } }; +const inferType = (x) => + //x instanceof Parameter ? x.type : + x instanceof Date ? 1184 : + x instanceof Uint8Array ? 17 : + (x === true || x === false) ? 16 : + typeof x === 'bigint' ? 20 : + Array.isArray(x) ? inferType(x[0]) : + 0; + +const reduceFragment = (q, types = [], parameters = []) => { + let string = q.strings[0]; + for (let i = 1; i < q.strings.length; i++) { + string += _value(q.args[i - 1], string, parameters, types) + q.strings[i]; + } + return { string, parameters, types }; +}; + +const _value = (x, string, parameters, types) => { + if (x instanceof Builder) return x.build(string, parameters, types, options); + else if (x instanceof Identifier) return x.value; + else if (x instanceof Query) return reduceFragment(x, types, parameters).string; + else { + const value = (x?.value == null) ? x : x.value; + parameters.push(x); + types.push(inferType(x)); + return `$${parameters.length}`; + } +}; + +module.exports = { reduceFragment }; + From 75a9498ad6b6c8573dd80e84c56c0b0eb0d77506 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Wed, 3 Aug 2022 22:08:57 -0700 Subject: [PATCH 2/9] wip: working streams implementation in porsagres. * porsagres does not support multiple portals at once, so we have to serialize what were previously parallel streams. * we bottleneck at the csv => zip output anyway so it doesn't matter. * using the callback interface and abusing the Promise lock to create a backpressure mechanism. 
--- lib/data/attachments.js | 4 +- lib/data/briefcase.js | 5 ++- lib/data/client-audits.js | 3 +- lib/model/container.js | 4 +- lib/model/query/submissions.js | 3 +- lib/resources/submissions.js | 27 +++++++------- lib/util/db.js | 55 ++++++++++++++++++---------- lib/util/zip.js | 21 ++++++----- test/integration/api/submissions.js | 2 +- test/integration/other/encryption.js | 11 +++--- test/unit/data/attachments.js | 25 +++++++------ test/unit/data/briefcase.js | 8 ++-- test/unit/model/container.js | 8 ++-- test/unit/util/zip.js | 20 +++++----- 14 files changed, 108 insertions(+), 88 deletions(-) diff --git a/lib/data/attachments.js b/lib/data/attachments.js index e46009047..bd1b745f5 100644 --- a/lib/data/attachments.js +++ b/lib/data/attachments.js @@ -30,9 +30,7 @@ const streamAttachments = (inStream, decryptor) => { const writable = new Writable({ objectMode: true, highWaterMark: 5, // the default is 16, we'll be a little more conservative. - write(x, _, done) { - const att = x.row; - + write(att, _, done) { // this sanitization means that two filenames could end up identical. // luckily, this is not actually illegal in the zip spec; two files can live at precisely // the same location, and the conflict is dealt with interactively by the unzipping client. diff --git a/lib/data/briefcase.js b/lib/data/briefcase.js index 48f75dce3..bf019bc94 100644 --- a/lib/data/briefcase.js +++ b/lib/data/briefcase.js @@ -339,7 +339,8 @@ const streamBriefcaseCsvs = (inStream, inFields, xmlFormId, selectValues, decryp // to provide the data to do so. const archive = zipPart(); const name = `${sanitize(xmlFormId)}.csv`; - archive.append(PartialPipe.of(inStream, rootStream, csv()), { name }); + archive.append(PartialPipe.of(inStream, rootStream, csv()), { name }, () => { archive.finalize(); }); + { // two passes; this first pass counts field names (so we know later whether // to append a ~1 number). 
@@ -364,7 +365,7 @@ const streamBriefcaseCsvs = (inStream, inFields, xmlFormId, selectValues, decryp } } } - archive.finalize(); + return archive; }; diff --git a/lib/data/client-audits.js b/lib/data/client-audits.js index 1e740616c..da297167d 100644 --- a/lib/data/client-audits.js +++ b/lib/data/client-audits.js @@ -82,11 +82,10 @@ const streamClientAudits = (inStream, form, decryptor) => { let first = true; const csvifier = new Transform({ objectMode: true, - transform(x, _, done) { + transform(data, _, done) { // data here contains ClientAudit attchement info as well as associated // submission instanceId fetched through query in // model/query/client-audits.js - const data = x.row; // TODO: we do not currently try/catch this block because it feels low risk. // this may not actually be the case.. diff --git a/lib/model/container.js b/lib/model/container.js index 2ed71351d..ee2bd7295 100644 --- a/lib/model/container.js +++ b/lib/model/container.js @@ -13,7 +13,7 @@ const { merge, head } = require('ramda'); const { postgresErrorToProblem, queryFuncs } = require('../util/db'); -const { resolve, ignoringResult } = require('../util/promise'); +const { ignoringResult } = require('../util/promise'); //////////////////////////////////////////////////////////////////////////////// @@ -28,7 +28,7 @@ const queryModuleBuilder = (definition, container) => { module[methodName] = (...args) => { const fn = definition[methodName]; const result = fn(...args)(container); - const wrapped = (result.pipe != null) ? resolve(result) : + const wrapped = (result.pipe != null) ? result : (result.catch != null) ? 
result.catch(postgresErrorToProblem) : result; // eslint-disable-line indent diff --git a/lib/model/query/submissions.js b/lib/model/query/submissions.js index 153c0bd3c..6335e7c0d 100644 --- a/lib/model/query/submissions.js +++ b/lib/model/query/submissions.js @@ -13,6 +13,7 @@ const { Frame, table } = require('../frame'); const { Actor, Form, Submission } = require('../frames'); const { odataFilter } = require('../../data/odata-filter'); const { unjoiner, extender, equals, joinSqlStr, page, updater, QueryOptions, insertMany } = require('../../util/db'); +const { PartialPipe, mapStream } = require('../../util/stream'); const { blankStringToNull, construct } = require('../../util/util'); const Problem = require('../../util/problem'); @@ -274,7 +275,7 @@ ${page(options)}`; }; const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ stream }) => - stream.map(_exportUnjoiner)(stream(_export(formId, draft, keyIds, options))); + PartialPipe.of(stream(_export(formId, draft, keyIds, options)), mapStream(_exportUnjoiner)); const getForExport = (formId, instanceId, draft, options = QueryOptions.none) => ({ maybeOne }) => maybeOne(_export(formId, draft, [], options.withCondition({ 'submissions.instanceId': instanceId }))) diff --git a/lib/resources/submissions.js b/lib/resources/submissions.js index 4232b1748..77a31054e 100644 --- a/lib/resources/submissions.js +++ b/lib/resources/submissions.js @@ -7,7 +7,7 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. 
-const { always, identity } = require('ramda'); +const { always, identity, call } = require('ramda'); const multer = require('multer'); const sanitize = require('sanitize-filename'); const { Blob, Form, Submission } = require('../model/frames'); @@ -269,20 +269,19 @@ module.exports = (service, endpoint) => { const options = QueryOptions.fromSubmissionCsvRequest(query); return Promise.all([ (options.deletedFields === true) ? Forms.getMergedFields(form.id) : Forms.getFields(form.def.id), - Submissions.streamForExport(form.id, draft, keys, options), (options.splitSelectMultiples !== true) ? null : Submissions.getSelectMultipleValuesForExport(form.id, draft, options), - SubmissionAttachments.streamForExport(form.id, draft, keys, options), - ClientAudits.streamForExport(form.id, draft, keys, options), draft ? null : Audits.log(auth.actor, 'form.submission.export', form) - ]).then(([ fields, rows, selectValues, attachments, clientAudits ]) => { + ]).then(([ fields, selectValues ]) => { const filename = sanitize(form.xmlFormId); response.append('Content-Disposition', contentDisposition(`${filename}.zip`)); response.append('Content-Type', 'application/zip'); return zipStreamFromParts( - // TODO: not 100% sure that these streams close right on crash.
- streamBriefcaseCsvs(rows, fields, form.xmlFormId, selectValues, decryptor, false, options), - streamAttachments(attachments, decryptor), - streamClientAudits(clientAudits, form, decryptor) + () => streamBriefcaseCsvs(Submissions.streamForExport(form.id, draft, keys, options), + fields, form.xmlFormId, selectValues, decryptor, false, options), + () => streamAttachments(SubmissionAttachments.streamForExport(form.id, draft, keys, options), + decryptor), + () => streamClientAudits(ClientAudits.streamForExport(form.id, draft, keys, options), + form, decryptor) ); }); })); @@ -295,18 +294,20 @@ module.exports = (service, endpoint) => { const options = QueryOptions.fromSubmissionCsvRequest(query); return Promise.all([ (options.deletedFields === true) ? Forms.getMergedFields(form.id) : Forms.getFields(form.def.id), - Submissions.streamForExport(form.id, draft, Object.keys(passphrases), options), (options.splitSelectMultiples !== true) ? null : Submissions.getSelectMultipleValuesForExport(form.id, draft, options), Keys.getDecryptor(passphrases), draft ? null : Audits.log(auth.actor, 'form.submission.export', form) ]) - .then(([ fields, rows, selectValues, decryptor ]) => { + .then(([ fields, selectValues, decryptor ]) => { const filename = sanitize(form.xmlFormId); const extension = (rootOnly === true) ? 'csv' : 'csv.zip'; response.append('Content-Disposition', contentDisposition(`${filename}.${extension}`)); response.append('Content-Type', (rootOnly === true) ? 'text/csv' : 'application/zip'); - const envelope = (rootOnly === true) ? identity : zipStreamFromParts; - return envelope(streamBriefcaseCsvs(rows, fields, form.xmlFormId, selectValues, decryptor, rootOnly, options)); + const envelope = (rootOnly === true) ?
call : zipStreamFromParts; + return envelope(() => streamBriefcaseCsvs( + Submissions.streamForExport(form.id, draft, Object.keys(passphrases), options), + fields, form.xmlFormId, selectValues, decryptor, rootOnly, options + )); }); }); diff --git a/lib/util/db.js b/lib/util/db.js index f2cb22b56..857ca49a6 100644 --- a/lib/util/db.js +++ b/lib/util/db.js @@ -76,33 +76,49 @@ const connectionObject = (config) => { // https://nodejs.org/en/docs/guides/backpressuring-in-streams/ class QueryStream extends Readable { constructor(q) { - // pg-query-stream uses default 100 hwm, just bake it here until we want - // otherwise. meanwhile cursor batch size could be a separate number but they - // default to the same in pg-q-s so again just take it for now.. + // pg-query-stream uses default 100 hwm, just bake it here and below until we want otherwise. super({ objectMode: true, autoDestroy: true, highWaterMark: 100 }); this.q = q; - this.iterator = q.cursor(100)[Symbol.asyncIterator](); + this.buffer = []; + this.requested = 0; } // https://nodejs.org/api/stream.html#stream_readable_read_size_1 _read(size) { - this.q.cursorRows = size; // CRCRCR: this is internals - const next = this.iterator.next(); - if (next.done === true) return this.push(null); - - next.then(({ value, done }) => { - // TODO: need to handle .push retval - if (value != null) for (let i = 0; i < value.length; i += 1) this.push(value[i]); - if (done) this.push(null); - }).catch(this.destroy.bind(this)); + this.requested = size; // idk if this is "right" but it should be okay + it's simpler for now + this.qpush(this.buffer); + + // maybe in some other circumstance it should be important to only request further + // data from the db if the buffer is low, but realistically it should typically be + // empty by now and even if not one extra buffer is really not the worst. 
+ if (this.qresult == null) + this.qresult = this.q.cursor(100, (rows) => { + this.qpush(rows); + for (let i = 0; i < rows.length; i += 1) this.buffer.push(rows[i]); + + return new Promise((qcontinue, qterminate) => { + this.qcontinue = qcontinue; + this.qterminate = qterminate; + + if (this.buffer.length < this.requested) this.qcontinue(); + }); + }).then( + () => { this.buffer.push(null); this.qpush(this.buffer); }, + (err) => { this.destroy(err); } + ); + else + this.qcontinue(); + } + qpush(source) { + while ((this.requested > 0) && (source.length > 0) && this.push(source.shift())) + this.requested -= 1; } - - //_final() { this.iterator.return(); } _destroy(err, cb) { - this.iterator.return(); + this.qterminate?.(); cb(err); } } + //////////////////////////////////////////////////////////////////////////////// // FRAGMENT UTIL @@ -249,11 +265,12 @@ const queryFuncs = (db, obj) => { obj.q = (s) => db`${s}`; - obj.one = (s) => db`${s}`.then((xs) => xs[0]); // CRCRCR: slonik used to Enforce this, do we care? + obj.one = (s) => db`${s}`.then(([ x ]) => x); // CRCRCR: slonik used to Enforce existence, do we care? obj.one.map = (f) => (x) => f(x); - obj.maybeOne = (s) => db`${s}`.then((xs) => Option.of(xs[0])); + obj.maybeOne = (s) => db`${s}`.then(([ x ]) => Option.of(x)); obj.maybeOne.map = (f) => (x) => x.map(f); - obj.oneFirst = (s) => db`${s}`.values().then((xs) => xs[0][0]); + //obj.oneFirst = (s) => db`${s}`.values().then(([[ x ]]) => x); + obj.oneFirst = (s) => db`${s}`.then((res) => res[0][res.columns[0].name]); obj.oneFirst.map = (f) => (x) => f(x); obj.all = (s) => db`${s}`; diff --git a/lib/util/zip.js b/lib/util/zip.js index 5fcf96933..588f79390 100644 --- a/lib/util/zip.js +++ b/lib/util/zip.js @@ -18,6 +18,10 @@ const archiver = require('archiver'); // Returns an object that can add files to an archive, without having that archive // object directly nor knowing what else is going into it. 
Call append() to add a // file, and call finalize() to indicate that no more files will be appended. +// +// in some cases, the user will need to be careful not to call finalize() until the +// underlying database stream is complete, or else multiple streams can try to open +// at the same time. const zipPart = () => { const streamStream = new Readable({ read() {}, objectMode: true }); return { @@ -36,8 +40,7 @@ const zipPart = () => { // if the final component in the pipeline emitted the error, archiver would then // emit it again, but if it was an intermediate component archiver wouldn't know // about it. by manually aborting, we always emit the error and archiver never does. -const zipStreamFromParts = (...zipParts) => { - let completed = 0; +const zipStreamFromParts = (...zipPartFns) => { const resultStream = archiver('zip', { zlib: { level: 9 } }); // track requested callbacks and call them when they are fully added to the zip. @@ -47,7 +50,10 @@ const zipStreamFromParts = (...zipParts) => { if (cb != null) cb(); }); - for (const part of zipParts) { + const next = () => { + if (zipPartFns.length === 0) return resultStream.finalize(); + const part = zipPartFns.shift()(); + part.stream.on('data', ({ stream, options, cb }) => { const s = (stream instanceof PartialPipe) ? 
stream.pipeline((err) => { resultStream.emit('error', err); resultStream.abort(); }) @@ -64,12 +70,9 @@ const zipStreamFromParts = (...zipParts) => { } }); part.stream.on('error', (err) => { resultStream.emit('error', err); }); - part.stream.on('end', () => { // eslint-disable-line no-loop-func - completed += 1; - if (completed === zipParts.length) - resultStream.finalize(); - }); - } + part.stream.on('end', next); + }; + next(); return resultStream; }; diff --git a/test/integration/api/submissions.js b/test/integration/api/submissions.js index 6fec850bc..47a596d75 100644 --- a/test/integration/api/submissions.js +++ b/test/integration/api/submissions.js @@ -767,7 +767,7 @@ describe('api: /submission', () => { }); }); -describe.skip('api: /forms/:id/submissions', () => { +describe('api: /forms/:id/submissions', () => { describe('POST', () => { it('should return notfound if the form does not exist', testService((service) => service.login('alice', (asAlice) => diff --git a/test/integration/other/encryption.js b/test/integration/other/encryption.js index 4f77dddb4..9079f5cd0 100644 --- a/test/integration/other/encryption.js +++ b/test/integration/other/encryption.js @@ -6,13 +6,14 @@ const { toText } = require('streamtest').v2; const { testService, testContainerFullTrx, testContainer } = require(appRoot + '/test/integration/setup'); const testData = require(appRoot + '/test/data/xml'); const { zipStreamToFiles } = require(appRoot + '/test/util/zip'); +const { reduceFragment } = require(appRoot + '/test/util/sql'); const { Form, Key, Submission } = require(appRoot + '/lib/model/frames'); const { mapSequential } = require(appRoot + '/test/util/util'); const { exhaust } = require(appRoot + '/lib/worker/worker'); -describe.skip('managed encryption', () => { +describe('managed encryption', () => { describe('lock management', () => { - it('should reject keyless forms in keyed projects @slow', testContainerFullTrx(async (container) => { + it.skip('should reject keyless forms 
in keyed projects @slow', testContainerFullTrx(async (container) => { // enable managed encryption. await container.transacting(({ Projects }) => Projects.getById(1).then((o) => o.get()) @@ -32,7 +33,7 @@ describe.skip('managed encryption', () => { error.problemCode.should.equal(409.5); })); - it('should reject forms created while project managed encryption is being enabled @slow', testContainerFullTrx(async (container) => { + it.skip('should reject forms created while project managed encryption is being enabled @slow', testContainerFullTrx(async (container) => { // enable managed encryption but don't allow the transaction to close. let encReq; const unblock = await new Promise((resolve) => { @@ -128,13 +129,13 @@ describe.skip('managed encryption', () => { // hijack the run routine. const results = []; - const db = { query: (x) => { results.push(x); return Promise.resolve(); } }; + const db = (_, q) => { results.push(...reduceFragment(q).parameters); return Promise.resolve(); }; const hijacked = container.with({ db }); return Submission.fromXml(xml) .then((partial) => hijacked.SubmissionAttachments.create(partial, {}, [])) .then(() => { - results[0].values.should.eql([ + results.should.eql([ null, null, 'zulu.file', 0, false, null, null, 'alpha.file', 1, false, null, null, 'bravo.file', 2, false, diff --git a/test/unit/data/attachments.js b/test/unit/data/attachments.js index d0b7297cd..40095fd5b 100644 --- a/test/unit/data/attachments.js +++ b/test/unit/data/attachments.js @@ -8,11 +8,12 @@ const { zipStreamFromParts } = require(appRoot + '/lib/util/zip'); describe('.zip attachments streaming', () => { it('should stream the contents to files at the appropriate paths', (done) => { const inStream = streamTest.fromObjects([ - { row: { instanceId: 'subone', name: 'firstfile.ext', content: 'this is my first file' } }, - { row: { instanceId: 'subone', name: 'secondfile.ext', content: 'this is my second file' } }, - { row: { instanceId: 'subtwo', name: 'thirdfile.ext', 
content: 'this is my third file' } } + { instanceId: 'subone', name: 'firstfile.ext', content: 'this is my first file' }, + { instanceId: 'subone', name: 'secondfile.ext', content: 'this is my second file' }, + { instanceId: 'subtwo', name: 'thirdfile.ext', content: 'this is my third file' } ]); - zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream)), (result) => { + zipStreamToFiles(zipStreamFromParts(() => streamAttachments(inStream)), (result) => { + console.log(result); result.filenames.should.eql([ 'media/firstfile.ext', 'media/secondfile.ext', @@ -29,11 +30,11 @@ describe('.zip attachments streaming', () => { it('should deal with unsafe filenames sanely', (done) => { const inStream = streamTest.fromObjects([ - { row: { instanceId: '../subone', name: 'firstfile.ext', content: 'this is my first file' } }, - { row: { instanceId: 'subone', name: '../secondfile.ext', content: 'this is my second file' } }, - { row: { instanceId: 'subone', name: './.secondfile.ext', content: 'this is my duplicate second file' } }, + { instanceId: '../subone', name: 'firstfile.ext', content: 'this is my first file' }, + { instanceId: 'subone', name: '../secondfile.ext', content: 'this is my second file' }, + { instanceId: 'subone', name: './.secondfile.ext', content: 'this is my duplicate second file' }, ]); - zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream)), (result) => { + zipStreamToFiles(zipStreamFromParts(() => streamAttachments(inStream)), (result) => { result.filenames.should.eql([ 'media/firstfile.ext', 'media/..secondfile.ext', @@ -46,9 +47,9 @@ describe('.zip attachments streaming', () => { it('should not strip .enc unless decryption is happening', (done) => { const inStream = streamTest.fromObjects([ - { row: { instanceId: 'subone', name: 'firstfile.ext.enc', content: 'this is my first file' } } + { instanceId: 'subone', name: 'firstfile.ext.enc', content: 'this is my first file' } ]); - 
zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream)), (result) => { + zipStreamToFiles(zipStreamFromParts(() => streamAttachments(inStream)), (result) => { result.filenames.should.eql([ 'media/firstfile.ext.enc' ]); done(); }); @@ -56,9 +57,9 @@ describe('.zip attachments streaming', () => { it('should strip .enc if decryption is happening', (done) => { const inStream = streamTest.fromObjects([ - { row: { instanceId: 'subone', name: 'firstfile.ext.enc', content: 'this is my first file' } } + { instanceId: 'subone', name: 'firstfile.ext.enc', content: 'this is my first file' } ]); - zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream, () => {})), (result) => { + zipStreamToFiles(zipStreamFromParts(() => streamAttachments(inStream, () => {})), (result) => { result.filenames.should.eql([ 'media/firstfile.ext' ]); done(); }); diff --git a/test/unit/data/briefcase.js b/test/unit/data/briefcase.js index 5c43939c6..bc5885639 100644 --- a/test/unit/data/briefcase.js +++ b/test/unit/data/briefcase.js @@ -28,7 +28,7 @@ const withAttachments = (present, expected, row) => ({ ...row, aux: { ...row.aux const callAndParse = (inStream, formXml, xmlFormId, callback) => { fieldsFor(formXml).then((fields) => { - zipStreamToFiles(zipStreamFromParts(streamBriefcaseCsvs(inStream, fields, xmlFormId)), callback); + zipStreamToFiles(zipStreamFromParts(() => streamBriefcaseCsvs(inStream, fields, xmlFormId)), callback); }); }; @@ -381,7 +381,7 @@ describe('.csv.zip briefcase output @slow', () => { ]); fieldsFor(testData.forms.selectMultiple).then((fields) => { - zipStreamToFiles(zipStreamFromParts(streamBriefcaseCsvs(inStream, fields, 'selectMultiple', { '/q1': [ 'x', 'y', 'z' ], '/g1/q2': [ 'm', 'n' ] })), (result) => { + zipStreamToFiles(zipStreamFromParts(() => streamBriefcaseCsvs(inStream, fields, 'selectMultiple', { '/q1': [ 'x', 'y', 'z' ], '/g1/q2': [ 'm', 'n' ] })), (result) => { result.filenames.should.eql([ 'selectMultiple.csv' ]); 
result['selectMultiple.csv'].should.equal( `SubmissionDate,q1,q1/x,q1/y,q1/z,g1-q2,g1-q2/m,g1-q2/n,KEY,SubmitterID,SubmitterName,AttachmentsPresent,AttachmentsExpected,Status,ReviewState,DeviceID,Edits,FormVersion @@ -478,7 +478,7 @@ describe('.csv.zip briefcase output @slow', () => { ]); fieldsFor(formXml).then((fields) => { - zipStreamToFiles(zipStreamFromParts(streamBriefcaseCsvs(inStream, fields, 'structuredform', undefined, undefined, false, { groupPaths: false })), (result) => { + zipStreamToFiles(zipStreamFromParts(() => streamBriefcaseCsvs(inStream, fields, 'structuredform', undefined, undefined, false, { groupPaths: false })), (result) => { result.filenames.should.eql([ 'structuredform.csv' ]); result['structuredform.csv'].should.equal( `SubmissionDate,instanceID,name,type,street,city,KEY,SubmitterID,SubmitterName,AttachmentsPresent,AttachmentsExpected,Status,ReviewState,DeviceID,Edits,FormVersion @@ -498,7 +498,7 @@ describe('.csv.zip briefcase output @slow', () => { ]); fieldsFor(testData.forms.selectMultiple).then((fields) => { - zipStreamToFiles(zipStreamFromParts(streamBriefcaseCsvs(inStream, fields, 'selectMultiple', { '/q1': [ 'x', 'y', 'z' ], '/g1/q2': [ 'm', 'n' ] }, undefined, false, { groupPaths: false })), (result) => { + zipStreamToFiles(zipStreamFromParts(() => streamBriefcaseCsvs(inStream, fields, 'selectMultiple', { '/q1': [ 'x', 'y', 'z' ], '/g1/q2': [ 'm', 'n' ] }, undefined, false, { groupPaths: false })), (result) => { result.filenames.should.eql([ 'selectMultiple.csv' ]); result['selectMultiple.csv'].should.equal( `SubmissionDate,q1,q1/x,q1/y,q1/z,q2,q2/m,q2/n,KEY,SubmitterID,SubmitterName,AttachmentsPresent,AttachmentsExpected,Status,ReviewState,DeviceID,Edits,FormVersion diff --git a/test/unit/model/container.js b/test/unit/model/container.js index 372ab0bd1..62411acf9 100644 --- a/test/unit/model/container.js +++ b/test/unit/model/container.js @@ -1,4 +1,5 @@ const should = require('should'); +const { Readable } = require('stream'); 
const { queryModuleBuilder, injector, withDefaults } = require('../../../lib/model/container'); const streamTest = require('streamtest').v2; @@ -33,13 +34,10 @@ describe('container', () => { result.message.should.equal('Key (id)=(42) already exists.'); })); - it('should wrap returned streams with promises', (done) => { + it('should not wrap returned streams with promises', () => { queryModuleBuilder({ f: () => () => streamTest.fromObjects([ {} ]) }) .f() - .then((result) => { - // the fact that .then() does not crash is really the point here. - done(); - }); + .should.be.an.instanceOf(Readable); }); it('should provide database context to query modules', (done) => { diff --git a/test/unit/util/zip.js b/test/unit/util/zip.js index 9d723d8d7..cb65e4ffb 100644 --- a/test/unit/util/zip.js +++ b/test/unit/util/zip.js @@ -13,7 +13,7 @@ describe('zipPart streamer', () => { const part = zipPart(); let closed = false; - zipStreamToFiles(zipStreamFromParts(part), (result) => { + zipStreamToFiles(zipStreamFromParts(() => part), (result) => { closed = true; done(); }); @@ -26,27 +26,27 @@ describe('zipPart streamer', () => { it('should close the archive successfully given no files', (done) => { const part = zipPart(); // no assertions other than verifying that done is called. 
- zipStreamToFiles(zipStreamFromParts(part), () => done()); + zipStreamToFiles(zipStreamFromParts(() => part), () => done()); part.finalize(); }); it('should error out the archive if a part pushes an error', (done) => { const part1 = zipPart(); const part2 = zipPart(); - const archive = zipStreamFromParts(part1, part2); + const archive = zipStreamFromParts(() => part1, () => part2); archive.on('error', (err) => { err.message.should.equal('whoops'); done(); }); part1.append('test 1', { name: 'x/test1.file' }); - part2.error(new Error('whoops')); + part1.error(new Error('whoops')); }); it('should call the given callback only when the file has been added', (done) => { const part = zipPart(); const file = new Readable({ read() {} }); - const archive = zipStreamFromParts(part); + const archive = zipStreamFromParts(() => part); let pushedAll = false; part.append(file, { name: 'file' }, () => { @@ -65,7 +65,7 @@ describe('zipPart streamer', () => { const part = zipPart(); const file1 = new Readable({ read() {} }); const file2 = new Readable({ read() {} }); - const archive = zipStreamFromParts(part); + const archive = zipStreamFromParts(() => part); archive.pipe(createWriteStream('/dev/null')); archive.on('end', () => { @@ -93,7 +93,7 @@ describe('zipPart streamer', () => { const part1 = zipPart(); const part2 = zipPart(); - zipStreamToFiles(zipStreamFromParts(part1, part2), (result) => { + zipStreamToFiles(zipStreamFromParts(() => part1, () => part2), (result) => { result.filenames.should.containDeep([ 'x/test1.file', 'x/test2.file', @@ -122,7 +122,7 @@ describe('zipPart streamer', () => { const part1 = zipPart(); const part2 = zipPart(); - zipStreamToFiles(zipStreamFromParts(part1, part2), (result) => { + zipStreamToFiles(zipStreamFromParts(() => part1, () => part2), (result) => { result.filenames.should.containDeep([ 'test1.file', 'test2.file' ]); result['test1.file'].should.equal('test static'); result['test2.file'].should.equal('a!test!stream!'); @@ -145,7 +145,7 @@ 
describe('zipPart streamer', () => { const part1 = zipPart(); const part2 = zipPart(); - const archive = zipStreamFromParts(part1, part2); + const archive = zipStreamFromParts(() => part1, () => part2); let errCount = 0; archive.on('error', (err) => { errCount += 1; @@ -174,7 +174,7 @@ describe('zipPart streamer', () => { const part1 = zipPart(); const part2 = zipPart(); - const archive = zipStreamFromParts(part1, part2); + const archive = zipStreamFromParts(() => part1, () => part2); archive.on('error', (err) => { err.message.should.equal('whoops'); done(); From 13264604fffa2bbe581d4596d2ccfb8fcc5e41e6 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Thu, 4 Aug 2022 13:44:39 -0700 Subject: [PATCH 3/9] test: fix last two tests from porsagres refactor. * the error gets passed all the way out now on .begin as opposed to .transaction before, so it's both a nice place to handle it as well as a necessary error to handle or the tests crash. --- test/integration/other/encryption.js | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/test/integration/other/encryption.js b/test/integration/other/encryption.js index 9079f5cd0..1a20f81c9 100644 --- a/test/integration/other/encryption.js +++ b/test/integration/other/encryption.js @@ -13,7 +13,7 @@ const { exhaust } = require(appRoot + '/lib/worker/worker'); describe('managed encryption', () => { describe('lock management', () => { - it.skip('should reject keyless forms in keyed projects @slow', testContainerFullTrx(async (container) => { + it('should reject keyless forms in keyed projects @slow', testContainerFullTrx(async (container) => { // enable managed encryption. 
await container.transacting(({ Projects }) => Projects.getById(1).then((o) => o.get()) @@ -27,13 +27,12 @@ describe('managed encryption', () => { Form.fromXml(testData.forms.simple2) ]) .then(([ project, partial ]) => Forms.createNew(partial, project)) - .catch((err) => { error = err; }) - ); + ).catch((err) => { error = err; }); error.problemCode.should.equal(409.5); })); - it.skip('should reject forms created while project managed encryption is being enabled @slow', testContainerFullTrx(async (container) => { + it('should reject forms created while project managed encryption is being enabled @slow', testContainerFullTrx(async (container) => { // enable managed encryption but don't allow the transaction to close. let encReq; const unblock = await new Promise((resolve) => { @@ -61,8 +60,8 @@ describe('managed encryption', () => { Form.fromXml(testData.forms.simple2) ]) .then(([ project, partial ]) => Forms.createNew(partial, project)) - .catch((err) => { error = err; }) - ); + ) + .catch((err) => { error = err; }); // now unblock the managed encryption commit and let it all flush through. unblock(); From 0169e5582e7a2a1719fb2596eab29811417d5df8 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Thu, 4 Aug 2022 14:18:54 -0700 Subject: [PATCH 4/9] improve: formalize tap and allow it to take prefix params. * see comments in tap.js for details. 
--- lib/bin/run-server.js | 3 +-- lib/util/tap.js | 10 ++++++++++ test/assertions.js | 3 --- test/util/tap.js | 3 +++ 4 files changed, 14 insertions(+), 5 deletions(-) create mode 100644 lib/util/tap.js create mode 100644 test/util/tap.js diff --git a/lib/bin/run-server.js b/lib/bin/run-server.js index 754ed68cf..8a6ef07bb 100644 --- a/lib/bin/run-server.js +++ b/lib/bin/run-server.js @@ -13,8 +13,7 @@ const { merge } = require('ramda'); const config = require('config'); const exit = require('express-graceful-exit'); - -global.tap = (x) => { console.log(x); return x; }; // eslint-disable-line no-console +require('../util/tap'); //////////////////////////////////////////////////////////////////////////////// // CONTAINER SETUP diff --git a/lib/util/tap.js b/lib/util/tap.js new file mode 100644 index 000000000..33c9acebc --- /dev/null +++ b/lib/util/tap.js @@ -0,0 +1,10 @@ +// suggested usage +// .then(some(annoying(nonsense, here))) +// v v v v v v v v v v +// .then(some(tap('annoying:', annoying(nonsense, here)))) +// v v v v v v v v v v v v v v v v v v v v v v +// .then(some(tap('annoying:', annoying(tap('nonsense:', nonsense), here)))) + +global.tap = (...xs) => { console.log(...xs); return xs[xs.length - 1]; }; +global.tap.trace = (...xs) => { console.trace(...xs); return xs[xs.length - 1]; }; + diff --git a/test/assertions.js b/test/assertions.js index 55348bbd2..a4775ad81 100644 --- a/test/assertions.js +++ b/test/assertions.js @@ -2,9 +2,6 @@ const should = require('should'); const { DateTime } = require('luxon'); const { reduceFragment } = require('./util/sql'); -// debugging things. 
-global.tap = (x) => { console.log(x); return x; }; - should.Assertion.add('httpDate', function() { this.params = { operator: 'to be an HTTP date string' }; DateTime.fromHTTP(this.obj).isValid.should.equal(true); diff --git a/test/util/tap.js b/test/util/tap.js new file mode 100644 index 000000000..473b95198 --- /dev/null +++ b/test/util/tap.js @@ -0,0 +1,3 @@ +const appRoot = require('app-root-path'); +require(appRoot + '/lib/util/tap'); + From 4f4a2e7fa66de7df63b8a5833f30e925c13882d5 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Sun, 21 Aug 2022 23:55:41 -0700 Subject: [PATCH 5/9] i cant --- lib/util/tap.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/lib/util/tap.js b/lib/util/tap.js index 99eded89c..7ac8d4746 100644 --- a/lib/util/tap.js +++ b/lib/util/tap.js @@ -1,3 +1,12 @@ +// Copyright 2022 ODK Central Developers +// See the NOTICE file at the top-level directory of this distribution and at +// https://github.com/getodk/central-backend/blob/master/NOTICE. +// This file is part of ODK Central. It is subject to the license terms in +// the LICENSE file found in the top-level directory of this distribution and at +// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central, +// including this file, may be copied, modified, propagated, or distributed +// except according to the terms contained in the LICENSE file. + // suggested usage // .then(some(annoying(nonsense, here))) // v v v v v v v v v v From dfcef3d0f493bb6673e0c56b7cf3da6114ebce01 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Mon, 3 Oct 2022 16:22:28 -0700 Subject: [PATCH 6/9] REVERTME: convenience commit for quick reproing. 
--- lib/http/preprocessors.js | 4 ++-- lib/model/query/users.js | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/lib/http/preprocessors.js b/lib/http/preprocessors.js index 12cca0a89..c9d764015 100644 --- a/lib/http/preprocessors.js +++ b/lib/http/preprocessors.js @@ -71,8 +71,8 @@ const sessionHandler = ({ Sessions, Users, Auth, bcrypt }, context) => { // fail the request unless we are under HTTPS. // this logic does mean that if we are not under nginx it is possible to fool the server. // but it is the user's prerogative to undertake this bypass, so their security is in their hands. - if ((context.protocol !== 'https') && (context.headers['x-forwarded-proto'] !== 'https')) - return reject(Problem.user.httpsOnly()); + //if ((context.protocol !== 'https') && (context.headers['x-forwarded-proto'] !== 'https')) + // return reject(Problem.user.httpsOnly()); // we have to use a regex rather than .split(':') in case the password contains :s. const plainCredentials = Buffer.from(authHeader.slice(6), 'base64').toString('utf8'); diff --git a/lib/model/query/users.js b/lib/model/query/users.js index f4d05f863..a6d484e03 100644 --- a/lib/model/query/users.js +++ b/lib/model/query/users.js @@ -33,9 +33,7 @@ const update = (user, data) => ({ run, one }) => { update.audit = (user, data) => (log) => log('user.update', user.actor, { data: data.with(data.actor) }); const updatePassword = (user, cleartext) => ({ run, bcrypt }) => - (cleartext.length < 10 - ? reject(Problem.user.passwordTooShort()) - : bcrypt.hash(cleartext)) + bcrypt.hash(cleartext) .then((hash) => run(sql`update users set password=${hash} where "actorId"=${user.actor.id}`)); updatePassword.audit = (user) => (log) => log('user.update', user.actor, { data: { password: true } }); From 240ca85d11a72e0cffa6944068de900ed233c114 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Mon, 3 Oct 2022 16:23:13 -0700 Subject: [PATCH 7/9] REVERTME: committing some debug logging for wrong-response error. 
--- lib/util/db.js | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/util/db.js b/lib/util/db.js index e09f3b9e8..c0cd2b36d 100644 --- a/lib/util/db.js +++ b/lib/util/db.js @@ -163,7 +163,13 @@ const unjoiner = (...frames) => { // pull the primary out and inflate all extensions. const primary = bag[frames[0].to]; delete bag[frames[0].to]; - for (const k of Object.keys(bag)) bag[k] = constructors[k](bag[k]); + for (const k of Object.keys(bag)) { + if (typeof constructors[k] !== 'function') { + console.log('expected fields:', unmap, unprefix); + console.error('FAULT received data:', row, constructors, k, Object.keys(bag), bag); + } + bag[k] = constructors[k](bag[k]); + } return new frames[0](primary, bag); }; From ce225b53ccc2c015598b453ef31c6d0fd168ee71 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Mon, 3 Oct 2022 16:23:43 -0700 Subject: [PATCH 8/9] rm: remove some already commented dead code. --- lib/util/db.js | 3 --- 1 file changed, 3 deletions(-) diff --git a/lib/util/db.js b/lib/util/db.js index c0cd2b36d..152fc21b4 100644 --- a/lib/util/db.js +++ b/lib/util/db.js @@ -262,8 +262,6 @@ const page = (options) => { // // well, we don't use slonik anymore so this is just what our query api looks // like now. it turns out to be pretty convenient anyway. -// -// CRCRCR: but actually maybe we can get rid of .map now and clean up the joiner call.. 
const queryFuncs = (db, obj) => { /* eslint-disable no-param-reassign */ obj.run = (s) => db`${s}`.then(always(true)); @@ -275,7 +273,6 @@ const queryFuncs = (db, obj) => { obj.one.map = (f) => (x) => f(x); obj.maybeOne = (s) => db`${s}`.then(([ x ]) => Option.of(x)); obj.maybeOne.map = (f) => (x) => x.map(f); - //obj.oneFirst = (s) => db`${s}`.values().then(([[ x ]]) => x); obj.oneFirst = (s) => db`${s}`.then((res) => res[0][res.columns[0].name]); obj.oneFirst.map = (f) => (x) => f(x); From 2c6380943ab9a458513e3b32f28765a7c2b90cd8 Mon Sep 17 00:00:00 2001 From: Issa Tseng Date: Mon, 3 Oct 2022 16:23:58 -0700 Subject: [PATCH 9/9] deps: upgrade porsagres to 3.3.1 --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6bc7a231e..9c4a3087d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5277,9 +5277,9 @@ "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=" }, "postgres": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/postgres/-/postgres-3.2.4.tgz", - "integrity": "sha512-iscysD+ZlM4A9zj0RS2zo3f4Us4yuov94Yx+p3dE1rEARaBHC8R3/gRq40KEnWp1lxjuFq9EjuAenIUsPaTaDA==" + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postgres/-/postgres-3.3.1.tgz", + "integrity": "sha512-ak/xXToZYwRvQlZIUtLgPUIggz62eIIbPTgxl/Yl4oTu0TgNOd1CrzTCifsvZ89jBwLvnX6+Ky5frp5HzIBoaw==" }, "postgres-array": { "version": "2.0.0",