diff --git a/.github/workflows/db-migrations.yml b/.github/workflows/db-migrations.yml index 8b6594c9c..4a33bf04a 100644 --- a/.github/workflows/db-migrations.yml +++ b/.github/workflows/db-migrations.yml @@ -6,6 +6,7 @@ on: - .github/workflows/db-migrations.yml - lib/bin/create-docker-databases.js - lib/model/migrations/** + - lib/model/migrations-post-knex/** - test/db-migrations/** - package.json - package-lock.json @@ -15,6 +16,7 @@ on: - .github/workflows/db-migrations.yml - lib/bin/create-docker-databases.js - lib/model/migrations/** + - lib/model/migrations-post-knex/** - test/db-migrations/** - package.json - package-lock.json diff --git a/lib/bin/check-migrations.js b/lib/bin/check-migrations.js index 20b9c5e82..19919672a 100644 --- a/lib/bin/check-migrations.js +++ b/lib/bin/check-migrations.js @@ -8,10 +8,14 @@ // except according to the terms contained in the LICENSE file. const { withKnex, checkMigrations } = require('../model/migrate'); +const { checkPgMigrations } = require('../model/pg-migrator'); + +// REVIEW why is check-migrations required in the first place? (async () => { try { await withKnex(require('config').get('default.database'))(checkMigrations); + await checkPgMigrations(require('config').get('default.database')); } catch (err) { console.error('Error:', err.message); process.exit(1); diff --git a/lib/bin/run-migrations.js b/lib/bin/run-migrations.js index 4bca96779..755c6be01 100644 --- a/lib/bin/run-migrations.js +++ b/lib/bin/run-migrations.js @@ -8,10 +8,14 @@ // except according to the terms contained in the LICENSE file. const { withKnex, migrate } = require('../model/migrate'); +const { pgMigrations } = require('../model/pg-migrator'); (async () => { try { await withKnex(require('config').get('default.database'))(migrate); + await pgMigrations(require('config').get('default.database')); + // REVIEW should the new migrator follow the same function signature? e.g. + // withPg(config)(migrate)? } catch (err) { console.error('Error:', err.message); process.exit(1); diff --git a/lib/model/migrate.js b/lib/model/migrate.js index 0fc5a3c03..2d5b04ee6 100644 --- a/lib/model/migrate.js +++ b/lib/model/migrate.js @@ -10,6 +10,9 @@ // This is a variety of functions helpful for connecting to and performing // top-level operations with a database, like migrations. +// TODO rename e.g. legacy-knex-migrator +// TODO move migration files to e.g. 
/migrations/legacy + const knex = require('knex'); const { knexConnection } = require('../util/db'); diff --git a/lib/model/migrations-post-knex/.eslintrc.json b/lib/model/migrations-post-knex/.eslintrc.json new file mode 100644 index 000000000..93a99d9dd --- /dev/null +++ b/lib/model/migrations-post-knex/.eslintrc.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../.eslintrc.json", + "rules": { + "no-restricted-modules": [ "error", { "patterns": [ "../*" ] } ] + } +} diff --git a/lib/model/migrations/20250113-01-disable-nullable-blob-content-types.js b/lib/model/migrations-post-knex/20250204-01-disable-nullable-blob-content-types.js similarity index 93% rename from lib/model/migrations/20250113-01-disable-nullable-blob-content-types.js rename to lib/model/migrations-post-knex/20250204-01-disable-nullable-blob-content-types.js index b3844bc3a..9d57cae54 100644 --- a/lib/model/migrations/20250113-01-disable-nullable-blob-content-types.js +++ b/lib/model/migrations-post-knex/20250204-01-disable-nullable-blob-content-types.js @@ -7,14 +7,14 @@ // including this file, may be copied, modified, propagated, or distributed // except according to the terms contained in the LICENSE file. -const up = (db) => db.raw(` +const up = (db) => db.query(` UPDATE blobs SET "contentType"='application/octet-stream' WHERE "contentType" IS NULL; ALTER TABLE blobs ALTER COLUMN "contentType" SET DEFAULT 'application/octet-stream', ALTER COLUMN "contentType" SET NOT NULL `); -const down = (db) => db.raw(` +const down = (db) => db.query(` ALTER TABLE blobs ALTER COLUMN "contentType" DROP NOT NULL, ALTER COLUMN "contentType" DROP DEFAULT diff --git a/lib/model/pg-migrator.js b/lib/model/pg-migrator.js new file mode 100644 index 000000000..eb936b7be --- /dev/null +++ b/lib/model/pg-migrator.js @@ -0,0 +1,189 @@ +// Copyright 2025 ODK Central Developers +// See the NOTICE file at the top-level directory of this distribution and at +// https://github.com/getodk/central-backend/blob/master/NOTICE. +// This file is part of ODK Central. It is subject to the license terms in +// the LICENSE file found in the top-level directory of this distribution and at +// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central, +// including this file, may be copied, modified, propagated, or distributed +// except according to the terms contained in the LICENSE file. 
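+
+// Migration files in lib/model/migrations-post-knex are plain CommonJS modules
+// exporting an up(db) function - and optionally a down(db) function - where db
+// exposes a raw-SQL query() method (here, the pg client). A minimal, purely
+// illustrative example (hypothetical table/column names):
+//
+//   const up = (db) => db.query('ALTER TABLE widgets ADD COLUMN note TEXT');
+//   const down = (db) => db.query('ALTER TABLE widgets DROP COLUMN note');
+//   module.exports = { up, down };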
+
+const { lstatSync, readdirSync } = require('node:fs');
+
+const _ = require('lodash'); // eslint-disable-line import/no-extraneous-dependencies
+const pg = require('pg');
+
+const migrationsDir = `${__dirname}/migrations-post-knex`; // TODO rename to /migrations/current or something
+
+const withPg = async (config, fn) => {
+  const log = (...args) => console.log('[withPg]', ...args); // eslint-disable-line no-console
+  log('ENTRY');
+
+  const { Client } = pg;
+  const client = new Client(config);
+
+  log('client created');
+
+  log('Connecting to client...');
+  await client.connect();
+  log('Client connected OK.');
+
+  try {
+    await fn(client);
+  } finally {
+    log('Ending client...');
+    await client.end();
+    log('Client ended.');
+  }
+};
+
+const getMigrationsToRun = async client => {
+  const log = (...args) => console.log('[getMigrationsToRun]', ...args); // eslint-disable-line no-console
+  log('ENTRY');
+
+  const allMigrations = readdirSync(migrationsDir)
+    .filter(f => f.endsWith('.js') && lstatSync(`${migrationsDir}/${f}`).isFile())
+    .sort();
+  log('allMigrations:', allMigrations);
+
+  const alreadyRun = (await client.query('SELECT name FROM post_knex_migrations')).rows.map(r => r.name);
+  log('alreadyRun:', alreadyRun);
+
+  const toRunNames = allMigrations.filter(m => !alreadyRun.includes(m));
+  log('toRunNames:', toRunNames);
+
+  const toRun = toRunNames.map(name => {
+    const path = `${migrationsDir}/${name}`;
+    const migration = require(path); // eslint-disable-line import/no-dynamic-require
+    return { name, path, migration };
+  });
+  log('toRun:', toRun);
+
+  return toRun;
+};
+
+const pgMigrations = async (config) => {
+  const log = (...args) => console.log('[pgMigrations]', ...args); // eslint-disable-line no-console
+  log('ENTRY');
+
+  // In the main, this migrator is written to behave similarly to knex's:
+  //
+  // * expects each migration to export an async .up(db), where db exposes a raw-SQL function
+  // * provides an implementation of that raw-SQL function (cf. knex's db.raw())
+  // * runs all new migrations in the same transaction
+  //
+  // Notable differences:
+  //
+  // * uses new post_knex_migrations table
+  // * ONLY provides a db.raw()-equivalent function (client.query()) to migrations - no knex query builder etc.
+  // * ONLY implements up(); will throw if a migration module has other properties, except for `down()` which is currently ignored TODO implement this if it's useful to devs
+  // * gets list of migrations to run _after_ acquiring db lock
+  // * sets migration_time to be the start of the migration batch's transaction rather than some other intermediate time
+
+  await withPg(config, async client => {
+    try {
+      log('Starting transaction...');
+      await client.query('BEGIN'); // TODO do we need a specific transaction type?
+      log('Transaction started.');
+
+      log('Acquiring knex lock...');
+      // TODO do this... if it's useful. Need to think of _some_ way to prevent new migrations and old migrations running simultaneously.
+      log('Knex lock acquired');
+
+      log('Creating new table if not exists...');
+      // N.B. this table is created to be similar to the legacy knex-created table.
+      // The key difference is that the name, batch and migration_time columns are
+      // not nullable.
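+      // maxLen is assumed to mirror the VARCHAR(255) width knex uses for the name
+      // column of its own migrations table; adjust if the legacy table differs.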
+      const maxLen = 255;
+      await client.query(`
+        CREATE TABLE IF NOT EXISTS post_knex_migrations (
+          id SERIAL PRIMARY KEY,
+          name VARCHAR(${maxLen}) NOT NULL,
+          batch INTEGER NOT NULL,
+          migration_time TIMESTAMP(3) WITH TIME ZONE NOT NULL
+        )`);
+      log('Table now definitely exists.');
+
+      log('Acquiring lock on post_knex_migrations table...');
+      await client.query('LOCK TABLE post_knex_migrations IN EXCLUSIVE MODE NOWAIT');
+      log('Lock acquired.');
+
+      const toRun = await getMigrationsToRun(client);
+
+      if (!toRun.length) {
+        log('No migrations to run - exiting.');
+        await client.query('ROLLBACK');
+        return;
+      }
+
+      log('Validating', toRun.length, 'migrations...');
+      for (const { migration, name } of toRun) {
+        log('Validating migration:', name, '...');
+
+        if (name.length > maxLen) throw new Error(`Migration name '${name}' is too long - max length is ${maxLen}, but got ${name.length}`);
+
+        // TODO check for illegal chars in name?
+
+        const keys = Object.keys(migration);
+        const unexpectedKeys = _.difference(keys, ['up', 'down']);
+        if (unexpectedKeys.length) throw new Error(`Unexpected key(s) found in migration ${name}: ${unexpectedKeys}`);
+
+        if (!migration.up) throw new Error(`Required prop .up not found in migration ${name}`);
+        if (typeof migration.up !== 'function') {
+          throw new Error(`Required prop .up of migration ${name} has incorrect type - expected 'function', but got '${typeof migration.up}'`);
+        }
+
+        if (migration.down && typeof migration.down !== 'function') {
+          throw new Error(`Optional prop .down of migration ${name} has incorrect type - expected 'function' but got '${typeof migration.down}'`);
+        }
+
+        log('Migration', name, 'looks valid.');
+      }
+      log(toRun.length, 'migrations look valid.');
+
+      log('Running', toRun.length, 'migrations...');
+      for (const { migration, name } of toRun) {
+        log('Running migration:', name);
+        await migration.up(client); // eslint-disable-line no-await-in-loop
+        log('Migration complete:', name);
+      }
+      log(toRun.length, 'migrations ran OK.');
+
+      const { lastBatch } = (await client.query(`SELECT COALESCE(MAX(batch), 0) AS "lastBatch" FROM post_knex_migrations`)).rows[0];
+      log('lastBatch:', lastBatch);
+
+      // Note that migration_time is CLOCK_TIMESTAMP() to match knex's implementation.
+      // TODO confirm in relevant version of knex source code that this is actually the case, and link here.
+      const namesJson = JSON.stringify(toRun.map(m => m.name));
+      // See: https://www.postgresql.org/docs/current/functions-json.html
+      await client.query(`
+        INSERT INTO post_knex_migrations(name, batch, migration_time)
+        SELECT value#>>'{}' AS name
+             , ${lastBatch + 1} AS batch
+             , CLOCK_TIMESTAMP() AS migration_time
+          FROM JSON_ARRAY_ELEMENTS($1)
+      `, [ namesJson ]);
+
+      log('Committing migrations...');
+      await client.query('COMMIT');
+      log('Migrations committed.');
+    } catch (err) {
+      log('Caught error; rolling back', err);
+      await client.query('ROLLBACK');
+      throw err;
+    }
+  });
+};
+
+// Checks for pending migrations and returns an exit code of 1 if any are
+// still pending/unapplied (e.g. automatically running migrations just failed).
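+//
+// Invoked from lib/bin/check-migrations.js alongside the legacy knex check, e.g.:
+//
+//   await checkPgMigrations(require('config').get('default.database'));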
+const checkPgMigrations = async config => { + const log = (...args) => console.log('[checkPgMigrations]', ...args); // eslint-disable-line no-console + log('ENTRY'); + + await withPg(config, async client => { + const toRun = await getMigrationsToRun(client); + if (toRun.length) process.exitCode = 1; + }); +}; + +module.exports = { checkPgMigrations, withPg, pgMigrations }; diff --git a/test/db-migrations/20241008-01-add-user_preferences.spec.js b/test/db-migrations/20241008-01-add-user_preferences.spec.js index 8a89e1eea..74a70dd20 100644 --- a/test/db-migrations/20241008-01-add-user_preferences.spec.js +++ b/test/db-migrations/20241008-01-add-user_preferences.spec.js @@ -2,10 +2,10 @@ const { // eslint-disable-line object-curly-newline assertIndexExists, assertTableDoesNotExist, assertTableSchema, - describeMigration, + describeLegacyMigration, } = require('./utils'); // eslint-disable-line object-curly-newline -describeMigration('20241008-01-add-user_preferences', ({ runMigrationBeingTested }) => { +describeLegacyMigration('20241008-01-add-user_preferences', ({ runMigrationBeingTested }) => { before(async () => { await assertTableDoesNotExist('user_site_preferences'); await assertTableDoesNotExist('user_project_preferences'); diff --git a/test/db-migrations/20250113-01-disable-nullable-blob-content-types.spec.js b/test/db-migrations/20250204-01-disable-nullable-blob-content-types.spec.js similarity index 88% rename from test/db-migrations/20250113-01-disable-nullable-blob-content-types.spec.js rename to test/db-migrations/20250204-01-disable-nullable-blob-content-types.spec.js index 24a3f37a5..9ab3b8357 100644 --- a/test/db-migrations/20250113-01-disable-nullable-blob-content-types.spec.js +++ b/test/db-migrations/20250204-01-disable-nullable-blob-content-types.spec.js @@ -3,11 +3,11 @@ const { hash, randomBytes } = require('node:crypto'); const { // eslint-disable-line object-curly-newline assertTableContents, - describeMigration, + describeNewMigration, rowsExistFor, } = require('./utils'); // eslint-disable-line object-curly-newline -describeMigration('20250113-01-disable-nullable-blob-content-types', ({ runMigrationBeingTested }) => { +describeNewMigration('20250204-01-disable-nullable-blob-content-types', ({ runMigrationBeingTested }) => { const aBlobWith = props => { const randomContent = randomBytes(100); const md5 = hash('md5', randomContent); // eslint-disable-line no-multi-spaces @@ -21,6 +21,7 @@ describeMigration('20250113-01-disable-nullable-blob-content-types', ({ runMigra before(async () => { await rowsExistFor('blobs', blob1, blob2); + await assertTableContents('blobs', blob1, blob2); // should fail if old migration still exists await runMigrationBeingTested(); }); diff --git a/test/db-migrations/migrator.js b/test/db-migrations/migrator.js index 739b8d971..682046896 100644 --- a/test/db-migrations/migrator.js +++ b/test/db-migrations/migrator.js @@ -15,94 +15,100 @@ const fs = require('node:fs'); const { execSync } = require('node:child_process'); -const migrationsDir = './lib/model/migrations'; -const holdingPen = './test/db-migrations/.holding-pen'; +const legacy = createMigrator('Legacy', './lib/model/migrations', './test/db-migrations/.holding-pen/legacy'); // eslint-disable-line no-use-before-define, no-multi-spaces +const postKnex = createMigrator('Post-knex', './lib/model/migrations-post-knex', './test/db-migrations/.holding-pen/post-knex', legacy); // eslint-disable-line no-use-before-define -fs.mkdirSync(holdingPen, { recursive: true }); +module.exports = { 
legacy, postKnex }; -restoreMigrations(); // eslint-disable-line no-use-before-define -const allMigrations = loadMigrationsList(); // eslint-disable-line no-use-before-define -moveMigrationsToHoldingPen(); // eslint-disable-line no-use-before-define +function createMigrator(name, migrationsDir, holdingPen, previousMigrator) { + fs.mkdirSync(holdingPen, { recursive: true }); -let lastRunIdx = -1; + restoreMigrations(); // eslint-disable-line no-use-before-define + const allMigrations = loadMigrationsList(); // eslint-disable-line no-use-before-define + moveMigrationsToHoldingPen(); // eslint-disable-line no-use-before-define -function runBefore(migrationName) { - const idx = getIndex(migrationName); // eslint-disable-line no-use-before-define - if (idx === 0) return; + let lastRunIdx = -1; - const previousMigration = allMigrations[idx - 1]; + return { + exists, // eslint-disable-line no-use-before-define, no-multi-spaces + hasRun, // eslint-disable-line no-use-before-define, no-multi-spaces + runBefore, // eslint-disable-line no-use-before-define, no-multi-spaces + runIncluding, // eslint-disable-line no-use-before-define, no-multi-spaces + restoreMigrations, // eslint-disable-line no-use-before-define + }; - return runIncluding(previousMigration); // eslint-disable-line no-use-before-define -} - -function runIncluding(lastMigrationToRun) { - const finalIdx = getIndex(lastMigrationToRun); // eslint-disable-line no-use-before-define + function runBefore(migrationName) { + const idx = getIndex(migrationName); // eslint-disable-line no-use-before-define + runUntilIndex(idx - 1); // eslint-disable-line no-use-before-define + } - for (let restoreIdx=lastRunIdx+1; restoreIdx<=finalIdx; ++restoreIdx) { // eslint-disable-line no-plusplus - const f = allMigrations[restoreIdx] + '.js'; - fs.renameSync(`${holdingPen}/${f}`, `${migrationsDir}/${f}`); + function runIncluding(lastMigrationToRun) { + runUntilIndex(getIndex(lastMigrationToRun)); // eslint-disable-line no-use-before-define } - log('Running migrations until:', lastMigrationToRun, '...'); - const res = execSync(`node ./lib/bin/run-migrations.js`, { encoding: 'utf8' }); + function runUntilIndex(finalIdx) { + for (let restoreIdx=lastRunIdx+1; restoreIdx<=finalIdx; ++restoreIdx) { // eslint-disable-line no-plusplus + const f = allMigrations[restoreIdx] + '.js'; + fs.renameSync(`${holdingPen}/${f}`, `${migrationsDir}/${f}`); + } - lastRunIdx = finalIdx; + if (previousMigrator) previousMigrator.restoreMigrations(); - log(`Ran migrations up-to-and-including ${lastMigrationToRun}:\n`, res); -} + const lastMigrationToRun = allMigrations[finalIdx]; + log('Running migrations until:', lastMigrationToRun, '...'); + const res = execSync(`node ./lib/bin/run-migrations.js`, { encoding: 'utf8' }); -function getIndex(migrationName) { - const idx = allMigrations.indexOf(migrationName); - log('getIndex()', migrationName, 'found at', idx); - if (idx === -1) throw new Error(`Unknown migration: ${migrationName}`); - return idx; -} + lastRunIdx = finalIdx; -function restoreMigrations() { - moveAll(holdingPen, migrationsDir); // eslint-disable-line no-use-before-define -} + log(`Ran migrations up-to-and-including ${lastMigrationToRun}:\n`, res); + } -function moveMigrationsToHoldingPen() { - moveAll(migrationsDir, holdingPen); // eslint-disable-line no-use-before-define -} + function getIndex(migrationName) { + const idx = allMigrations.indexOf(migrationName); + log('getIndex()', migrationName, 'found at', idx); + if (idx === -1) throw new Error(`Unknown migration: 
${migrationName}`); + return idx; + } -function moveAll(src, tgt) { - fs.readdirSync(src) - .forEach(f => fs.renameSync(`${src}/${f}`, `${tgt}/${f}`)); -} + function restoreMigrations() { + moveAll(holdingPen, migrationsDir); // eslint-disable-line no-use-before-define + } -function loadMigrationsList() { - const migrations = fs.readdirSync(migrationsDir) - .filter(f => f.endsWith('.js')) - .map(f => f.replace(/\.js$/, '')) - .sort(); // TODO check that this is how knex sorts migration files - log(); - log('All migrations:'); - log(); - migrations.forEach(m => log('*', m)); - log(); - log('Total:', migrations.length); - log(); - return migrations; -} + function moveMigrationsToHoldingPen() { + moveAll(migrationsDir, holdingPen); // eslint-disable-line no-use-before-define + } -function exists(migrationName) { - try { - getIndex(migrationName); - return true; - } catch (err) { - return false; + function moveAll(src, tgt) { + fs.readdirSync(src) + .filter(f => f.endsWith('.js')) + .forEach(f => fs.renameSync(`${src}/${f}`, `${tgt}/${f}`)); } -} -function hasRun(migrationName) { - return lastRunIdx >= getIndex(migrationName); -} + function loadMigrationsList() { + const migrations = fs.readdirSync(migrationsDir) + .filter(f => f.endsWith('.js')) + .map(f => f.replace(/\.js$/, '')) + .sort(); // TODO check that this is how knex sorts migration files + log(); + log(`${name} migrations:`); + log(); + migrations.forEach(m => log('*', m)); + log(); + log('Total:', migrations.length); + log(); + return migrations; + } + + function exists(migrationName) { + try { + getIndex(migrationName); + return true; + } catch (err) { + return false; + } + } -module.exports = { - exists, - hasRun, - runBefore, - runIncluding, - restoreMigrations, -}; + function hasRun(migrationName) { + return lastRunIdx >= getIndex(migrationName); + } +} diff --git a/test/db-migrations/utils.js b/test/db-migrations/utils.js index 90d9b7af2..fe17b10e0 100644 --- a/test/db-migrations/utils.js +++ b/test/db-migrations/utils.js @@ -2,8 +2,8 @@ const assert = require('node:assert/strict'); const _ = require('lodash'); const migrator = require('./migrator'); -function _describeMigration(describeFn, migrationName, fn) { - assert.strictEqual(arguments.length, 3, 'Incorrect argument count.'); +function _describeMigration(migrator, describeFn, migrationName, fn) { // eslint-disable-line no-shadow + assert.strictEqual(arguments.length, 4, 'Incorrect argument count.'); assert.strictEqual(typeof describeFn, 'function'); @@ -29,9 +29,14 @@ function _describeMigration(describeFn, migrationName, fn) { return fn({ runMigrationBeingTested }); }); } -function describeMigration(...args) { return _describeMigration(describe, ...args); } -describeMigration.only = (...args) => _describeMigration(describe.only, ...args); // eslint-disable-line no-only-tests/no-only-tests, no-multi-spaces -describeMigration.skip = (...args) => _describeMigration(describe.skip, ...args); // eslint-disable-line no-multi-spaces + +function describeLegacyMigration(...args) { return _describeMigration(migrator.legacy, describe, ...args); } // eslint-disable-line no-multi-spaces +describeLegacyMigration.only = (...args) => _describeMigration(migrator.legacy, describe.only, ...args); // eslint-disable-line no-only-tests/no-only-tests, no-multi-spaces +describeLegacyMigration.skip = (...args) => _describeMigration(migrator.legacy, describe.skip, ...args); // eslint-disable-line no-multi-spaces + +function describeNewMigration(...args) { return 
_describeMigration(migrator.postKnex, describe, ...args); } // eslint-disable-line no-multi-spaces +describeNewMigration.only = (...args) => _describeMigration(migrator.postKnex, describe.only, ...args); // eslint-disable-line no-only-tests/no-only-tests, no-multi-spaces +describeNewMigration.skip = (...args) => _describeMigration(migrator.postKnex, describe.skip, ...args); // eslint-disable-line no-multi-spaces async function assertIndexExists(tableName, expected) { if (arguments.length !== 2) throw new Error('Incorrect arg count.'); @@ -180,7 +185,8 @@ module.exports = { assertTableDoesNotExist, assertTableSchema, - describeMigration, + describeLegacyMigration, + describeNewMigration, // TODO rename to simply describeMigration rowsExistFor, }; diff --git a/test/integration/other/migrations.js b/test/integration/other/migrations.js index 84aefa4c3..d4dceae06 100644 --- a/test/integration/other/migrations.js +++ b/test/integration/other/migrations.js @@ -1,3 +1,4 @@ +// TODO rename e.g. legacy-knex-migrations const { readFileSync } = require('fs'); const appRoot = require('app-root-path'); const uuid = require('uuid').v4; diff --git a/test/integration/setup.js b/test/integration/setup.js index 2cc2f6808..586eccfc8 100644 --- a/test/integration/setup.js +++ b/test/integration/setup.js @@ -1,3 +1,4 @@ +const { execSync } = require('node:child_process'); const { readFileSync } = require('fs'); const appRoot = require('app-root-path'); const { mergeRight } = require('ramda'); @@ -5,18 +6,18 @@ const { sql } = require('slonik'); const { readdirSync } = require('fs'); const { join } = require('path'); const request = require('supertest'); -const { noop } = require(appRoot + '/lib/util/util'); const { task } = require(appRoot + '/lib/task/task'); const authenticateUser = require('../util/authenticate-user'); const testData = require('../data/xml'); // knex things. const config = require('config'); -const { knexConnect } = require(appRoot + '/lib/model/migrate'); +const { withPg } = require(appRoot + '/lib/model/pg-migrator'); // slonik connection pool const { slonikPool } = require(appRoot + '/lib/external/slonik'); -const db = slonikPool(config.get('test.database')); +const dbConfig = config.get('test.database'); +const db = slonikPool(dbConfig); // set up our mailer. const env = config.get('default.env'); @@ -72,18 +73,8 @@ const populate = (container, [ head, ...tail ] = fixtures) => // this hook won't run if `test-unit` is called, as this directory is skipped // in that case. const initialize = async () => { - const migrator = knexConnect(config.get('test.database')); - const { log } = console; - try { - await migrator.raw('drop owned by current_user'); - // Silence logging from migrations. - console.log = noop; // eslint-disable-line no-console - await migrator.migrate.latest({ directory: appRoot + '/lib/model/migrations' }); - } finally { - console.log = log; // eslint-disable-line no-console - await migrator.destroy(); - } - + await withPg(dbConfig, client => client.query('DROP OWNED BY CURRENT_USER')); + execSync('make migrations', { env: { ...process.env, NODE_CONFIG: JSON.stringify({ default: { database: dbConfig } }) } }); return withDefaults({ db, context, enketo, env, s3 }).transacting(populate); };
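
Note on writing specs for migrations in the new directory: with the helpers above, a post-knex migration spec keeps the same shape as the legacy ones, just via describeNewMigration. A minimal sketch, with a hypothetical migration name and assertion body:

    // test/db-migrations/20250301-01-example.spec.js (hypothetical)
    const { describeNewMigration } = require('./utils');

    describeNewMigration('20250301-01-example', ({ runMigrationBeingTested }) => {
      before(async () => {
        // set up any pre-migration fixtures here, then:
        await runMigrationBeingTested();
      });

      it('should reflect the expected schema change', async () => {
        // assert against the migrated schema here
      });
    });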