diff --git a/README.md b/README.md
index 530e669..d3ed601 100644
--- a/README.md
+++ b/README.md
@@ -724,6 +724,59 @@ by `graphile-migrate watch` is defined. By default this is in the
 `migrations/current.sql` file, but it might be `migrations/current/*.sql` if
 you're using folder mode.
 
+#### Including external files in the current migration
+
+You can include external files in your `current.sql` to better assist in source
+control. These includes are identified by paths relative to the
+`migrations/fixtures` folder.
+
+For example, given the following directory structure:
+
+```
+/- migrate
+ - migrations
+   |
+   - current.sql
+   - fixtures
+     |
+     - functions
+       |
+       - myfunction.sql
+```
+
+and the contents of `myfunction.sql`:
+
+```sql
+create or replace function myfunction(a int, b int)
+returns int as $$
+  select a + b;
+$$ language sql stable;
+```
+
+When you make changes to `myfunction.sql`, include it in your current migration
+by adding `--!include functions/myfunction.sql` to your `current.sql` (or any
+`current/*.sql`). This statement doesn't need to be at the top of the file;
+wherever it appears, it will be replaced by the content of
+`migrations/fixtures/functions/myfunction.sql` when the migration is committed.
+
+```sql
+--!include functions/myfunction.sql
+drop policy if exists access_by_numbers on mytable;
+create policy access_by_numbers on mytable for update using (myfunction(4, 2) < 42);
+```
+
+and when the migration is committed or watched, the contents of `myfunction.sql`
+will be included in the result, such that the following SQL is executed:
+
+```sql
+create or replace function myfunction(a int, b int)
+returns int as $$
+  select a + b;
+$$ language sql stable;
+drop policy if exists access_by_numbers on mytable;
+create policy access_by_numbers on mytable for update using (myfunction(4, 2) < 42);
+```
+
 ### Committed migration(s)
 
 The files for migrations that you've committed with `graphile-migrate commit`
diff --git a/__tests__/__snapshots__/include.test.ts.snap b/__tests__/__snapshots__/include.test.ts.snap
new file mode 100644
index 0000000..13a4422
--- /dev/null
+++ b/__tests__/__snapshots__/include.test.ts.snap
@@ -0,0 +1,9 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`compiles an included file, and won't get stuck in an infinite include loop 1`] = `
+"Circular include detected - '~/migrations/fixtures/foo.sql' is included again! Import statement: \`--!include foo.sql\`; trace:
+  ~/migrations/fixtures/foo.sql
+  ~/migrations/current.sql"
+`;
+
+exports[`disallows calling files outside of the migrations/fixtures folder 1`] = `"Forbidden: cannot include path '~/outsideFolder/foo.sql' because it's not inside '~/migrations/fixtures'"`;
diff --git a/__tests__/commit.test.ts b/__tests__/commit.test.ts
index 97a3e56..5bf2321 100644
--- a/__tests__/commit.test.ts
+++ b/__tests__/commit.test.ts
@@ -1,6 +1,6 @@
 import "./helpers"; // Has side-effects; must come first
 
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import mockFs from "mock-fs";
 
 import { commit } from "../src";
diff --git a/__tests__/compile.test.ts b/__tests__/compile.test.ts
index 522d07c..4747483 100644
--- a/__tests__/compile.test.ts
+++ b/__tests__/compile.test.ts
@@ -1,7 +1,8 @@
 import "./helpers";
 
-import { compile } from "../src";
+import * as mockFs from "mock-fs";
 
+import { compile } from "../src";
 let old: string | undefined;
 beforeAll(() => {
   old = process.env.DATABASE_AUTHENTICATOR;
@@ -11,6 +12,10 @@ afterAll(() => {
   process.env.DATABASE_AUTHENTICATOR = old;
 });
 
+afterEach(() => {
+  mockFs.restore();
+});
+
 it("compiles SQL with settings", async () => {
   expect(
     await compile(
diff --git a/__tests__/include.test.ts b/__tests__/include.test.ts
new file mode 100644
index 0000000..adafe55
--- /dev/null
+++ b/__tests__/include.test.ts
@@ -0,0 +1,145 @@
+import "./helpers";
+
+import mockFs from "mock-fs";
+
+import { compileIncludes } from "../src/migration";
+import { ParsedSettings, parseSettings } from "../src/settings";
+
+let old: string | undefined;
+let settings: ParsedSettings;
+beforeAll(async () => {
+  old = process.env.DATABASE_AUTHENTICATOR;
+  process.env.DATABASE_AUTHENTICATOR = "dbauth";
+  settings = await parseSettings({
+    connectionString: "postgres://dbowner:dbpassword@dbhost:1221/dbname",
+    placeholders: {
+      ":DATABASE_AUTHENTICATOR": "!ENV",
+    },
+    migrationsFolder: "migrations",
+  });
+});
+afterAll(() => {
+  process.env.DATABASE_AUTHENTICATOR = old;
+});
+
+afterEach(() => {
+  mockFs.restore();
+});
+
+/** Pretends that our compiled files are 'current.sql' */
+const FAKE_VISITED = new Set([`${process.cwd()}/migrations/current.sql`]);
+
+it("compiles an included file", async () => {
+  mockFs({
+    "migrations/fixtures/foo.sql": "select * from foo;",
+  });
+  expect(
+    await compileIncludes(
+      settings,
+      `\
+--!include foo.sql
+`,
+      FAKE_VISITED,
+    ),
+  ).toEqual(`\
+select * from foo;
+`);
+});
+
+it("compiles multiple included files", async () => {
+  mockFs({
+    "migrations/fixtures/dir1/foo.sql": "select * from foo;",
+    "migrations/fixtures/dir2/bar.sql": "select * from bar;",
+    "migrations/fixtures/dir3/baz.sql": "--!include dir4/qux.sql",
+    "migrations/fixtures/dir4/qux.sql": "select * from qux;",
+  });
+  expect(
+    await compileIncludes(
+      settings,
+      `\
+--!include dir1/foo.sql
+--!include dir2/bar.sql
+--!include dir3/baz.sql
+`,
+      FAKE_VISITED,
+    ),
+  ).toEqual(`\
+select * from foo;
+select * from bar;
+select * from qux;
+`);
+});
+
+it("compiles an included file, and won't get stuck in an infinite include loop", async () => {
+  mockFs({
+    "migrations/fixtures/foo.sql": "select * from foo;\n--!include foo.sql",
+  });
+  const promise = compileIncludes(
+    settings,
+    `\
+--!include foo.sql
+`,
+    FAKE_VISITED,
+  );
+  await expect(promise).rejects.toThrowError(/Circular include/);
+  const message = await promise.catch((e) => e.message);
+  expect(message.replaceAll(process.cwd(), "~")).toMatchSnapshot();
+});
+
+it("disallows calling files outside of the migrations/fixtures folder", async () => {
+  mockFs({
+    "migrations/fixtures/bar.sql": "",
+    "outsideFolder/foo.sql": "select * from foo;",
+  });
+
+  const promise = compileIncludes(
+    settings,
+    `\
+--!include ../../outsideFolder/foo.sql
+`,
+    FAKE_VISITED,
+  );
+  await expect(promise).rejects.toThrowError(/Forbidden: cannot include/);
+  const message = await promise.catch((e) => e.message);
+  expect(message.replaceAll(process.cwd(), "~")).toMatchSnapshot();
+});
+
+it("compiles an included file that contains escapable things", async () => {
+  mockFs({
+    "migrations/fixtures/foo.sql": `\
+begin;
+
+create or replace function current_user_id() returns uuid as $$
+  select nullif(current_setting('user.id', true)::text, '')::uuid;
+$$ language sql stable;
+
+comment on function current_user_id is E'The ID of the current user.';
+
+grant all on function current_user_id to :DATABASE_USER;
+
+commit;
+`,
+  });
+  expect(
+    await compileIncludes(
+      settings,
+      `\
+--!include foo.sql
+`,
+      FAKE_VISITED,
+    ),
+  ).toEqual(`\
+begin;
+
+create or replace function current_user_id() returns uuid as $$
+  select nullif(current_setting('user.id', true)::text, '')::uuid;
+$$ language sql stable;
+
+comment on function current_user_id is E'The ID of the current user.';
+
+grant all on function current_user_id to :DATABASE_USER;
+
+commit;
+
+`);
+});
diff --git a/__tests__/readCurrentMigration.test.ts b/__tests__/readCurrentMigration.test.ts
index ba75bfa..6e0efb5 100644
--- a/__tests__/readCurrentMigration.test.ts
+++ b/__tests__/readCurrentMigration.test.ts
@@ -102,3 +102,14 @@ With multiple lines
   const content = await readCurrentMigration(parsedSettings, currentLocation);
   expect(content).toEqual(contentWithSplits);
 });
+
+it("reads from current.sql, and processes included files", async () => {
+  mockFs({
+    "migrations/current.sql": "--!include foo_current.sql",
+    "migrations/fixtures/foo_current.sql": "-- TEST from foo",
+  });
+
+  const currentLocation = await getCurrentMigrationLocation(parsedSettings);
+  const content = await readCurrentMigration(parsedSettings, currentLocation);
+  expect(content).toEqual("-- TEST from foo");
+});
diff --git a/__tests__/uncommit.test.ts b/__tests__/uncommit.test.ts
index 2087382..a57031c 100644
--- a/__tests__/uncommit.test.ts
+++ b/__tests__/uncommit.test.ts
@@ -1,6 +1,6 @@
 import "./helpers"; // Has side-effects; must come first
 
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import mockFs from "mock-fs";
 
 import { commit, migrate, uncommit } from "../src";
diff --git a/__tests__/writeCurrentMigration.test.ts b/__tests__/writeCurrentMigration.test.ts
index 816205b..b13422a 100644
--- a/__tests__/writeCurrentMigration.test.ts
+++ b/__tests__/writeCurrentMigration.test.ts
@@ -1,6 +1,6 @@
 import "./helpers"; // Has side-effects; must come first
 
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import mockFs from "mock-fs";
 
 import {
diff --git a/package.json b/package.json
index ec4a026..5dfd46b 100644
--- a/package.json
+++ b/package.json
@@ -42,12 +42,12 @@
   "dependencies": {
     "@graphile/logger": "^0.2.0",
     "@types/json5": "^2.2.0",
-    "@types/node": "^20.11.5",
-    "@types/pg": "^8.10.9",
+    "@types/node": "^18",
+    "@types/pg": ">=6 <9",
     "chalk": "^4",
     "chokidar": "^3.5.3",
     "json5": "^2.2.3",
-    "pg": "^8.11.3",
+    "pg": ">=6.5 <9",
     "pg-connection-string": "^2.6.2",
     "pg-minify": "^1.6.3",
     "tslib": "^2.6.2",
diff --git a/scripts/update-docs.js b/scripts/update-docs.js
index 5fd4536..7645dfb 100755
--- a/scripts/update-docs.js
+++ b/scripts/update-docs.js
@@ -1,5 +1,5 @@
 #!/usr/bin/env node
-const { promises: fsp } = require("fs");
+const fsp = require("fs/promises");
 const { spawnSync } = require("child_process");
 
 async function main() {
diff --git a/src/__mocks__/migration.ts b/src/__mocks__/migration.ts
index 074281b..c5cced4 100644
--- a/src/__mocks__/migration.ts
+++ b/src/__mocks__/migration.ts
@@ -36,3 +36,7 @@ export const runStringMigration = jest.fn(
 export const runCommittedMigration = jest.fn(
   (_client, _settings, _context, _committedMigration, _logSuffix) => {},
 );
+
+export const compileIncludes = jest.fn((parsedSettings, content) => {
+  return content;
+});
diff --git a/src/actions.ts b/src/actions.ts
index 34048c8..bbb1351 100644
--- a/src/actions.ts
+++ b/src/actions.ts
@@ -1,6 +1,6 @@
 import { Logger } from "@graphile/logger";
 import { exec as rawExec } from "child_process";
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import { parse } from "pg-connection-string";
 import { inspect, promisify } from "util";
 
diff --git a/src/commands/_common.ts b/src/commands/_common.ts
index ed9a5af..bfd7f41 100644
--- a/src/commands/_common.ts
+++ b/src/commands/_common.ts
@@ -1,4 +1,5 @@
-import { constants, promises as fsp } from "fs";
+import { constants } from "fs";
+import * as fsp from "fs/promises";
 import * as JSON5 from "json5";
 import { resolve } from "path";
 import { parse } from "pg-connection-string";
diff --git a/src/commands/commit.ts b/src/commands/commit.ts
index 1911624..41bede9 100644
--- a/src/commands/commit.ts
+++ b/src/commands/commit.ts
@@ -1,5 +1,5 @@
 import pgMinify = require("pg-minify");
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import { CommandModule } from "yargs";
 
 import {
diff --git a/src/commands/compile.ts b/src/commands/compile.ts
index 1bf62bf..2516cf5 100644
--- a/src/commands/compile.ts
+++ b/src/commands/compile.ts
@@ -1,4 +1,4 @@
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import { CommandModule } from "yargs";
 
 import { compilePlaceholders } from "../migration";
diff --git a/src/commands/init.ts b/src/commands/init.ts
index f346e57..b617c30 100644
--- a/src/commands/init.ts
+++ b/src/commands/init.ts
@@ -1,4 +1,4 @@
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import { CommandModule } from "yargs";
 
 import { getCurrentMigrationLocation, writeCurrentMigration } from "../current";
diff --git a/src/commands/run.ts b/src/commands/run.ts
index f4a065f..7d963c2 100644
--- a/src/commands/run.ts
+++ b/src/commands/run.ts
@@ -1,4 +1,4 @@
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import { QueryResultRow } from "pg";
 import { CommandModule } from "yargs";
 
diff --git a/src/commands/uncommit.ts b/src/commands/uncommit.ts
index 771e4e5..01fd188 100644
--- a/src/commands/uncommit.ts
+++ b/src/commands/uncommit.ts
@@ -1,5 +1,5 @@
 import pgMinify = require("pg-minify");
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
 import { CommandModule } from "yargs";
 
 import {
diff --git a/src/commands/watch.ts b/src/commands/watch.ts
index 6f0c502..0ce3277 100644
--- a/src/commands/watch.ts
+++ b/src/commands/watch.ts
@@ -217,29 +217,32 @@ export async function _watch(
           }
         });
     };
-    const watcher = chokidar.watch(currentLocation.path, {
-      /*
-       * Without `usePolling`, on Linux, you can prevent the watching from
-       * working by issuing `git stash && sleep 2 && git stash pop`. This is
-       * annoying.
-       */
-      usePolling: true,
+    const watcher = chokidar.watch(
+      [currentLocation.path, `${parsedSettings.migrationsFolder}/fixtures`],
+      {
+        /*
+         * Without `usePolling`, on Linux, you can prevent the watching from
+         * working by issuing `git stash && sleep 2 && git stash pop`. This is
+         * annoying.
+         */
+        usePolling: true,
 
-      /*
-       * Some editors stream the writes out a little at a time, we want to wait
-       * for the write to finish before triggering.
-       */
-      awaitWriteFinish: {
-        stabilityThreshold: 200,
-        pollInterval: 100,
-      },
+        /*
+         * Some editors stream the writes out a little at a time, we want to wait
+         * for the write to finish before triggering.
+         */
+        awaitWriteFinish: {
+          stabilityThreshold: 200,
+          pollInterval: 100,
+        },
 
-      /*
-       * We don't want to run the queue too many times during startup; so we
-       * call it once on the 'ready' event.
-       */
-      ignoreInitial: true,
-    });
+        /*
+         * We don't want to run the queue too many times during startup; so we
+         * call it once on the 'ready' event.
+         */
+        ignoreInitial: true,
+      },
+    );
     watcher.on("add", queue);
     watcher.on("change", queue);
     watcher.on("unlink", queue);
diff --git a/src/current.ts b/src/current.ts
index e0c324f..748598b 100644
--- a/src/current.ts
+++ b/src/current.ts
@@ -3,7 +3,11 @@ import { promises as fsp, Stats } from "fs";
 
 import { isNoTransactionDefined } from "./header";
 import { errorCode } from "./lib";
-import { parseMigrationText, serializeHeader } from "./migration";
+import {
+  compileIncludes,
+  parseMigrationText,
+  serializeHeader,
+} from "./migration";
 import { ParsedSettings } from "./settings";
 
 export const VALID_FILE_REGEX = /^([0-9]+)(-[-_a-zA-Z0-9]*)?\.sql$/;
@@ -99,14 +103,18 @@ function idFromFilename(file: string): number {
 }
 
 export async function readCurrentMigration(
-  _parsedSettings: ParsedSettings,
+  parsedSettings: ParsedSettings,
   location: CurrentMigrationLocation,
 ): Promise<string> {
   if (location.isFile) {
     const content = await readFileOrNull(location.path);
 
     // If file doesn't exist, treat it as if it were empty.
-    return content || "";
+    return compileIncludes(
+      parsedSettings,
+      content || "",
+      new Set([location.path]),
+    );
   } else {
     const files = await fsp.readdir(location.path);
     const parts = new Map<
@@ -156,7 +164,12 @@ export async function readCurrentMigration(
     for (const id of ids) {
       // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
       const { file, filePath, bodyPromise } = parts.get(id)!;
-      const contents = await bodyPromise;
+      const rawContents = await bodyPromise;
+      const contents = await compileIncludes(
+        parsedSettings,
+        rawContents,
+        new Set([filePath]),
+      );
       const { body, headers } = parseMigrationText(filePath, contents, false);
       headerses.push(headers);
       if (isNoTransactionDefined(body)) {
@@ -181,6 +194,7 @@ export async function readCurrentMigration(
     if (headerLines.length) {
       wholeBody = headerLines.join("\n") + "\n\n" + wholeBody;
     }
+
     return wholeBody;
   }
 }
diff --git a/src/migration.ts b/src/migration.ts
index 5177fd2..6d5a26d 100644
--- a/src/migration.ts
+++ b/src/migration.ts
@@ -1,4 +1,5 @@
-import { promises as fsp } from "fs";
+import * as fsp from "fs/promises";
+import { relative } from "path";
 
 import { VALID_FILE_REGEX } from "./current";
 import { calculateHash } from "./hash";
@@ -118,6 +119,102 @@ export function compilePlaceholders(
   )(content);
 }
 
+async function realpathOrNull(path: string): Promise<string | null> {
+  try {
+    return await fsp.realpath(path);
+  } catch (e) {
+    return null;
+  }
+}
+
+export async function compileIncludes(
+  parsedSettings: ParsedSettings,
+  content: string,
+  processedFiles: ReadonlySet<string>,
+): Promise<string> {
+  const regex = /^--!include[ \t]+(.*\.sql)[ \t]*$/gm;
+
+  // Find all includes in this `content`
+  const matches = [...content.matchAll(regex)];
+
+  // There's no includes
+  if (matches.length === 0) {
+    return content;
+  }
+
+  // Since there's at least one include, we need the fixtures path:
+  const rawFixturesPath = `${parsedSettings.migrationsFolder}/fixtures`;
+  const fixturesPath = await realpathOrNull(rawFixturesPath);
+  if (!fixturesPath) {
+    throw new Error(
+      `File contains '--!include' but fixtures folder '${rawFixturesPath}' doesn't exist?`,
+    );
+  }
+
+  // Go through these matches and resolve their full paths, checking they are allowed
+  const sqlPathByRawSqlPath = Object.create(null) as Record<string, string>;
+  for (const match of matches) {
+    const [line, rawSqlPath] = match;
+    const sqlPath = await realpathOrNull(`${fixturesPath}/${rawSqlPath}`);
+
+    if (!sqlPath) {
+      throw new Error(
+        `Include of '${rawSqlPath}' failed because '${fixturesPath}/${rawSqlPath}' doesn't seem to exist?`,
+      );
+    }
+
+    if (processedFiles.has(sqlPath)) {
+      throw new Error(
+        `Circular include detected - '${sqlPath}' is included again! Import statement: \`${line}\`; trace:\n  ${[...processedFiles].reverse().join("\n  ")}`,
+      );
+    }
+
+    const relativePath = relative(fixturesPath, sqlPath);
+    if (relativePath.startsWith("..")) {
+      throw new Error(
+        `Forbidden: cannot include path '${sqlPath}' because it's not inside '${fixturesPath}'`,
+      );
+    }
+
+    // Looks good to me
+    sqlPathByRawSqlPath[rawSqlPath] = sqlPath;
+  }
+
+  // For the unique set of paths, load the file and then recursively do its own includes
+  const distinctSqlPaths = [...new Set(Object.values(sqlPathByRawSqlPath))];
+  const contentsForDistinctSqlPaths = await Promise.all(
+    distinctSqlPaths.map(async (sqlPath) => {
+      const fileContents = await fsp.readFile(sqlPath, "utf8");
+      const processed = await compileIncludes(
+        parsedSettings,
+        fileContents,
+        new Set([...processedFiles, sqlPath]),
+      );
+      return processed;
+    }),
+  );
+
+  // Turn the results into a map for ease of lookup
+  const contentBySqlPath = Object.create(null) as Record<string, string>;
+  for (let i = 0, l = distinctSqlPaths.length; i < l; i++) {
+    const sqlPath = distinctSqlPaths[i];
+    const content = contentsForDistinctSqlPaths[i];
+    contentBySqlPath[sqlPath] = content;
+  }
+
+  // Simple string replacement for each path matched
+  const compiledContent = content.replace(
+    regex,
+    (_match, rawSqlPath: string) => {
+      const sqlPath = sqlPathByRawSqlPath[rawSqlPath];
+      const content = contentBySqlPath[sqlPath];
+      return content;
+    },
+  );
+
+  return compiledContent;
+}
+
 const TABLE_CHECKS = {
   migrations: {
     columnCount: 4,
diff --git a/yarn.lock b/yarn.lock
index 1039154..47b3c25 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -744,26 +744,33 @@
   dependencies:
     "@types/node" "*"
 
-"@types/node@*", "@types/node@^20.11.5":
+"@types/node@*":
   version "20.11.5"
   resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.5.tgz#be10c622ca7fcaa3cf226cf80166abc31389d86e"
   integrity sha512-g557vgQjUUfN76MZAN/dt1z3dzcUsimuysco0KeluHgrPdJXkP/XdAURgyO2W9fZWHRtRBiVKzKn8vyOAwlG+w==
   dependencies:
     undici-types "~5.26.4"
 
+"@types/node@^18":
+  version "18.19.8"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.8.tgz#c1e42b165e5a526caf1f010747e0522cb2c9c36a"
+  integrity sha512-g1pZtPhsvGVTwmeVoexWZLTQaOvXwoSq//pTL0DHeNzUDrFnir4fgETdhjhIxjVnN+hKOuh98+E1eMLnUXstFg==
+  dependencies:
+    undici-types "~5.26.4"
+
 "@types/parse-json@^4.0.0":
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.2.tgz#5950e50960793055845e956c427fc2b0d70c5239"
   integrity sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==
 
-"@types/pg@^8.10.9":
-  version "8.10.9"
-  resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.10.9.tgz#d20bb948c6268c5bd847e2bf968f1194c5a2355a"
-  integrity sha512-UksbANNE/f8w0wOMxVKKIrLCbEMV+oM1uKejmwXr39olg4xqcfBDbXxObJAt6XxHbDa4XTKOlUEcEltXDX+XLQ==
+"@types/pg@>=6 <9":
+  version "8.6.0"
+  resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.6.0.tgz#34233b891a127d6caaad28e177b1baec1a2958d4"
+  integrity sha512-3JXFrsl8COoqVB1+2Pqelx6soaiFVXzkT3fkuSNe7GB40ysfT0FHphZFPiqIXpMyTHSFRdLTyZzrFBrJRPAArA==
   dependencies:
     "@types/node" "*"
     pg-protocol "*"
-    pg-types "^4.0.1"
+    pg-types "^2.2.0"
 
 "@types/semver@^7.3.12", "@types/semver@^7.5.0":
   version "7.5.6"
@@ -3155,11 +3162,6 @@ object.values@^1.1.7:
     define-properties "^1.2.0"
     es-abstract "^1.22.1"
 
-obuf@~1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e"
-  integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==
-
 once@^1.3.0:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
@@ -3291,17 +3293,12 @@ pg-minify@^1.6.3:
   resolved "https://registry.yarnpkg.com/pg-minify/-/pg-minify-1.6.3.tgz#3def4c876a2d258da20cfdb0e387373d41c7a4dc"
   integrity sha512-NoSsPqXxbkD8RIe+peQCqiea4QzXgosdTKY8p7PsbbGsh2F8TifDj/vJxfuR8qJwNYrijdSs7uf0tAe6WOyCsQ==
 
-pg-numeric@1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/pg-numeric/-/pg-numeric-1.0.2.tgz#816d9a44026086ae8ae74839acd6a09b0636aa3a"
-  integrity sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==
-
 pg-pool@^3.6.1:
   version "3.6.1"
   resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.1.tgz#5a902eda79a8d7e3c928b77abf776b3cb7d351f7"
   integrity sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==
 
-pg-protocol@*, pg-protocol@^1.6.0:
+pg-protocol@^1.6.0:
   version "1.6.0"
   resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833"
   integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==
@@ -3317,20 +3314,7 @@ pg-types@^2.1.0:
     postgres-date "~1.0.4"
     postgres-interval "^1.1.0"
 
-pg-types@^4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-4.0.1.tgz#31857e89d00a6c66b06a14e907c3deec03889542"
-  integrity sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==
-  dependencies:
-    pg-int8 "1.0.1"
-    pg-numeric "1.0.2"
-    postgres-array "~3.0.1"
-    postgres-bytea "~3.0.0"
-    postgres-date "~2.0.1"
-    postgres-interval "^3.0.0"
-    postgres-range "^1.1.1"
-
-pg@^8.11.3:
+"pg@>=6.5 <9":
   version "8.11.3"
   resolved "https://registry.yarnpkg.com/pg/-/pg-8.11.3.tgz#d7db6e3fe268fcedd65b8e4599cda0b8b4bf76cb"
   integrity sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==
@@ -3395,33 +3379,16 @@ postgres-array@~2.0.0:
   resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e"
   integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==
 
-postgres-array@~3.0.1:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-3.0.2.tgz#68d6182cb0f7f152a7e60dc6a6889ed74b0a5f98"
-  integrity sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==
-
 postgres-bytea@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35"
   integrity sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==
 
-postgres-bytea@~3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-3.0.0.tgz#9048dc461ac7ba70a6a42d109221619ecd1cb089"
-  integrity sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==
-  dependencies:
-    obuf "~1.1.2"
-
 postgres-date@~1.0.4:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8"
   integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==
 
-postgres-date@~2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-2.0.1.tgz#638b62e5c33764c292d37b08f5257ecb09231457"
-  integrity sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==
-
 postgres-interval@^1.1.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695"
@@ -3429,16 +3396,6 @@ postgres-interval@^1.1.0:
   dependencies:
     xtend "^4.0.0"
 
-postgres-interval@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-3.0.0.tgz#baf7a8b3ebab19b7f38f07566c7aab0962f0c86a"
-  integrity sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==
-
-postgres-range@^1.1.1:
-  version "1.1.3"
-  resolved "https://registry.yarnpkg.com/postgres-range/-/postgres-range-1.1.3.tgz#9ccd7b01ca2789eb3c2e0888b3184225fa859f76"
-  integrity sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==
-
 prelude-ls@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"