diff --git a/.env b/.env index cb13b451..bd09ad58 100644 --- a/.env +++ b/.env @@ -20,3 +20,33 @@ AUTH0_DOMAIN=https://dev-fmjy7n5n.us.auth0.com AUTH0_KID=uciP2tJdJ4BKWoz73Fmln MAPTILES_WORKING_DIR=./maptiles + +# Google Cloud keys and config +GCS_ENABLE_SERVICES=false +GCS_PROJECT_ID=someproject-45031206 +GCS_CLOUD_BUCKET_ID=openbeta-test + +# Only needed if you intend to use PULL subscriptions, which is only likely +# in the event that you are working on something to do with the GCS cloud, +# otherwise you can just leave it off +# eg: projects/someproject-450306/subscriptions/pull-sub +GCS_NOTIFICATIONS_SUBSCRIPTION= +# only required in the event that your server is supposed to recieve +# events posted to it by a push subscriber already set up to point to +# you in the GCS. This is practically unheard of in development environments +# since it required instrumentation of HTTPS and DNS setup - or some ngrok +# wizardry, I suppose. +# +# eg: /rest/gcs-event +GCS_MEDIA_HOOK_URL= +# This var is not required, except in the case that you would like to run +# the FULL integration tests on the hook url. +# e.g: https://stg-api.openbeta.io/rest/gcs-event +GCS_MEDIA_HOOK_PUBLIC= + +# Check out the readme to see how user can be set up for your purposes. +# e.g: openbeta@someproject-45031206.iam.gserviceaccount.com +GCS_BUCKET_CLIENT_EMAIL= +# starts as BEGIN PRIVATE KEY +GCS_PRIVATE_KEY= + diff --git a/.gitignore b/.gitignore index 2949f9cd..3f6275a9 100644 --- a/.gitignore +++ b/.gitignore @@ -7,9 +7,13 @@ yarn-error.log* lerna-debug.log* .DS_Store +bucket + # Diagnostic reports (https://nodejs.org/api/report.html) report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json +key.json + # Runtime data pids *.pid diff --git a/package.json b/package.json index eb9421e0..edd7f41e 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,12 @@ "@types/supertest": "^2.0.12", "@types/underscore": "^1.11.4", "cross-env": "^7.0.3", + "deepmerge": "^4.3.1", + "file-type": "^20.4.1", "husky": "^8.0.1", + "image-decode": "^1.2.2", + "image-dimensions": "^2.3.0", + "image-size": "^2.0.2", "jest": "^29.7.0", "jest-extended": "^4.0.2", "mongodb-memory-server": "^10.1.2", @@ -27,6 +32,7 @@ "dependencies": { "@apollo/server": "^4.11.2", "@babel/runtime": "^7.17.2", + "@google-cloud/pubsub": "^4.11.0", "@google-cloud/storage": "^6.9.5", "@graphql-tools/schema": "^8.3.1", "@openbeta/sandbag": "^0.0.51", @@ -36,6 +42,7 @@ "@turf/circle": "^6.5.0", "@turf/convex": "^6.5.0", "@turf/helpers": "^6.5.0", + "@types/jsonwebtoken": "^9.0.9", "@types/uuid": "^8.3.3", "apollo-datasource-mongodb": "^0.6.0", "auth0": "^3.4.0", @@ -47,6 +54,7 @@ "dotenv": "^16.4.4", "express": "^4.18.2", "glob": "^10.2.2", + "google-auth-library": "^9.15.1", "graphql": "^16.9.0", "graphql-middleware": "^6.1.31", "graphql-shield": "^7.5.0", @@ -58,12 +66,13 @@ "jwks-rsa": "^2.1.4", "mongoose": "^7.8.3", "mongoose-lean-virtuals": "^1.0.0", + "nanoid": "^5.1.5", "node-fetch": "2", "p-limit": "^4.0.0", "pino": "^9.5.0", "pino-logflare": "^0.4.2", "sanitize-html": "^2.7.2", - "sharp": "^0.32.0", + "sharp": "^0.34.1", "typesense": "^1.8.2", "underscore": "^1.13.2", "uuid": "^8.3.2", @@ -103,8 +112,11 @@ "ignore": [ "build", "hacks", - "**/*.test.ts", - "db-migrations" + "db-migrations", + "src/db/export/**/*.test.ts", + "src/__tests__/bulkImport.test.ts", + "src/model/__tests__/BulkDataSource.test.ts", + "src/model/__tests__/MutableAreaDataSource.test.ts" ] }, "type": "module", diff --git a/src/db/MediaObjectSchema.ts b/src/db/MediaObjectSchema.ts index 690b24aa..3096c81d 
100644 --- a/src/db/MediaObjectSchema.ts +++ b/src/db/MediaObjectSchema.ts @@ -34,7 +34,18 @@ const schema = new Schema({ height: { type: Schema.Types.Number, required: true }, size: { type: Schema.Types.Number, required: true }, format: { type: Schema.Types.String, required: true }, - entityTags: [EntitySchema] + entityTags: [EntitySchema], + expiresAt: { + type: Date, + // Defines a TTL index on this path. + expires: 0, + // We want the ttl to be disabled by default since really we want to + // prevent any scenario in which a developer pushes out an update that + // causes media to go missing. + default: undefined, + // We don't need to keep track of this aftert the pending status is lapsed + required: false + } }, { _id: true, timestamps: true, toJSON: { versionKey: false }, toObject: { versionKey: false } }) /** diff --git a/src/db/MediaObjectTypes.ts b/src/db/MediaObjectTypes.ts index 675de223..9c873e1e 100644 --- a/src/db/MediaObjectTypes.ts +++ b/src/db/MediaObjectTypes.ts @@ -14,6 +14,7 @@ export interface MediaObject { createdAt: Date size: number entityTags?: EntityTag[] + expiresAt?: Date } export interface EntityTag { @@ -103,9 +104,12 @@ export interface EntityTagDeleteInput { /** * GQL user input type for add media mutation */ -export type MediaObjectGQLInput = Pick & { +export type MediaObjectGQLInput = Pick & { userUuid: string + mediaUrl?: string entityTag?: Omit + filename?: string + maskFilename?: boolean } /** diff --git a/src/google-cloud/README.md b/src/google-cloud/README.md new file mode 100644 index 00000000..455be8a5 --- /dev/null +++ b/src/google-cloud/README.md @@ -0,0 +1,135 @@ +# Google Cloud Services (GCS) Integration + +We currently store our media in a GCS bucket, many of the below concepts and approaches are generalizable to other storage bucket providers. + +## Abstract + +Rather than trying to ingest image data through the GQL endpoint, we route the users upload content via a side channel. There are good reasons to do this, especially insofar as resource management on our VMs is concerned. + + ```mermaid +sequenceDiagram + User->>Openbeta GQL: Here is an image file + Openbeta GQL->>Openbeta GQL: Check user auth + Openbeta GQL->>Google Cloud: Can I please have a signed url for my user? + Google Cloud-->>Openbeta GQL: ofc bb 😘 + Openbeta GQL->> Openbeta GQL: Create entry in database for the pending media + Openbeta GQL -->>User: Here is an un-reified media object, upload to + User->>Google Cloud: Here is the file + Google Cloud-->>User: Okay + Google Cloud->>Openbeta GQL: A new media object just got added with + Openbeta GQL->Openbeta GQL: Update the un-reified media object such that it is finalized +``` +> From the discussion in [the relevant issue](https://github.com/OpenBeta/openbeta-graphql/issues/443) + +This isn't quite exhaustive in terms of the real sequence but it covers the main design. The actual implementation provided support [two patterns](https://cloud.google.com/storage/docs/pubsub-notifications), one for single-node and one for multiple nodes behind a load balanacer. + +[Pull pattern](https://cloud.google.com/pubsub/docs/subscriber#subscription_type_comparison) is fine for people hosting a single node in front of their data, or for developers who like to develop with integration to a real life GCS instance. + +[Push pattern](https://cloud.google.com/pubsub/docs/subscriber#subscription_type_comparison) works by having a route open `/rest/gc-event`, for example. Google will then send notifications to this endpoint. 
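
For a server that opts into the push pattern, the only wiring needed is to mount the handler exported by `push-subscriber.ts` on that route. Below is a minimal sketch; the route path comes from `GCS_MEDIA_HOOK_URL`, and the exact place this mounting happens in the main server is not shown in this diff, so treat it as illustrative:

```typescript
import express from 'express'
import { googleCloudWebHookRecieverWithValidator } from './push-subscriber.js'
import { validateGoogleJWT } from './google-auth.js'
import { GCS_MEDIA_HOOK_URL } from './index.js'

const app = express()

if (GCS_MEDIA_HOOK_URL !== undefined && GCS_MEDIA_HOOK_URL !== '') {
  // The handler validates the Google-signed JWT, checks the message shape and
  // bucket id, and finalizes the matching MediaObject (see push-subscriber.ts).
  const handler = googleCloudWebHookRecieverWithValidator(validateGoogleJWT)
  app.post(GCS_MEDIA_HOOK_URL, express.json(), (req, res) => {
    void handler(req, res).catch(console.error)
  })
}
```
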
+This will let your load-balancing strategy take care of routing those requests, so you don't need to worry about which node receives them.
+
+## How to upload images (Client Docs)
+
+ ```mermaid
+sequenceDiagram
+    participant Google Cloud
+    User->>Openbeta GQL: Here is an image file
+    Openbeta GQL -->>User: Here is an un-reified media object, upload to
+    User->>Google Cloud: Here is a file
+    Google Cloud-->>User: Okay
+```
+
+This is quite convenient: a single mutation can request any number of new media objects and get back signed upload endpoints for all of them (a minimal client sketch appears at the end of this README). The Google storage bucket itself is extremely robust, so uploading to the signed endpoint should rarely be a problem.
+
+## Developing
+
+Integration with cloud services is notorious for the headaches it can cause in a development environment. We are sadly not immune to this, though we try to provide a sane development setup so that contributors can get up and running as quickly as possible.
+
+### Challenges
+
+In principle this logic is perfectly straightforward to test; see the [integration tests](./__tests__/integration.test.ts), which are quite thorough at checking the sanity of all ends of the pipeline. The challenge is the one all integration tests share: it can be a real headache to set up all the moving pieces consistently.
+
+### Suggestion to developers
+
+Unless you need to work directly on integration bugs, write your code so that it can be mocked in a sane way.
+
+### How to perform integration testing
+
+You will need to set up Google Cloud services and update your .env.local with the required vars. I have included some pointers on how to set up your infrastructure with minimal pain.
+
+## Deployment
+
+This portion of the document does not go into much detail and is not a step-by-step guide; it only specifies the required parameters.
+
+### Setting up a bucket
+
+Go to your console, navigate to your project (or create one), and then [Create a bucket](https://cloud.google.com/storage/docs/creating-buckets) inside that project context.
+
+### Setting up notifications for bucket
+
+[Sadly, notifications don't come out of the box](https://cloud.google.com/storage/docs/reporting-changes#prereqs). After creating your bucket, you can [create a Topic](https://cloud.google.com/pubsub/docs/publish-receive-messages-console#create_a_topic), which will act as a pointer that can receive events from the bucket.
+
+### Setting up a service account
+
+- Read/Write/Delete on a Storage Bucket: this requires the Storage Object Admin role (roles/storage.objectAdmin) on the specific bucket. This role grants comprehensive control over objects within the bucket.
+- Ability to sign URLs: this requires the storage.buckets.get and storage.objects.create permissions. These are typically included in roles like Storage Object Admin or Storage Admin (roles/storage.admin).
+- Permission to consume a pull subscription: this requires the Pub/Sub Subscriber role (roles/pubsub.subscriber) on the specific subscription. (OPTIONAL)
+
+To add a service account to your Google Cloud Storage (GCS) project with the specified permissions, perform the following steps using the Google Cloud Console or the Google Cloud CLI.
+
+Using the console:
+
+ 1. Create a Service Account (if you don't have one already):
+    - Go to the Service accounts page in the Google Cloud Console.
+    - Select your project.
+    - Click + CREATE SERVICE ACCOUNT.
+ - Enter a Service account name, Service account ID (will be auto-generated), and an optional Service account description. + - Click CREATE AND CONTINUE. + 1. Grant Permissions to the Service Account: + 2. Download the Service Account Key: + - Go back to the Service accounts page. + - Find the service account you created. + - Click the three dots (Actions) in the Actions column. + - Select Manage keys. + - Click ADD KEY and then Create new key. + - Choose JSON as the Key type (recommended). + - Click CREATE. The JSON key file will be downloaded to your computer. Keep this file secure. You can drop that file into the repo root or you can extract the private key from it and set an OS env var with it. The API will take either. + +#### If you want to use **Pull Subscriptions** + +You will also need to add the `roles/pubsub.subscriber` role on your service account, for the subscriber you wish to use (or project-wide) + +### Env vars + +```bash +# Google Cloud keys and config +GCS_ENABLE_SERVICES=true +GCS_PROJECT_ID=someproject-45031206 +GCS_CLOUD_BUCKET_ID=openbeta-test + +# Only needed if you intend to use PULL subscriptions, which is only likely +# in the event that you are working on something to do with the GCS cloud, +# otherwise you can just leave it off +# eg: projects/someproject-450306/subscriptions/pull-sub +GCS_NOTIFICATIONS_SUBSCRIPTION="" + +# only required in the event that your server is supposed to recieve +# events posted to it by a push subscriber already set up to point to +# you in the GCS. This is practically unheard of in development environments +# since it required instrumentation of HTTPS and DNS setup - or some ngrok +# wizardry, I suppose. +# +# eg: /rest/gcs-event +GCS_MEDIA_HOOK_URL= +# This var is not required, except in the case that you would like to run +# the FULL integration tests on the hook url. +# e.g: https://stg-api.openbeta.io/rest/gcs-event +GCS_MEDIA_HOOK_PUBLIC= + +# Check out the readme to see how user can be set up for your purposes. +GCS_BUCKET_CLIENT_EMAIL="openbeta@someproject-45031206.iam.gserviceaccount.com" +GCS_PRIVATE_KEY="BEGIN PRIVATE KEY" +``` + +You can now set up your env vars in your `.env.local` file, based on the features you woud like to enable. Choose one of `GCS_NOTIFICATIONS_SUBSCRIPTION` or `GCS_MEDIA_HOOK_URL` depending on which strategy you prefer. 
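
To tie the client-facing flow together, here is a rough sketch of an upload from the client's point of view. The GraphQL endpoint, the auth handling, and the exact field that carries the signed upload URL are assumptions (the relevant part of `Media.gql` is not reproduced here), so treat the shapes as illustrative rather than authoritative:

```typescript
// Hypothetical client-side upload flow: request a pending media object, then
// PUT the raw bytes to the signed URL that comes back.
async function uploadImage (file: Blob, userUuid: string, filename: string, token: string): Promise<void> {
  // 1. Ask the GQL API for a pending (un-reified) media object.
  const gqlResponse = await fetch('https://api.openbeta.io/graphql', { // assumed endpoint
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
    body: JSON.stringify({
      query: `mutation ($input: [MediaObjectGQLInput]!) {
        addMediaObjects(input: $input) { mediaUrl uploadUrl }
      }`, // 'uploadUrl' is a placeholder; check Media.gql for the real field name
      variables: { input: [{ userUuid, filename, maskFilename: true }] }
    })
  })
  const { data } = await gqlResponse.json()
  const uploadUrl: string = data.addMediaObjects[0].uploadUrl

  // 2. PUT the raw bytes straight to the signed URL, as the integration test
  //    does against the real bucket.
  await fetch(uploadUrl, { method: 'PUT', body: file })

  // 3. GCS then notifies the API (push or pull), which clears `expiresAt` on the
  //    pending record so it is never TTL-deleted.
}
```
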
\ No newline at end of file diff --git a/src/google-cloud/__tests__/adapter.test.ts b/src/google-cloud/__tests__/adapter.test.ts new file mode 100644 index 00000000..d7181cb9 --- /dev/null +++ b/src/google-cloud/__tests__/adapter.test.ts @@ -0,0 +1,98 @@ +import MutableMediaDataSource from '../../model/MutableMediaDataSource' +import inMemoryDB from '../../utils/inMemoryDB' +import { mediaAdded, standardMessageHandlingLifecycle } from '../adapter-interface' +import { MediaObject } from '../../db/MediaObjectTypes' +import { jest } from '@jest/globals' + +describe('Media storage notification adapter tests', () => { + let mediaDs: MutableMediaDataSource + // Our handler will take a trip past the database so we will need to provision the + // database for this suite of tests + beforeAll(async () => { await inMemoryDB.connect(); mediaDs = MutableMediaDataSource.getInstance() }) + afterAll(async () => await inMemoryDB.close()) + + async function unreifiedMedia (): Promise { + const [ref] = await mediaDs.mediaObjectModel.insertMany([{ + mediaUrl: `${process.uptime()}.test`, + width: 100, + height: 100, + format: 'test', + size: 1200, + // 10 second expiry + expiresAt: new Date().getTime() + 10_000 + }]) + + return await mediaDs.mediaObjectModel.findById(ref._id).orFail(new Error('woops')) + } + + async function reifiedMedia (): Promise { + const [ref] = await mediaDs.mediaObjectModel.insertMany([{ + mediaUrl: `${process.uptime()}.test`, + width: 100, + height: 100, + format: 'test', + size: 1200 + }]) + + return await mediaDs.mediaObjectModel.findById(ref._id).orFail(new Error('woops')) + } + + describe('standardMessageHandlingLifecycle', () => { + let mockWork: jest.Mock<(media: MediaObject, mutableDs: MutableMediaDataSource) => Promise> + + beforeEach(() => { + mockWork = jest.fn() + }) + + it('should return early if media object is not found in the database, throwing no error and not performing work', async () => { + await standardMessageHandlingLifecycle({ objectId: 'no such thing' }, mockWork) + expect(await mediaDs.mediaObjectModel.findOne({ mediaUrl: 'test-url' })) + expect(mockWork).not.toHaveBeenCalled() + }) + + it('should execute the work function for a valid, unreified media object found in the database', async () => { + const media = await unreifiedMedia() + await standardMessageHandlingLifecycle({ objectId: media.mediaUrl }, mockWork) + expect(mockWork).toHaveBeenCalled() + }) + + it('should re-throw error from work function', async () => { + const media = await unreifiedMedia() + await expect(standardMessageHandlingLifecycle( + { objectId: media.mediaUrl }, + () => { + throw new Error('error in work') + } + ) + ).rejects.toThrow(new Error('error in work')) + }) + }) + + describe('mediaAdded', () => { + it('should not attempt to update if the media object is not found', async () => { + const media = await unreifiedMedia() + await mediaDs.mediaObjectModel.deleteOne({ _id: media._id }) + await mediaAdded({ objectId: media.mediaUrl }) + await mediaAdded({ objectId: 'does not exist' }) + }) + + it('should not fail if media object has expiresAt as null (already reified)', async () => { + const media = await reifiedMedia() + const mockWork: jest.Mock<(media: MediaObject, mutableDs: MutableMediaDataSource) => Promise> = jest.fn() + await standardMessageHandlingLifecycle({ objectId: media.mediaUrl }, mockWork) + expect(mockWork).toHaveBeenCalled() + }) + + it('should update the media object to unset expiresAt if found and not already reified', async () => { + const media = await 
unreifiedMedia() + await mediaAdded({ objectId: media.mediaUrl }) + expect(await mediaDs.mediaObjectModel.findOne({ mediaUrl: media.mediaUrl }).then(x => x?.expiresAt)).toBeUndefined() + }) + + it('should not attempt to update if the media object is already reified', async () => { + const media = await reifiedMedia() + await mediaAdded({ objectId: media.mediaUrl }) + expect(await mediaDs.mediaObjectModel.findOne({ mediaUrl: media.mediaUrl }).then(x => x?.expiresAt)).toBeUndefined() + }) + }) +}) diff --git a/src/google-cloud/__tests__/integration.test.ts b/src/google-cloud/__tests__/integration.test.ts new file mode 100644 index 00000000..39a561e8 --- /dev/null +++ b/src/google-cloud/__tests__/integration.test.ts @@ -0,0 +1,172 @@ +import { GCS_BUCKET_CLIENT_EMAIL, GCS_CLOUD_BUCKET_ID as ID, GCS_MEDIA_HOOK_URL, GCS_NOTIFICATIONS_SUBSCRIPTION, GCS_PRIVATE_KEY } from '../index.js' +import { googleStorage } from '../gcs-storage' +import { gcsTopicSubscription } from '../pull-subscriber' +import { Message } from '@google-cloud/pubsub' +import express from 'express' +import { randomUUID } from 'crypto' +import { googleCloudWebHookRecieverWithValidator } from '../push-subscriber.js' +import { validateGoogleJWT } from '../google-auth.js' +import bodyParser from 'body-parser' +import { EventEmitter } from 'events' +import { logger } from '../../logger.js' + +function runIf (condition: boolean): typeof describe { + return (condition) ? describe : describe.skip +} + +const GCS_CLOUD_BUCKET_ID = ID ?? 'GCS_CLOUD_BUCKET_ID' +const PUBLIC_HOOK = process.env.GCS_MEDIA_HOOK_PUBLIC +const requirePublicWebHookInstrumentation = runIf(PUBLIC_HOOK !== undefined && PUBLIC_HOOK !== '') +const requireAuth = runIf(GCS_PRIVATE_KEY !== undefined && GCS_BUCKET_CLIENT_EMAIL !== undefined && GCS_PRIVATE_KEY !== '' && GCS_BUCKET_CLIENT_EMAIL !== '') +const requirePullSub = runIf(GCS_NOTIFICATIONS_SUBSCRIPTION !== undefined && GCS_NOTIFICATIONS_SUBSCRIPTION !== '') + +requireAuth('Google cloud services integration tests', () => { + const storage = googleStorage() + const bucket = storage.bucket(GCS_CLOUD_BUCKET_ID ?? '') + + async function uploadSmallObject (objectName: string, content?: string): Promise { + if (!objectName.endsWith('.test')) { + objectName += '.test' + } + const file = bucket.file(objectName) + await file.save(content ?? 
objectName) + }; + + beforeAll(async () => { + const [files] = await bucket.getFiles() + // delete all files that have a .test descriptor + await Promise.all(files.filter(file => file.name.includes('.test')).map(async file => await file.delete())) + }) + + test('Auth check', async () => await storage.authClient.getClient()) + test('Can read files with no error', async () => await bucket.getFiles()) + test('Can upload files', async () => { + const files = ['1', '2', '3', '4'].map(i => `${i}.test`) + + for (const filename of files) { + const file = bucket.file(filename) + await expect(file.download()).rejects.toThrow(`No such object: ${GCS_CLOUD_BUCKET_ID}/${filename}`) + } + + await Promise.all(files.map(async (f) => await uploadSmallObject(f))) + + for (const filename of files) { + const file = bucket.file(filename) + const [buffer] = await file.download() + expect(buffer.toString('utf8')).toBe(filename) + } + }) + + test('Can delete files', async () => { + const files = ['1', '2', '3', '4'].map(i => `${process.uptime()}.test`) + await Promise.all(files.map(async (f) => await uploadSmallObject(f))) + await Promise.all(files.map(async filename => await bucket.file(filename, {}).delete())) + }) + + test('Can sign urls for users', async () => { + const file = process.uptime().toString() + '.test' + const message = 'HELLO, WORLD!' + const [url] = await bucket.file(file).getSignedUrl({ + version: 'v4', + action: 'write', + expires: Date.now() + 60 * 1000 + }) + + expect(await fetch(url, { body: message, method: 'PUT' }).then(res => res.status)).toBe(200) + + const [buffer] = await bucket.file(file).download() + expect(buffer.toString('utf8')).toBe(message) + }) + + requirePublicWebHookInstrumentation('FULL integration test instrumented from end to end', () => { + const endpoint = PUBLIC_HOOK ?? 
'' + let server: ReturnType + let app: express.Express + const validationToken = randomUUID() + const emitter = new EventEmitter() + + beforeAll((done) => { + if (GCS_MEDIA_HOOK_URL === undefined) throw new Error('Cannot do integration tests without GCS_MEDIA_HOOK_URL set') + app = express() + app.get(GCS_MEDIA_HOOK_URL, (_, res) => res.send(validationToken)) + const handler = googleCloudWebHookRecieverWithValidator(validateGoogleJWT) + app.post(GCS_MEDIA_HOOK_URL, bodyParser.json(), (req, res) => { void handler(req, res).then(() => emitter.emit('request', { req })).catch(err => emitter.emit('request', { req, err })) }) + + server = app.listen(4000, () => { + done() + }) + }) + + afterAll((done) => { + server.close(() => { + logger.info('integration test server stopped') + done() + }) + }) + + test('Endpoint hosted by THIS TEST is reachable', async () => { + await expect(await fetch(endpoint).then(resp => resp.status)).toBe(200) + await expect(await fetch(endpoint).then(async resp => await resp.text())).toBe(validationToken) + }) + test('Endpoint supports TLS (if not, google will not post here)', async () => { + expect(new URL(endpoint).protocol).toBe('https:') + }) + + test('Hook is open to post requests', async () => { + const resp = await fetch(endpoint, { body: JSON.stringify({ }), method: 'POST' }) + expect(resp.status).not.toBe(404) + expect(await resp.text()).toBe('"Error: Unauthorized - Missing or invalid Authorization header"') + }) + + test('Subscriber is pushing notifications to hook', async () => { + const Authorization = randomUUID() // random for each test to prevent false positives from race + fetch(endpoint, { headers: { Authorization }, body: JSON.stringify({ }), method: 'POST' }).catch(err => { throw err }) + + await new Promise((resolve, reject) => { + emitter.on('request', ({ req, err }: { req: Request, err?: Error }) => { + if ((req.headers as any).authorization !== Authorization) return + if (err != null) return reject(err) + return resolve(Authorization) + }) + + setTimeout(() => reject(new Error('timeout waiting for google to post us back')), 5_000) + }) + }) + }) + + requirePullSub('Google pull subscriber integration santiy checks', () => { + /** + * wait for an object matching a pattern appears on the event queue. 
+ */ + async function waitForObject (objectname: string): Promise { + const sub = gcsTopicSubscription() + + return await new Promise((resolve, reject) => { + sub.on('message', (message) => { + if (message.attributes.objectId === objectname || message.attributes.objectId === objectname + '.test') { + message.ack() + sub.removeAllListeners() + resolve(message) + } + }) + + // Set a timeout to prevent indefinite hanging + const timeoutId = setTimeout(() => { + sub.removeAllListeners() + reject(new Error('Timeout to connect to Pub/Sub subscription')) + }, 10_000) + + sub.once('error', (err) => { reject(err); clearTimeout(timeoutId) }) + }) + } + + test('Auth', async () => { + const objectname = process.uptime().toString() + uploadSmallObject(objectname, 'yay!').catch(err => { throw err }) + uploadSmallObject(objectname, 'yay!').catch(err => { throw err }) + uploadSmallObject(objectname, 'yay!').catch(err => { throw err }) + uploadSmallObject(objectname, 'yay!').catch(err => { throw err }) + await waitForObject(objectname) + }) + }) +}) diff --git a/src/google-cloud/__tests__/mock-bucket.test.ts b/src/google-cloud/__tests__/mock-bucket.test.ts new file mode 100644 index 00000000..2841dac1 --- /dev/null +++ b/src/google-cloud/__tests__/mock-bucket.test.ts @@ -0,0 +1,128 @@ +import { LocalFileStorage } from '../mock-storage-bucket' +import fs from 'fs/promises' +import path from 'path' +import { BucketStorageError } from '../bucket' +import { jest } from '@jest/globals' +import sharp from 'sharp' + +async function generateSmallImage (format?: 'jpeg' | 'png' | 'avif'): Promise<{ filename: string, outputPath: string, width: number, height: number, data: Buffer }> { + const filename = `test.${process.uptime()}.${format ?? 'jpg'}` + const outputPath = `./bucket/${filename}` + const width: number = Math.floor(100 * Math.random()) + 10 + const height: number = Math.floor(100 * Math.random()) + 10 + + // Create a simple white buffer as the base image + const whiteBuffer = Buffer.from( + ` + + ` + ) + + if (format === 'png') { + await sharp(whiteBuffer, {}) + .png() + .toFile(outputPath) + } else if (format === 'avif') { + await sharp(whiteBuffer, {}) + .avif() + .toFile(outputPath) + } else { + await sharp(whiteBuffer, {}) + .jpeg() + .toFile(outputPath) + } + + return { filename, outputPath, width, height, data: await fs.readFile(outputPath) } +} + +describe('LocalFileStorage', () => { + let storage: LocalFileStorage + const bucketDir = './bucket' + + beforeEach(async () => { + storage = new LocalFileStorage() + // Create the bucket directory if it doesn't exist + await fs.mkdir(bucketDir, { recursive: true }) + }) + + afterEach(async () => { + // Clean up the bucket directory after each test + try { + const files = await fs.readdir(bucketDir) + await Promise.all(files.filter(i => i.startsWith('test.')).map(async file => await fs.unlink(path.join(bucketDir, file)))) + await fs.rmdir(bucketDir) + } catch (error: any) { + if (error.code !== 'ENOENT') { + console.error('Error cleaning up bucket directory:', error) + } + } + jest.clearAllMocks() + }) + + describe('signedUrl', () => { + it('should return a signed URL with the correct path and expiration', async () => { + const filePath = 'test/image.jpg' + const result = await storage.signedUrl(filePath) + expect(result.url).toBe(`http://localhost:4000/rest/media/${filePath}`) + expect(result.expires).toBeGreaterThan(Date.now()) + // Check if expiration is approximately 15 minutes from now + expect(result.expires).toBeLessThan(Date.now() + (15 * 60 * 1000) 
+ 1000) // Adding a small buffer + }) + }) + + describe('deleteFile', () => { + it('should delete an existing file', async () => { + const { filename, outputPath } = await generateSmallImage() + await storage.deleteFile(filename) + await expect(fs.access(outputPath)).rejects.toThrow('ENOENT') + }) + + it('should throw an error if the file does not exist', async () => { + const filePath = 'nonexistent.txt' + await expect(storage.deleteFile(filePath)).rejects.toThrow(Error("ENOENT: no such file or directory, unlink './bucket/nonexistent.txt'")) + }) + }) + + describe('getFileInfo', () => { + it('should return file info for a JPEG image', async () => { + const { filename, data, width, height } = await generateSmallImage() + const result = await storage.getFileInfo(filename) + expect(result.size).toBe(data.length) + expect(result.width).toBe(width) + expect(result.height).toBe(height) + expect(result.format).toBe('jpeg') + }) + + it('should return file info for a PNG image', async () => { + const { filename, data, width, height } = await generateSmallImage('png') + const result = await storage.getFileInfo(filename) + expect(result.size).toBe(data.length) + expect(result.width).toBe(width) + expect(result.height).toBe(height) + expect(result.format).toBe('png') + }) + + it('should return file info for an AVIF image', async () => { + const { filename, data, width, height } = await generateSmallImage('avif') + const result = await storage.getFileInfo(filename) + expect(result.size).toBe(data.length) + expect(result.width).toBe(width) + expect(result.height).toBe(height) + expect(result.format).toBe('avif') + }) + + it('should throw BucketStorageError if fileTypeFromFile returns undefined', async () => { + const filePath = 'unknown.file' + const fullPath = path.join(bucketDir, filePath) + await fs.writeFile(fullPath, Buffer.from('unknown data')) + await expect(storage.getFileInfo(filePath)).rejects.toThrow(BucketStorageError) + }) + + it('should throw BucketStorageError if width or height is undefined after decoding an image', async () => { + const filePath = 'broken.jpg' + const fullPath = path.join(bucketDir, filePath) + await fs.writeFile(fullPath, Buffer.from('corrupted jpeg data')) + await expect(storage.getFileInfo(filePath)).rejects.toThrow(BucketStorageError) + }) + }) +}) diff --git a/src/google-cloud/__tests__/mock-upload-route.test.ts b/src/google-cloud/__tests__/mock-upload-route.test.ts new file mode 100644 index 00000000..3fd88e5e --- /dev/null +++ b/src/google-cloud/__tests__/mock-upload-route.test.ts @@ -0,0 +1,104 @@ +import request from 'supertest' +import express, { Express } from 'express' +import router from '../mock-storage-upload' +import fs from 'fs/promises' +import path from 'path' +import { jest } from '@jest/globals' +import inMemoryDB from '../../utils/inMemoryDB' + +describe('Development Media Upload Route', () => { + let app: Express + const bucketDir = path.join('./bucket') + + beforeAll(async () => { + await inMemoryDB.connect() + // Create a temporary bucket directory if it doesn't exist + await fs.mkdir(bucketDir, { recursive: true }) + app = express() + app.use(express.raw({ type: '*/*' })) + app.use('/rest', router) + }) + + afterEach(async () => { + // Clean up the bucket directory after each test + try { + const files = await fs.readdir(bucketDir) + await Promise.all(files.map(async file => await fs.unlink(path.join(bucketDir, file)))) + } catch (error: any) { + if (error.code !== 'ENOENT') { + console.error('Error cleaning up bucket directory:', error) + } + 
} + jest.clearAllMocks() + }) + + afterAll(async () => { + // Remove the bucket directory after all tests + try { + await fs.rmdir(bucketDir) + } catch (error: any) { + if (error.code !== 'ENOENT') { + console.error('Error removing bucket directory:', error) + } + } + await inMemoryDB.close() + }) + + it('should successfully upload raw file data', async () => { + const filename = 'test-upload.txt' + const fileContent = Buffer.from('This is some test content.') + + const response = await request(app) + .put(`/rest/media/${filename}`) + .send(fileContent) + .set('Content-Type', 'text/plain') + + expect(response.statusCode).toBe(200) + expect(response.body).toEqual({ message: 'File uploaded successfully.', path: filename }) + + // Check if the file was actually created in the bucket + const filePath = path.join(bucketDir, filename) + const uploadedContent = await fs.readFile(filePath) + expect(uploadedContent).toEqual(fileContent) + }) + + it('should return 400 if no file data is in the request body', async () => { + const filename = 'empty-upload.txt' + + const response = await request(app) + .put(`/rest/media/${filename}`) + .send('') + .set('Content-Type', 'text/plain') + + expect(response.statusCode).toBe(400) + expect(response.body).toEqual({ error: 'No file data in the request body.' }) + + // Check if the file was NOT created + const filePath = path.join(bucketDir, filename) + await expect(fs.access(filePath)).rejects.toThrow('ENOENT') + }) + + it('should handle errors during file writing and return 500', async () => { + const filename = 'error-upload.txt' + const fileContent = Buffer.from('This should fail.') + + // Mock fs.writeFile to throw an error + const originalWriteFile = fs.writeFile + // @ts-expect-error + fs.writeFile = jest.fn().mockRejectedValue(new Error('Simulated file system error')) + + const response = await request(app) + .put(`/rest/media/${filename}`) + .send(fileContent) + .set('Content-Type', 'text/plain') + + expect(response.statusCode).toBe(500) + expect(response.body.error).toContain('Failed to upload file') + + // Check if the file was NOT created + const filePath = path.join(bucketDir, filename) + await expect(fs.access(filePath)).rejects.toThrow('ENOENT') + // Restore the original fs.writeFile + fs.writeFile = originalWriteFile + }) +}) diff --git a/src/google-cloud/__tests__/pull.test.ts b/src/google-cloud/__tests__/pull.test.ts new file mode 100644 index 00000000..772cb745 --- /dev/null +++ b/src/google-cloud/__tests__/pull.test.ts @@ -0,0 +1,93 @@ +import { Message, PubSub, Subscription } from '@google-cloud/pubsub' +import { handleMessageOnChannel } from '../pull-subscriber' +import { Subscriber } from '@google-cloud/pubsub/build/src/subscriber' +import { GCS_CLOUD_BUCKET_ID } from '../index.js' +import { PubsubMessage } from '@google-cloud/pubsub/build/src/publisher' +import merge from 'deepmerge' +import { MessageType } from '../push-subscriber' +import inMemoryDB from '../../utils/inMemoryDB' +import { jest } from '@jest/globals' +import { MessageHandlingError } from '../bucket' + +if (GCS_CLOUD_BUCKET_ID === undefined) throw new Error('We cannot run this test without a bucketID') + +const mockMessage: PubsubMessage = { + attributes: { + bucketId: GCS_CLOUD_BUCKET_ID, + eventType: 'OBJECT_FINALIZE', + objectId: 'test-object-id' + }, + data: Buffer.from(JSON.stringify({ id: `${GCS_CLOUD_BUCKET_ID}/test-object-id` })).toString('base64'), + messageId: 'test-message-id' +} + +describe('Message handlers for pull-pattern GCS subscribers', () => { + const 
dummy = new PubSub() + const subscriber = new Subscriber(new Subscription(dummy, 'test')) + + // Our handler will take a trip past the database so we will need to provision the + // database for this suite of tests + beforeAll(async () => { await inMemoryDB.connect() }) + afterAll(async () => await inMemoryDB.close()) + + async function message (override: Partial): Promise { + const obj = new Message(subscriber, { message: merge(mockMessage, override) }) + + obj.ack = jest.fn(() => {}) + obj.nack = jest.fn(() => {}) + + return await handleMessageOnChannel(obj) + } + + it('should succeed at basic validation', async () => await message({})) + + it('should throw if the eventType is missing', async () => { + const override = { attributes: { eventType: undefined } } + await expect(message(override)) + .rejects + .toThrow(new MessageHandlingError('No discernable event type (body.message.attributes.eventType)')) + }) + + it('should throw if the bucketId does not match GCS_CLOUD_BUCKET_ID', async () => { + const override: MessageType = { + attributes: { + bucketId: 'wrong-bucket-id' + } + } + await expect(message(override)) + .rejects + .toThrow(new MessageHandlingError('Request is authentically google, but someone is polluting by pointing their unrelated bucket here (wrong-bucket-id)')) + }) + + it('should throw if media identity cannot be discerned (missing objectId)', async () => { + const override: Partial = { + attributes: { + objectId: undefined + } + } + + await expect(message(override)) + .rejects + .toThrow(new MessageHandlingError('Could not discern media identity (objectId Missing)')) + + await expect(message({ + attributes: { + objectId: '' + } + })) + .rejects + .toThrow(new MessageHandlingError('Could not discern media identity (objectId Missing)')) + }) + + it('should call mediaAdded only for OBJECT_FINALIZE events', async () => { + const override: Partial = { + attributes: { + eventType: 'OBJECT_METADATA_UPDATE' + } + } + + await expect(message(override)) + .rejects + .toThrow(new MessageHandlingError(`This hook is not designed to process ${override.attributes?.eventType ?? 
'undefined'} events`)) + }) +}) diff --git a/src/google-cloud/__tests__/push.test.ts b/src/google-cloud/__tests__/push.test.ts new file mode 100644 index 00000000..124ee7cb --- /dev/null +++ b/src/google-cloud/__tests__/push.test.ts @@ -0,0 +1,153 @@ +import request from 'supertest' +import express, { } from 'express' +import { googleCloudWebHookRecieverWithValidator, RootEventType } from '../push-subscriber' +import { GCS_CLOUD_BUCKET_ID } from '../index.js' +import inMemoryDB from '../../utils/inMemoryDB' +import { getAreaModel } from '../../db' +import merge from 'deepmerge' +import { MessageHandlingError } from '../bucket' + +if (GCS_CLOUD_BUCKET_ID === undefined) { + throw new Error('We need GCS_CLOUD_BUCKET_ID variable set for tests') +} + +const mockBody: Partial = { + message: { + attributes: { + bucketId: GCS_CLOUD_BUCKET_ID, + eventType: 'OBJECT_FINALIZE', + objectId: 'test-object-id' + }, + data: Buffer.from(JSON.stringify({ id: `${GCS_CLOUD_BUCKET_ID}/test-object-id` })).toString('base64'), + messageId: 'test-message-id' + }, + subscription: 'test-subscription' +} + +describe('googleCloudWebHookReciever', () => { + let app: express.Express + + // Helper function to create an express app with the route + const createApp = (): express.Express => { + const mockApp = express() + mockApp.use(express.json()) + const handler = googleCloudWebHookRecieverWithValidator(async (req) => { + if (req.headers.authorization !== 'evil') return {} + throw new MessageHandlingError('Unauthorized - Invalid JWT') + }) + mockApp.post('/gcs-webhook', (req, res) => { void handler(req, res).catch(console.error) }) + + return mockApp + } + + beforeAll(async () => { + app = createApp() + await inMemoryDB.connect() + await getAreaModel().collection.drop() + }) + + afterAll(inMemoryDB.close) + + it('should return 200 and ack the message for a valid OBJECT_FINALIZE event', async () => { + const response = await request(app) + .post('/gcs-webhook') + .send(mockBody) + + expect(response.statusCode).toBe(200) + expect(response.text).toBe('') + }) + + it('should return 500 if validateGoogleJWT cannot parse the result', async () => { + const response = await request(app) + .post('/gcs-webhook') + .set('Authorization', 'evil') + .send(mockBody) + + expect(response.statusCode).toBe(500) + }) + + it('should return 500 if the request body is malformed (missing message)', async () => { + const response = await request(app) + .post('/gcs-webhook') + .send({ unreleated: 'Some random payload' }) + + expect(response.statusCode).toBe(500) + expect(response.body).toEqual('Error: ' + new MessageHandlingError('malformed data at the hook').message) + }) + + it('should return 500 if the eventType is missing', async () => { + const override = { message: { attributes: { eventType: undefined } } } + const response = await request(app) + .post('/gcs-webhook') + .send(merge(mockBody, override)) + + expect(response.statusCode).toBe(500) + expect(response.body).toEqual('Error: ' + new MessageHandlingError('No discernable event type (body.message.attributes.eventType)').message) + }) + + it('should return 500 if the bucketId does not match GCS_CLOUD_BUCKET_ID', async () => { + const override: Partial = { + message: { + attributes: { + bucketId: 'wrong-bucket-id' + } + } + } + + const response = await request(app) + .post('/gcs-webhook') + .send(merge(mockBody, override)) + + expect(response.statusCode).toBe(500) + expect(response.body).toEqual('Error: ' + new MessageHandlingError('Request is authentically google, but someone is 
polluting by pointing their unrelated bucket here (wrong-bucket-id)').message) + }) + + it('should return 500 if decoding GCS data fails', async () => { + const override = { + message: { + data: 'invalid-base64' + } + } + + const response = await request(app) + .post('/gcs-webhook') + .send(merge(mockBody, override)) + + expect(response.statusCode).toBe(500) + expect(response.body).toBe('Error: ' + new MessageHandlingError('Failed to decode base64 string').message) + }) + + it('should return 500 if media identity cannot be discerned (missing objectId)', async () => { + const override: Partial = { + message: { + attributes: { + objectId: undefined + } + } + } + + const response = await request(app) + .post('/gcs-webhook') + .send(merge(mockBody, override)) + + expect(response.statusCode).toBe(500) + expect(response.body).toContain('Could not discern a media identity') + }) + + it('should call mediaAdded only for OBJECT_FINALIZE events', async () => { + const override: Partial = { + message: { + attributes: { + eventType: 'OBJECT_METADATA_UPDATE' + } + } + } + + const response = await request(app) + .post('/gcs-webhook') + .send(merge(mockBody, override)) + + expect(response.statusCode).toBe(500) + expect(response.body).toBe('Error: ' + new MessageHandlingError(`This hook is not designed to process ${override.message?.attributes?.eventType ?? ''} events`).message) + }) +}) diff --git a/src/google-cloud/adapter-interface.ts b/src/google-cloud/adapter-interface.ts new file mode 100644 index 00000000..4a401b47 --- /dev/null +++ b/src/google-cloud/adapter-interface.ts @@ -0,0 +1,114 @@ +import { logger } from '../logger.js' +import { ImageFormatType, MediaObject } from '../db/MediaObjectTypes.js' +import MutableMediaDataSource from '../model/MutableMediaDataSource.js' +import { googleStorage } from './gcs-storage.js' +import { GCS_CLOUD_BUCKET_ID } from './index.js' +import { Storage } from '@google-cloud/storage' +import { BucketStorage, BucketStorageError, MediaIdentity } from './bucket.js' + +export async function standardMessageHandlingLifecycle (message: MediaIdentity, work: (media: MediaObject, mutableDs: MutableMediaDataSource) => Promise): Promise { + const mutableDs = MutableMediaDataSource.getInstance() + logger.debug(`GCS delivered message to process ${message.objectId}`) + + try { + const media: MediaObject | null = await mutableDs.mediaObjectModel.findOne({ mediaUrl: message.objectId }) + + if (media === null) { + // In this instance an object has been created that we have not been told about. Presumably, + // the user must have acquired authorization to upload this image so we don't necessarily need + // to throw a fit. We could create the object for them, except that we have no way to trust that + // the filename contains reliable authenticated info. + return + } + + // An unreified and valid media object + await work(media, mutableDs) + } catch (error) { + logger.error(error.message) + throw error + } +} + +export async function mediaAdded (message: MediaIdentity): Promise { + await standardMessageHandlingLifecycle(message, async (media, mutableDs) => { + // If we have already flagged this media as reified then we needn't do any message processing + // and we can step over immediately to acknowledging the message. + if (media.expiresAt === undefined) { + return + } + + // Prevent mongodb from cleaning up this record, since it has been reified by the user. 
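+    // The schema defines a TTL index on `expiresAt` (`expires: 0` in
+    // MediaObjectSchema.ts); documents whose timestamp lapses are deleted by
+    // MongoDB, so unsetting the field takes the finalized object out of that index.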
+ await mutableDs.mediaObjectModel.updateOne({ _id: media._id }, { $unset: { expiresAt: 1 } }) + }) +} + +export class GoogleStorage implements BucketStorage { + private readonly storage: Storage + private readonly bucketName: string + + constructor (bucketName: string = GCS_CLOUD_BUCKET_ID ?? '') { + if (bucketName === '') throw new Error('env var GCS_CLOUD_BUCKET_ID is not set or you did not provide a proper string to GoogleStorage') + this.storage = googleStorage() + this.bucketName = bucketName + } + + async fileExists (url: string): Promise<[boolean]> { + return await this.storage.bucket(this.bucketName).file(url).exists() + } + + async signedUrl (filename: string): Promise<{ url: string, expires: number }> { + const expires = Date.now() + 15 * 60 * 1000 + const options = { + version: 'v4' as 'v4', + action: 'write' as 'write', + expires + } + + const [url] = await this.storage + .bucket(this.bucketName) + .file(filename) + .getSignedUrl(options) + + return { url, expires } + } + + async getFileInfo (url: string): Promise> { + const parsedUrl = new URL(url) + const pathParts = parsedUrl.pathname.split('/') + const fileName = pathParts.pop() + const bucketName = pathParts[1] + + if (fileName === undefined || fileName === '' || bucketName === undefined || bucketName === '') { + throw new BucketStorageError('Invalid URL format.') + } + + const file = this.storage.bucket(bucketName).file(fileName) + const [metadata] = await file.getMetadata() + + if (metadata === undefined) { + throw new BucketStorageError('File not found.') + } + + const size = parseInt(metadata.size, 10) + const width = parseInt(metadata.width, 10) + const height = parseInt(metadata.height, 10) + const format: ImageFormatType = metadata.contentEncoding + + if (format === undefined) { throw new BucketStorageError(`Format could not be determined from ${JSON.stringify(metadata)}`) } + + return { size, width, height, format } + } + + async deleteFile (url: string): Promise { + const parsedUrl = new URL(url) + const pathParts = parsedUrl.pathname.split('/') + const fileName = pathParts.pop() + const bucketName = pathParts[1] + + if (fileName === undefined || fileName === '' || bucketName === undefined || bucketName === '') { + throw new BucketStorageError('Invalid URL format.') + } + + await this.storage.bucket(bucketName).file(fileName).delete() + } +} diff --git a/src/google-cloud/bucket.ts b/src/google-cloud/bucket.ts new file mode 100644 index 00000000..b5eccb33 --- /dev/null +++ b/src/google-cloud/bucket.ts @@ -0,0 +1,33 @@ +import { MediaObject } from '../db/MediaObjectTypes.js' +import { extname } from 'path' +import { customAlphabet } from 'nanoid' + +const nolookalikesSafe = '6789BCDFGHJKLMNPQRTWbcdfghjkmnpqrtwz' +export const safeRandomFilename = customAlphabet(nolookalikesSafe, 10) +export const safeFilename = (original: string): string => { + return safeRandomFilename() + extname(original) +} + +export interface BucketStorage { + signedUrl: (path: string) => Promise<{ url: string, expires: number }> + deleteFile: (url: string) => Promise + getFileInfo: (url: string) => Promise> + fileExists: (url: string | string[]) => Promise +} + +export class BucketStorageError extends Error {} + +/** + * The adapter interface at this level is quite primitive, but depends on one + * key principal which is not enforced in any meaningful sense but is likely to hold + * as the project proceeds: Regardless of where the media is stored, we hold a url + * reference to it in our data store. 
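+ * In practice that means a GCS object name in production, or a file under
+ * ./bucket when using the local mock storage; both resolve through the same
+ * `objectId` -> `mediaUrl` lookup in the adapter.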
+ **/ +export interface MediaIdentity { + /** + * This field is cognate to the mediaUrl in our data store. + */ + objectId: string +} + +export class MessageHandlingError extends Error {} diff --git a/src/google-cloud/gcs-storage.ts b/src/google-cloud/gcs-storage.ts new file mode 100644 index 00000000..a5d80ad3 --- /dev/null +++ b/src/google-cloud/gcs-storage.ts @@ -0,0 +1,11 @@ +import { Storage } from '@google-cloud/storage' +import { GCS_BUCKET_CLIENT_EMAIL, GCS_PRIVATE_KEY, GCS_PROJECT_ID } from './index.js' + +export const googleStorage = (): Storage => new Storage({ + projectId: GCS_PROJECT_ID, + credentials: { + type: 'service_account', + private_key: GCS_PRIVATE_KEY, + client_email: GCS_BUCKET_CLIENT_EMAIL + } +}) diff --git a/src/google-cloud/google-auth.ts b/src/google-cloud/google-auth.ts new file mode 100644 index 00000000..25ee0f85 --- /dev/null +++ b/src/google-cloud/google-auth.ts @@ -0,0 +1,55 @@ +import { OAuth2Client } from 'google-auth-library' +import { Request } from 'express' +import { logger } from '../logger.js' +import { MessageHandlingError } from './bucket.js' + +const client = new OAuth2Client() + +export interface JWTPayload { + iss?: string + aud?: string + exp?: number + iat?: number + email?: string + [key: string]: any +} + +export type JwtValidator = (req: Request) => Promise + +/** + * Utilizes a Google client library to verify the token's signature, audience + * against a configured environment variable, issuer against the expected Google + * accounts issuer, and expiration time against the current time. If any of these + * checks fail, the function sends an unauthorized (401) response with a corresponding + * error message (nack) + */ +export async function validateGoogleJWT (req: Request): Promise { + const authorizationHeader = req.headers.authorization + + if (authorizationHeader === undefined || !authorizationHeader.startsWith('Bearer ')) { + throw new MessageHandlingError('Unauthorized - Missing or invalid Authorization header') + } + + const jwtToken = authorizationHeader.substring(7) + + try { + const ticket = await client.verifyIdToken({ idToken: jwtToken }) + const payload = ticket.getPayload() as JWTPayload + const expectedIssuer = 'https://accounts.google.com' + + // Google MUST have issued this token + if ((payload.iss ?? '') === '' || payload.iss !== expectedIssuer) { + throw new MessageHandlingError('Unauthorized - Invalid JWT - Incorrect issuer') + } + + // The token cannot be stale + if ((payload.exp === undefined) || payload.exp <= Math.floor(Date.now() / 1000)) { + throw new MessageHandlingError('Unauthorized - Invalid JWT - Expired') + } + + return payload + } catch (error: any) { + logger.error('Error validating Google JWT:', error) + throw new MessageHandlingError('Unauthorized - Invalid JWT') + } +} diff --git a/src/google-cloud/index.ts b/src/google-cloud/index.ts new file mode 100644 index 00000000..14b0bf58 --- /dev/null +++ b/src/google-cloud/index.ts @@ -0,0 +1,27 @@ +import Config from '../Config.js' +import fs from 'fs' +import { logger } from '../logger.js' + +const GCS_ENABLE_SERVICES: boolean = process.env.GCS_ENABLE_SERVICES === 'true' +const GCS_PROJECT_ID = process.env.GCS_PROJECT_ID +const GCS_CLOUD_BUCKET_ID = process.env.GCS_CLOUD_BUCKET_ID +const GCS_NOTIFICATIONS_SUBSCRIPTION = process.env.GCS_NOTIFICATIONS_SUBSCRIPTION +const GCS_MEDIA_HOOK_URL = process.env.GCS_MEDIA_HOOK_URL +const GCS_BUCKET_CLIENT_EMAIL = process.env.GCS_BUCKET_CLIENT_EMAIL +const GCS_PRIVATE_KEY = process.env.GCS_PRIVATE_KEY ?? 
+ ( + fs.existsSync('./key.json') ? JSON.parse(fs.readFileSync('./key.json')?.toString()).private_key : undefined) + +if (Config.DEPLOYMENT_ENV === 'production' && !GCS_ENABLE_SERVICES) { + logger.warn('GCS is disabled!!!!') +} + +export { + GCS_ENABLE_SERVICES, + GCS_PROJECT_ID, + GCS_CLOUD_BUCKET_ID, + GCS_NOTIFICATIONS_SUBSCRIPTION, + GCS_MEDIA_HOOK_URL, + GCS_BUCKET_CLIENT_EMAIL, + GCS_PRIVATE_KEY +} diff --git a/src/google-cloud/mock-storage-bucket.ts b/src/google-cloud/mock-storage-bucket.ts new file mode 100644 index 00000000..8f71f514 --- /dev/null +++ b/src/google-cloud/mock-storage-bucket.ts @@ -0,0 +1,67 @@ +import { MediaObject } from '../db/MediaObjectTypes' +import fs from 'fs/promises' +import { fileTypeFromFile } from 'file-type' +import decode from 'image-decode' +import sizeOf from 'image-size' +import { BucketStorage, BucketStorageError } from './bucket.js' + +/** + * When doing local development and local integration, we can use a localfilestorage interface + */ +export class LocalFileStorage implements BucketStorage { + async fileExists (url: string | string[]): Promise { + if (Array.isArray(url)) { + return await Promise.all(url.map(async f => await fs.readFile(`./bucket/${f}`).then(() => true).catch(() => false))) + } + return [await fs.readFile(`./bucket/${url}`).then(() => true).catch(() => false)] + } + + async signedUrl (path: string): Promise<{ url: string, expires: number }> { + const expires = Date.now() + 15 * 60 * 1000 + + return { url: `http://localhost:4000/rest/media/${path}`, expires } + } + + async deleteFile (url: string): Promise { + await fs.unlink(`./bucket/${url}`) + } + + async getFileInfo (url: string): Promise> { + const localFilePath = `./bucket/${url}` + const buffer = await fs.readFile(localFilePath) + const stats = await fs.stat(localFilePath) + const size = stats.size + let width: number | undefined + let height: number | undefined + let format: string | undefined + + const fileTypeResult = await fileTypeFromFile(localFilePath) + if (fileTypeResult !== undefined) { + format = fileTypeResult.mime.replace('image/', '') + + try { + const dimensions = sizeOf(buffer) + width = dimensions.width + height = dimensions.height + } catch (error) { + console.warn(`Error getting dimensions with image-size for ${localFilePath}:`, error) + // Fallback to decode if it's a supported image type (excluding AVIF for now) + if (['jpeg', 'png'].includes(format)) { + try { + const image = decode(buffer) + width = image.width + height = image.height + } catch (decodeError) { + console.warn(`Error decoding image ${localFilePath}:`, decodeError) + } + } + } + } + + if (width === undefined || height === undefined || format === undefined) { + throw new BucketStorageError(`Could not determine width/height/format of file ${JSON.stringify({ width, height, format })}`) + } + + return { size, width, height, format: format as MediaObject['format'] } + } +} diff --git a/src/google-cloud/mock-storage-upload.ts b/src/google-cloud/mock-storage-upload.ts new file mode 100644 index 00000000..fb4edf3d --- /dev/null +++ b/src/google-cloud/mock-storage-upload.ts @@ -0,0 +1,66 @@ +import express, { Request, Response } from 'express' +import path from 'path' +import fs from 'fs/promises' +import { logger } from '../logger.js' +import { mediaAdded } from './adapter-interface.js' + +const router = express.Router() + +/** + * PUT /rest/upload/:filename + * Uploads raw file data from the request body to the local bucket. 
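+ * In development this is the endpoint that LocalFileStorage.signedUrl hands out,
+ * e.g. `PUT http://localhost:4000/rest/media/<filename>` with the raw bytes as the body.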
+ * The filename in the URL will be used as the final name of the file. + */ +router.put('/media/:filename', (req: Request, res: Response) => { + void (async () => { + const filename = req.params.filename + const filePath = path.join('./bucket', filename) // Adjust path as needed + + try { + if (req.body === undefined || req.body === '' || req.body.length === 0) { + return res.status(400).json({ error: 'No file data in the request body.' }) + } + + // Ensure the 'bucket' directory exists + await fs.mkdir(path.dirname(filePath), { recursive: true }) + // Write the raw request body to the specified file path + await fs.writeFile(filePath, req.body) + + res.status(200).json({ message: 'File uploaded successfully.', path: filename }) + // For mocking purposes, we don't need to do anything other than imagine that + // the hook is called by the remote. + // There is an unfortunate amount of logic wrapped into the google POST handler + void mediaAdded({ objectId: filename }) + } catch (error: any) { + if (error instanceof Error) { + res.status(500).json({ error: `Failed to upload file: ${error.message}` }) + } else { + res.status(500).json({ error: 'Failed to upload file because of an unspecified error' }) + } + } + })().catch(logger.error) +}) + +/** + * GET /rest/upload/:filename + * Retrieves a file from the local bucket. + */ +router.get('/media/:filename', (req: Request, res: Response) => { + void (async () => { + const filename = req.params.filename + const filePath = path.join('./bucket', filename) // Adjust path as needed + + try { + await fs.access(filePath) + res.sendFile(filePath) + } catch (error: any) { + if (error.code === 'ENOENT') { + return res.status(404).json({ error: 'File not found.' }) + } + console.error('Error retrieving file:', error) + res.status(500).json({ error: 'Failed to retrieve file.' }) + } + })().then(() => logger.info('Image uploaded!')).catch(logger.error) +}) + +export default router diff --git a/src/google-cloud/pull-subscriber.ts b/src/google-cloud/pull-subscriber.ts new file mode 100644 index 00000000..fb8cca45 --- /dev/null +++ b/src/google-cloud/pull-subscriber.ts @@ -0,0 +1,45 @@ +import { Message, PubSub, StatusError, Subscription } from '@google-cloud/pubsub' +import { logger } from '../logger.js' +import { GCS_BUCKET_CLIENT_EMAIL, GCS_CLOUD_BUCKET_ID, GCS_NOTIFICATIONS_SUBSCRIPTION, GCS_PRIVATE_KEY } from './index.js' +import { mediaAdded } from './adapter-interface.js' +import { validateMessageAttributes } from './push-subscriber.js' +import { MediaIdentity, MessageHandlingError } from './bucket.js' + +// Initialize the Pub/Sub client +const pubSubClient = new PubSub({ + projectId: GCS_CLOUD_BUCKET_ID, + credentials: { + type: 'service_account', + private_key: GCS_PRIVATE_KEY, + client_email: GCS_BUCKET_CLIENT_EMAIL + } +}) + +export function gcsTopicSubscription (): Subscription { + return pubSubClient.subscription(GCS_NOTIFICATIONS_SUBSCRIPTION ?? 
'') +} + +export async function handleErrorFromBucket (error: StatusError): Promise { + logger.error(`Google cloud produced a status error ${error.message} (${JSON.stringify(error)})`) +} + +export async function handleMessageOnChannel (message: Message): Promise { + try { + validateMessageAttributes(message.attributes) + + const media: MediaIdentity = { + objectId: message.attributes.objectId + } + + if (media.objectId === '' || media.objectId === undefined) { + throw new MessageHandlingError('Could not discern media identity (objectId Missing)') + } + + await mediaAdded(media) + } catch (e) { + message.nack() + throw e + } + + message.ack() +} diff --git a/src/google-cloud/push-subscriber.ts b/src/google-cloud/push-subscriber.ts new file mode 100644 index 00000000..fa1c11d9 --- /dev/null +++ b/src/google-cloud/push-subscriber.ts @@ -0,0 +1,160 @@ +import { Request, Response } from 'express' +import { logger } from '../logger.js' +import { JwtValidator } from './google-auth.js' +import { mediaAdded } from './adapter-interface.js' +import { GCS_CLOUD_BUCKET_ID } from './index.js' +import { MessageHandlingError, MediaIdentity } from './bucket.js' + +export interface RootEventType { + message: MessageType + subscription: string +} + +export interface MessageType { + attributes: Partial + data?: string + messageId?: string + message_id?: string + publishTime?: string + publish_time?: string +} + +export interface MessageAttributes extends Partial<{ + bucketId: string + eventTime: string + eventType: string + notificationConfig: string + objectGeneration: string + objectId: string + payloadFormat: string +}> {} + +export interface GCSMessageData { + kind: string + id: string + selfLink: string + name: string + bucket: string + generation: string + metageneration: string + contentType: string + timeCreated: string + updated: string + storageClass: string + timeStorageClassUpdated: string + size: string + md5Hash: string + mediaLink: string + crc32c: string + etag: string +} + +function decodeGCSData (data: string | undefined): GCSMessageData { + if (data === undefined) { + throw new MessageHandlingError('FAILURE: -> Attempt to decode empty data string') + } + + try { + // Decode the base64 encoded data + const base64Encoded = data + const decodedString = Buffer.from(base64Encoded, 'base64').toString('utf-8') + + // Parse the JSON string + return JSON.parse(decodedString) + } catch (error) { + logger.error(error) + throw new MessageHandlingError('Failed to decode base64 string') + } +} + +/** + * You may not necessarily have a straightforward to develop and test with this, + * as it requires some external service to post a webhook here (If you use ngrok + * or something like that, then obviously this is not the case) but otherwise + * you may struggle. + * + * To see why we would use this rather than the subscriber API you can take a look in the readme + * + * The hook reciever may recieve all manner of data through this endpoint but mostly + * we are interested in recognising events that we are waiting for and dispatching + * them to the shared logic. + * + * The auth here is unique compared to other parts of this application and that opens up + * a couple of challenges to us if middlewares start getting too involved before the + * webhook can make it down here. + * + * Google Cloud Services have a concept called Service Accounts (see readme) which + * can generate and authenticate using signed tokens. 
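+ *
+ * Typically the push subscription is configured to attach a service-account
+ * OIDC token, so each POST arrives with an `Authorization: Bearer <jwt>` header;
+ * the injected validator (see google-auth.ts) rejects anything Google did not sign.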
+ */
+export async function googleCloudWebHookReciever (req: Request, res: Response, validator: JwtValidator): Promise<void> {
+  try {
+    await validator(req)
+    const body: Partial<RootEventType> = validateBody(req)
+    const media: MediaIdentity = extractMediaIdentity(body)
+    await mediaAdded(media)
+    // When we set the status to 200 google will consider this message as being ack'd, and will not
+    // dispatch it again to the endpoint - essentially consuming it. This is crucial to prevent
+    // messages from piling up.
+    res.status(200).send()
+    logger.info(`ACK to message ${body?.message?.messageId ?? 'unknown message id'}`)
+  } catch (error) {
+    logger.debug(`Error in GCS message handler. message: ${JSON.stringify(req.body)}`)
+    res.status(500).json(error?.toString())
+  }
+}
+
+/**
+ * Check that the body of an incoming request contains the data that we need to identify media
+ * in our own system
+ */
+function extractMediaIdentity (body: Partial<RootEventType>): MediaIdentity {
+  const media: MediaIdentity = {
+    objectId: body?.message?.attributes?.objectId ?? ''
+  }
+
+  if (media.objectId === '') {
+    throw new MessageHandlingError(`Could not discern a media identity: ${JSON.stringify(media)}`)
+  }
+
+  return media
+}
+
+/** Check that the body of an incoming request looks like the kind we expect from google */
+function validateBody (req: Request): Partial<RootEventType> {
+  const body: Partial<RootEventType> = req.body
+  if (body.message === undefined) {
+    throw new MessageHandlingError(
+      'Malformed data at the hook'
+    )
+  }
+
+  validateMessageAttributes(body.message.attributes)
+
+  // This does produce relevant data, but we use it here purely as a validation step.
+  // If you are experiencing fragility you can try without it.
+  decodeGCSData(body.message?.data)
+
+  return body
+}
+
+export function validateMessageAttributes (attributes: Partial<MessageAttributes>): void {
+  const eventType = attributes?.eventType
+  if (eventType === undefined) throw new MessageHandlingError('No discernable event type (body.message.attributes.eventType)')
+  if (eventType !== 'OBJECT_FINALIZE') {
+    throw new MessageHandlingError(`This hook is not designed to process ${eventType ?? 'undefined'} events`)
+  }
+
+  if (attributes?.bucketId === undefined) {
+    throw new MessageHandlingError('Request is authentically Google, but the target bucketId was not set')
+  }
+
+  if (attributes.bucketId !== GCS_CLOUD_BUCKET_ID) {
+    logger.warn(`Received a notification for the wrong bucket: ${attributes.bucketId}`)
+    throw new MessageHandlingError(`Request is authentically Google, but someone is polluting by pointing their unrelated bucket here (${attributes.bucketId})`)
+  }
+}
+
+/** Higher-order function that partially applies the validator to the webhook receiver */
+export function googleCloudWebHookRecieverWithValidator (validator: JwtValidator): (req: Request, res: Response) => Promise<void> {
+  return async (req: Request, res: Response) => await googleCloudWebHookReciever(req, res, validator)
+}
diff --git a/src/graphql/media/mutations.ts b/src/graphql/media/mutations.ts
index 79669ee3..213bd8d9 100644
--- a/src/graphql/media/mutations.ts
+++ b/src/graphql/media/mutations.ts
@@ -1,22 +1,22 @@
import muid from 'uuid-mongodb'
import mongoose from 'mongoose'
-import { GQLContext } from '../../types.js'
+import { ContextWithAuth } from '../../types.js'
import { EntityTag, EntityTagDeleteGQLInput, AddEntityTagGQLInput, MediaObject, MediaObjectGQLInput, DeleteMediaGQLInput } from '../../db/MediaObjectTypes.js'
const MediaMutations = {
-  addMediaObjects: async (_: any, args, { dataSources }: GQLContext): Promise => {
+  addMediaObjects: async (_: any, args, { dataSources, user }: ContextWithAuth): Promise => {
    const { media } = dataSources
    const { input }: { input: MediaObjectGQLInput[] } = args
    return await media.addMediaObjects(input)
  },
-  deleteMediaObject: async (_: any, args, { dataSources }: GQLContext): Promise => {
+  deleteMediaObject: async (_: any, args, { dataSources }: ContextWithAuth): Promise => {
    const { media } = dataSources
    const { input }: { input: DeleteMediaGQLInput } = args
    return await media.deleteMediaObject(new mongoose.Types.ObjectId(input.mediaId))
  },
-  addEntityTag: async (_: any, args, { dataSources }: GQLContext): Promise => {
+  addEntityTag: async (_: any, args, { dataSources }: ContextWithAuth): Promise => {
    const { media } = dataSources
    const { input }: { input: AddEntityTagGQLInput } = args
    const { mediaId, entityId, entityType, topoData } = input
@@ -28,7 +28,7 @@ const MediaMutations = {
    })
  },
-  removeEntityTag: async (_: any, args, { dataSources }: GQLContext): Promise => {
+  removeEntityTag: async (_: any, args, { dataSources }: ContextWithAuth): Promise => {
    const { media } = dataSources
    const { input }: { input: EntityTagDeleteGQLInput } = args
    const { mediaId, tagId } = input
@@ -37,11 +37,6 @@ const MediaMutations = {
      tagId: new mongoose.Types.ObjectId(tagId)
    })
  }
-
-  // updateTopoData: async (_: any, args, { dataSources }: Context): Promise => {
-  //   const { media } = dataSources
-  //   const { input }: { input: AddEntityTagGQLInput } = args
-  //   const { mediaId, entityId, entityType }
export default MediaMutations
diff --git a/src/graphql/schema/Media.gql b/src/graphql/schema/Media.gql
index 5e263de5..e057cb6a 100644
--- a/src/graphql/schema/Media.gql
+++ b/src/graphql/schema/Media.gql
@@ -42,7 +42,7 @@ type Query {
  """
  Get media cursor with pagination support. We only support forward cursor.
-  See
+  See
  - https://graphql.org/learn/pagination/
  - https://relay.dev/graphql/connections.htm
  """
@@ -199,15 +199,72 @@ type MediaWithTags implements IMediaMetadata {
  uploadTime: Date!
  size: Int!
  entityTags: [EntityTag]
+  lng: Float
+  lat: Float
+  camera: String
}

+"""
+There are two ways to use a media input: one creates a fully reified media object,
+and one creates a 'pending' media object. Pending means that you have not yet
+uploaded the file, but you will be uploading it at some point in the near future.
+
+The server will return the media object along with an endpoint to send the file to via
+a simple PUT request with the image data in the body.
+
+When using this input type it is important not to partially specify the metadata. If you
+supply width and height, the GQL endpoint will also require the size and format (or any
+other missing attributes), since supplying them saves it a trip to the bucket.
+"""
input NewMediaObjectInput {
-  userUuid: ID!
-  width: Int!
-  height: Int!
-  format: String!
-  size: Int!
-  mediaUrl: String!
+  """
+  Optionally supply the width AND height, otherwise the GQL endpoint will fetch the object
+  from the database to get it
+  """
+  width: Int
+  """
+  Optionally supply the width AND height, otherwise the GQL endpoint will fetch the object
+  from the database to get it
+  """
+  height: Int
+  """
+  Optionally supply the image size in bytes, otherwise the GQL endpoint will fetch the object
+  from the database to get it.
+  """
+  size: Int
+  """
+  Optionally specify the file format of this media; if you neglect to supply it
+  the GQL endpoint will fetch the object
+  """
+  format: String
+
+  """
+  If this media is already reified, you can set this and
+  openbeta will keep a reference to it. If you need an upload endpoint,
+  you may leave this field blank and/or supply an optional filename - though
+  if you supply a URL the filename will be ignored.
+
+  If you supply neither a filename nor a url, a random filename
+  will be chosen for you, and your upload endpoint will be returned with this
+  field set.
+  """
+  mediaUrl: String
+  """
+  You can optionally supply a filename for this media object. This is usually a
+  good idea if your user is expected to have unique filenames - as it should help
+  to prevent duplication of media items (namespaced by user).
+  """
+  filename: String
+  """
+  By default, openbeta will mask the original filename provided to us, but if you would
+  like to maintain the filename you can set this explicitly to 'false'.
+
+  If you are performing some action where the image filenames are unlikely to be sensitive
+  then it can be convenient to disable masking - since it will prevent double-upload of the same image.
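+
+  For example (illustrative): with maskFilename: false and filename: "north-face-topo.jpeg" the stored
+  mediaUrl keeps that name under your user's path, while the default (masked) behaviour swaps in a
+  randomised safe filename.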
+ """ + maskFilename: Boolean + entityTag: EmbeddedEntityInput } diff --git a/src/model/MediaDataSource.ts b/src/model/MediaDataSource.ts index 1b51f1d6..96b01971 100644 --- a/src/model/MediaDataSource.ts +++ b/src/model/MediaDataSource.ts @@ -1,15 +1,23 @@ -import { MongoDataSource } from 'apollo-datasource-mongodb' +import { MongoDataSource, MongoDataSourceConfig } from 'apollo-datasource-mongodb' import muid, { MUUID } from 'uuid-mongodb' import mongoose from 'mongoose' import { logger } from '../logger.js' import { getMediaObjectModel } from '../db/index.js' import { TagsLeaderboardType, UserMediaQueryInput, AreaMediaQueryInput, ClimbMediaQueryInput, AllTimeTagStats, MediaByUsers, MediaForFeedInput, MediaObject, UserMedia, AreaMedia, ClimbMedia } from '../db/MediaObjectTypes.js' +import { BucketStorage } from '../google-cloud/bucket.js' +import { GoogleStorage, mediaAdded } from '../google-cloud/adapter-interface.js' const HARD_MAX_FILES = 1000 const HARD_MAX_USERS = 100 export default class MediaDataSource extends MongoDataSource { mediaObjectModel = getMediaObjectModel() + bucket: BucketStorage + + constructor (args: MongoDataSourceConfig & { bucket?: BucketStorage }) { + super(args) + this.bucket = args.bucket ?? new GoogleStorage() + } /** * A reusable filter to exclude documents with empty entityTags @@ -20,6 +28,25 @@ export default class MediaDataSource extends MongoDataSource { } }] + /** + * This may ultimately not be necessary, since really the google GCS integration should be solid, + * but the image callback hook is designed to be idempotent and this extra work is not too painful + * since most of the time it shouldn't really be doing anything + */ + async elideUnreifiedMedia (input: T[]): Promise { + const pending = input.filter(i => i.expiresAt !== undefined) + if (pending.length !== 0) { + const status = await this.bucket.fileExists(pending.map(i => i.mediaUrl)) + const sinceReified = pending.filter((item, idx) => status[idx]) + await Promise.all(sinceReified.map(async i => await mediaAdded({ objectId: i.mediaUrl }))) + input.filter((_, idx) => status[idx]).forEach((media, idx) => { + input[idx].expiresAt = undefined + }) + } + + return input.filter(i => i.expiresAt === undefined) + } + /** * Find one media object by id. Throw an exception if not found. 
* @param _id @@ -99,7 +126,7 @@ export default class MediaDataSource extends MongoDataSource { } } ]) - return rs + return await Promise.all(rs.map(async (i) => ({ ...i, mediaWithTags: await this.elideUnreifiedMedia(i.mediaWithTags) }))) } /** @@ -112,7 +139,7 @@ export default class MediaDataSource extends MongoDataSource { logger.error(`Expecting 1 user in result set but got ${rs.length}`) return [] } - return rs[0].mediaWithTags + return await this.elideUnreifiedMedia(rs[0].mediaWithTags) } /** @@ -128,7 +155,7 @@ export default class MediaDataSource extends MongoDataSource { async getOneUserMediaPagination (input: UserMediaQueryInput): Promise { const { userUuid, first = 6, after } = input const filters = this.mediaFilters(after, userUuid, 'user') - const filteredMedia = await this.aggregateMedia(filters, first) + const filteredMedia: MediaObject[] = await this.elideUnreifiedMedia(await this.aggregateMedia(filters, first)) const itemCount = await this.mediaObjectModel.countDocuments(this.getMatchClause(userUuid, 'user')) let hasNextPage = false if (filteredMedia.length > first) { @@ -151,8 +178,9 @@ export default class MediaDataSource extends MongoDataSource { async getOneAreaMediaPagination (input: AreaMediaQueryInput): Promise { const { areaUuid, first = 6, after } = input const filters = this.mediaFilters(after, areaUuid, 'area') - const filteredMedia = await this.aggregateMedia(filters, first) + const filteredMedia: MediaObject[] = await this.aggregateMedia(filters, first) const itemCount = await this.mediaObjectModel.countDocuments(this.getMatchClause(areaUuid, 'area')) + let hasNextPage = false if (filteredMedia.length > first) { filteredMedia.pop() @@ -193,8 +221,15 @@ export default class MediaDataSource extends MongoDataSource { * @returns Array of TagsLeaderboardType */ async getTagsLeaderboard (limit = 30): Promise { - const rs = await this.mediaObjectModel.aggregate([ + const resultSet = await this.mediaObjectModel.aggregate([ + // Do not count media that has not been tagged ...this.entityTagsNotEmptyFilter, + // When counting media for leaderboard we need not look at pending media + { + $match: { + expiresAt: { $exists: false } + } + }, { $group: { _id: '$userUuid', @@ -233,10 +268,12 @@ export default class MediaDataSource extends MongoDataSource { readPreference: 'secondaryPreferred' }) - if (rs?.length !== 1) throw new Error('Unexpected leaderboard query error') + if (resultSet === undefined) throw new Error('tag leaderboard returned with no data') + if (resultSet.length === 0) throw new Error('Tag leaderboard had zero sets (which is weird)') + if (resultSet.length > 1) throw new Error('Unexpected leaderboard query error - multiple result sets') return { - allTime: rs[0] + allTime: resultSet[0] } } @@ -246,7 +283,7 @@ export default class MediaDataSource extends MongoDataSource { * @param climbId * @returns `MediaWithTags` array */ - async findMediaByClimbId (climbId: MUUID, climbName: string): Promise { + async findMediaByClimbId (climbId: MUUID, climbName?: string): Promise { const rs = await this.mediaObjectModel.find({ 'entityTags.targetId': climbId }).lean() diff --git a/src/model/MutableMediaDataSource.ts b/src/model/MutableMediaDataSource.ts index 46afd86f..341b8628 100644 --- a/src/model/MutableMediaDataSource.ts +++ b/src/model/MutableMediaDataSource.ts @@ -1,11 +1,15 @@ import { ApolloServerErrorCode } from '@apollo/server/errors' import { GraphQLError } from 'graphql' -import mongoose from 'mongoose' +import mongoose, { Document, MergeType } from 'mongoose' 
import muuid from 'uuid-mongodb'
import MediaDataSource from './MediaDataSource.js'
import { EntityTag, EntityTagDeleteInput, MediaObject, MediaObjectGQLInput, AddTagEntityInput, NewMediaObjectDoc } from '../db/MediaObjectTypes.js'
import MutableAreaDataSource from './MutableAreaDataSource.js'
+import { safeFilename } from '../google-cloud/bucket.js'
+import { GoogleStorage } from '../google-cloud/adapter-interface.js'
+import { LocalFileStorage } from '../google-cloud/mock-storage-bucket.js'
+import { GCS_ENABLE_SERVICES } from '../google-cloud/index.js'

export default class MutableMediaDataSource extends MediaDataSource {
  areaDS = MutableAreaDataSource.getInstance()
@@ -145,11 +149,60 @@ export default class MutableMediaDataSource extends MediaDataSource {
  }

  /**
-   * Add one or more media objects. The embedded entityTag may have one tag.
+   * Add one or more media objects. The embedded entityTag may have one tag.
+   *
+   * Adding media has two possible paths:
+   *
+   * 1. In the event that the media already exists, we are simply creating a
+   * reference to it in the media collection so that we can associate tags with
+   * the media without losing relational integrity. This case is very simple,
+   * since we only need to fulfil a database request.
+   *
+   * 2. In the event that a user is trying to add media for which an object in the
+   * storage bucket is PENDING, we need to create the signed url for this user to
+   * upload the media and fulfil the pending media.
+   *
+   * In this case, the reference that we create in the database is a future that is
+   * awaiting fulfilment by the user. When the user uploads the media that they have
+   * promised to us, the storage bucket will create an event for us to consume and we
+   * will fulfil the media promise. In the event that the user does NOT fulfil their
+   * promise, the document will expire and be cleaned out of the database.
   */
-  async addMediaObjects (input: MediaObjectGQLInput[]): Promise {
-    const docs: NewMediaObjectDoc[] = await Promise.all(input.map(async entry => {
-      const { userUuid: userUuidStr, mediaUrl, width, height, format, size, entityTag } = entry
+  async addMediaObjects (input: MediaObjectGQLInput[]): Promise> {
+    const pendingUrls: Record<string, string> = {}
+
+    const documents: NewMediaObjectDoc[] = await Promise.all(input.map(async entry => {
+      let { mediaUrl, width, height, format, size, entityTag, filename, userUuid, maskFilename } = entry
+      let expiresAt: Date | undefined
+
+      if (mediaUrl === undefined) {
+        if (filename === undefined && maskFilename === false) {
+          throw new GraphQLError('Likely programming error: you cannot disable filename masking without supplying a filename', {
+            extensions: {
+              code: ApolloServerErrorCode.BAD_USER_INPUT
+            }
+          })
+        }
+
+        if (filename === undefined) {
+          filename = safeFilename(`any.${format}`)
+        }
+
+        // Use the supplied filename if the user has suppressed masking, otherwise use a safe filename
+        // as a drop-in replacement.
+        const path = `/u/${userUuid}/${maskFilename === false ? filename : safeFilename(filename)}`
+        // Signed urls can be made with multiple references to the same promised filename,
+        // so it is first-past-the-post in terms of which file becomes the one to claim it.
+        // We needn't record past signed urls as they resolve to the same mediaUrl - so when
+        // duplicate pending media requests come in, we can push back the expiry and move on
+        // - supplying a new url to the requesting user.
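+        // (Illustrative aside, not from the original code: with masking on, `path` ends up looking
+        // something like `/u/<userUuid>/h4K9tQ2wXz.jpeg`; with maskFilename === false it keeps the
+        // caller's filename under the same `/u/<userUuid>/` prefix.)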
+ const signed = await this.bucket.signedUrl(path) + + mediaUrl = path + pendingUrls[mediaUrl] = signed.url + expiresAt = new Date(signed.expires) + } + let newTag: EntityTag | undefined if (entityTag != null) { newTag = await this.getEntityDoc({ @@ -159,19 +212,59 @@ export default class MutableMediaDataSource extends MediaDataSource { } return ({ - mediaUrl, + size, width, height, format, - size, - userUuid: muuid.from(userUuidStr), + mediaUrl, + expiresAt, + userUuid: muuid.from(userUuid), ...newTag != null && { entityTags: [newTag] } }) })) + // look-ahead for duplicates (pending duplicates, since reified media should throw the normal complaints) + const duplicates = await this.mediaObjectModel.find( + { + mediaUrl: { $in: documents.map(i => i.mediaUrl) }, + // Pending media are the only media for which no key error + // should be a possibility. So we specify that the document must contain + // an upcoming expiry time. + expiresAt: { $exists: true } + }, + { expiresAt: true, mediaUrl: true } + ) + + let extant: Array & MediaObject & Required<{ + _id: mongoose.Types.ObjectId + }>, Omit>> = [] + + if (duplicates.length > 0) { + const extantFilter = { + _id: { $in: duplicates.map(i => i._id) }, + expiresAt: { $exists: true } + } + + // Push back the document expiry time. + await this.mediaObjectModel.updateMany( + extantFilter, + // It should be the case that all documents will + // share a near-identical future expiry date (accurate down to milliseconds), so we can just + // use the same for all of them since we don't expect many close-calls in the timing department. + { expiresAt: duplicates[0].expiresAt } + ) + + // We can re-use the above filter to now grab the objects that we need, rather than creating them. + extant = await this.mediaObjectModel.find(extantFilter) + } + + const extantFilter = new Set(extant.map(i => i.mediaUrl)) + // Do not set `lean = true` as it will not return 'createdAt' - const rs = await this.mediaObjectModel.insertMany(docs) - return rs != null ? rs : [] + let rs = await this.mediaObjectModel.insertMany(documents.filter(i => !extantFilter.has(i.mediaUrl))) + if (rs === null) (rs = []) + + return ([...rs, ...extant]).map(i => ({ ...i.toObject(), uploadTo: pendingUrls[i.mediaUrl] })) } /** @@ -201,7 +294,10 @@ export default class MutableMediaDataSource extends MediaDataSource { static getInstance (): MutableMediaDataSource { if (MutableMediaDataSource.instance == null) { - MutableMediaDataSource.instance = new MutableMediaDataSource({ modelOrCollection: mongoose.connection.db.collection('media') }) + MutableMediaDataSource.instance = new MutableMediaDataSource({ + modelOrCollection: mongoose.connection.db.collection('media'), + bucket: GCS_ENABLE_SERVICES ? 
new GoogleStorage() : new LocalFileStorage() + }) } return MutableMediaDataSource.instance } diff --git a/src/model/__tests__/MediaDataSource.ts b/src/model/__tests__/MediaDataSource.test.ts similarity index 61% rename from src/model/__tests__/MediaDataSource.ts rename to src/model/__tests__/MediaDataSource.test.ts index a99dea54..eafe5002 100644 --- a/src/model/__tests__/MediaDataSource.ts +++ b/src/model/__tests__/MediaDataSource.test.ts @@ -4,7 +4,7 @@ import MutableMediaDataSource from '../MutableMediaDataSource.js' import AreaDataSource from '../MutableAreaDataSource.js' import ClimbDataSource from '../MutableClimbDataSource.js' -import { createIndexes } from '../../db/index.js' +import { createIndexes, getUserModel } from '../../db/index.js' import { AreaType } from '../../db/AreaTypes.js' import { AddTagEntityInput, @@ -17,6 +17,11 @@ import { } from '../../db/MediaObjectTypes.js' import { newSportClimb1 } from './MutableClimbDataSource.js' import inMemoryDB from '../../utils/inMemoryDB.js' +import { mediaAdded } from '../../google-cloud/adapter-interface.js' +import { safeRandomFilename } from '../../google-cloud/bucket.js' +import { muuidToString } from '../../utils/helpers.js' +import UserDataSource from '../UserDataSource.js' +import assert from 'node:assert' const TEST_MEDIA: MediaObjectGQLInput = { userUuid: 'a2eb6353-65d1-445f-912c-53c6301404bd', @@ -49,6 +54,14 @@ describe('MediaDataSource', () => { areas = AreaDataSource.getInstance() climbs = ClimbDataSource.getInstance() media = MutableMediaDataSource.getInstance() + const userModel = getUserModel() + await userModel.insertMany([{ + _id: TEST_MEDIA.userUuid, + usernameInfo: { + username: 'test_user', + canonicalName: 'test_user' + } + }]) }) beforeEach(async () => { @@ -102,6 +115,227 @@ describe('MediaDataSource', () => { await inMemoryDB.close() }) + describe('Pending media logic', () => { + // When you create media that has no extant mediaUrl it will be made as a + // pending media object. 
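+    // Sketch of the flow exercised below (derived from the data source code above): addMediaObjects()
+    // with no mediaUrl stores a document with `expiresAt` set and returns a signed `uploadTo` URL;
+    // once the bytes land in the bucket, mediaAdded({ objectId }) clears `expiresAt` and the media is reified.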
+ const pendingPattern: MediaObjectGQLInput = { + userUuid: TEST_MEDIA.userUuid, + width: 100, + height: 100, + size: 100 * 100, + format: 'jpeg' + } + + it('Should create a pending media object with random filename', async () => { + const [pending] = await media.addMediaObjects([{ ...pendingPattern }]) + expect(pending.expiresAt).not.toBe(undefined) + expect(pending.uploadTo).not.toBe(undefined) + expect(pending.expiresAt).not.toBe(null) + expect(pending.uploadTo).not.toBe(null) + }) + + it('Should create a non-pending media object', async () => { + const [pending] = await media.addMediaObjects([{ ...pendingPattern, mediaUrl: safeRandomFilename() + '.jpeg' }]) + expect(pending.expiresAt).toBe(undefined) + expect(pending.uploadTo).toBe(undefined) + }) + + it('Pending media object should be deleted by mongodb after its expiry time', async () => { + const [pending] = await media.addMediaObjects([pendingPattern]) + + // media expires in 10ms + await media.mediaObjectModel.updateOne({ _id: pending._id }, { expiresAt: Date.now() + 10 }) + while (await media.mediaObjectModel.findOne({ _id: pending._id }) !== null) { + await new Promise((resolve) => setTimeout(resolve, 500)) + } + }) + + it('Pending media object should mask filename by default', async () => { + // at this point the media is pending + const filename = safeRandomFilename() + '.jpeg' + const [pending] = await media.addMediaObjects([{ + ...pendingPattern, + filename + }]) + + expect(pending.mediaUrl.split('/')).not.toContain(filename) + expect(pending.mediaUrl).not.toContain(filename) + }) + + it('Pending media object should throw error if mask suppression is requested but no filename is specified', async () => { + // at this point the media is pending + await expect(media.addMediaObjects([{ + ...pendingPattern, + maskFilename: false + }])).rejects.toThrow() + }) + + it('Pending media object should mask filename if requested', async () => { + // at this point the media is pending + const filename = safeRandomFilename() + 'jpeg' + const [pending] = await media.addMediaObjects([{ + filename, + ...pendingPattern, + maskFilename: true + }]) + + expect(pending.mediaUrl.split('/')).not.toContain(filename) + expect(pending.mediaUrl).not.toContain(filename) + }) + + it('Pending media object should use original filename if requested', async () => { + // at this point the media is pending + const filename = safeRandomFilename() + 'jpeg' + const [pending] = await media.addMediaObjects([{ + ...pendingPattern, + filename, + maskFilename: false + }]) + + expect(pending.mediaUrl.split('/')).toContain(filename) + expect(pending.mediaUrl).toContain(filename) + }) + + it('Identical pending media calls should not cause unique-key issues when an identifier is present', async () => { + // at this point the media is pending + const filename = safeRandomFilename() + 'jpeg' + let [pending] = await media.addMediaObjects([{ + filename, + ...pendingPattern, + maskFilename: false + }]) + + expect(pending.mediaUrl.split('/')).toContain(filename) + expect(pending.mediaUrl).toContain(filename) + + pending = await media.addMediaObjects([{ + filename, + ...pendingPattern, + maskFilename: false + }]).then(x => x[0]) + + expect(pending.mediaUrl.split('/')).toContain(filename) + expect(pending.mediaUrl).toContain(filename) + }) + + it('Identical pending media calls SHOULD throw unique-key error when key collision appears accidental', async () => { + // at this point the media is pending + const filename = safeRandomFilename() + 'jpeg' + const [pending] = await 
media.addMediaObjects([{ + filename, + ...pendingPattern, + maskFilename: false + }]) + await mediaAdded({ objectId: pending.mediaUrl }) + + await expect(media.addMediaObjects([{ + filename, + ...pendingPattern, + maskFilename: false + }]).then(x => x[0])) + .rejects + .toThrow('E11000 duplicate key error collection: openbeta.media_objects index: mediaUrl_1 dup key:') + }) + + it('Pending media object should be reified if hook is called', async () => { + const [pending] = await media.addMediaObjects([{ + ...pendingPattern, + format: 'jpeg' + }]) + + await mediaAdded({ objectId: pending.mediaUrl }) + + expect((await media.mediaObjectModel.findOne({ _id: pending._id }).orFail()).expiresAt).toBe(undefined) + }) + it('Pending media should be elided when resolving climb photos', async () => { + await media.findMediaByClimbId(climbIdForTagging) + await media.getOneClimbMediaPagination({ climbUuid: climbIdForTagging }) + }) + it('Pending media should be elided when resolving area photos', async () => { + await media.getOneAreaMediaPagination({ areaUuid: areaForTagging1.metadata.area_id }) + }) + + it('Pending media should be elided when resolving user photos', async () => { + const [pending] = await media.addMediaObjects([{ + ...pendingPattern, + format: 'jpeg' + }]) + + expect(await media.getOneUserMedia(TEST_MEDIA.userUuid, 1_000).then(i => i.map(i => i.mediaUrl))).not.toContain(pending.mediaUrl) + + expect( + await media.getOneUserMediaPagination({ userUuid: muuid.from(TEST_MEDIA.userUuid) }) + .then(i => i.mediaConnection.edges + .map(i => i.node.mediaUrl)) + ) + .not + .toContain(pending.mediaUrl) + }) + + it('Pending media should be elided from tags leaderboard', async () => { + const userDs = UserDataSource.getInstance() + + const users = [muuid.v4(), muuid.v4()] + await Promise.all(users.map(async (userUuid) => + await userDs.createOrUpdateUserProfile( + userUuid, { + email: `${userUuid.toString()}@openbeta.io`, + username: `user-${process.uptime()}`, + userUuid: userUuid.toString() + }) + )) + + const entityTag = { entityId: muuidToString(climbIdForTagging), entityType: 0 } + await Promise.all(users.map(i => muuidToString(i)).map(async (userUuid) => + await media.addMediaObjects([ + { ...pendingPattern, userUuid, mediaUrl: `/u/${userUuid.toString()}/${safeRandomFilename()}.jpeg`, entityTag } + ]).then((media) => media.forEach(i => { + expect(i.uploadTo).toBeUndefined() + expect(i.expiresAt).toBeUndefined() + })) + )) + + const userTags = await media.getTagsLeaderboard().then(l => l.allTime.byUsers.find(i => i.userUuid.toString() === users[0].toString())) + assert(userTags !== undefined) + // create a new pending media object for the present leader + await media.addMediaObjects([{ + ...pendingPattern, + userUuid: muuidToString(userTags.userUuid) + }]) + + expect(userTags.total).toBe(1) + + const [pending] = await media.addMediaObjects([{ + ...pendingPattern, + userUuid: muuidToString(userTags.userUuid), + entityTag: { + entityId: areaForTagging1.metadata.area_id.toString(), + entityType: 1 + } + }]).then((media) => media.map(mediaObject => { + expect(mediaObject.uploadTo).not.toBeUndefined() + expect(mediaObject.expiresAt).not.toBeUndefined() + expect(mediaObject.entityTags).toHaveLength(1) + return mediaObject + })) + + // This should not change the count, since the media is unreified + expect( + await media.getTagsLeaderboard() + .then(x => x.allTime.byUsers.find(u => u.userUuid.toString() === userTags.userUuid.toString())) + .then(i => i?.total) + ).toBe(userTags.total) + + // 
reification of this item should make the count increment by 1 + await mediaAdded({ objectId: pending.mediaUrl }) + expect( + await media.getTagsLeaderboard() + .then(x => x.allTime.byUsers.find(u => u.userUuid.toString() === userTags.userUuid.toString())) + .then(i => i?.total) + ).toBe(userTags.total + 1) + }) + }) + it('should not tag a nonexistent area', async () => { const badAreaTag: AddTagEntityInput = { mediaId: testMediaObject._id, diff --git a/src/server.ts b/src/server.ts index 6eab9995..3d230f24 100644 --- a/src/server.ts +++ b/src/server.ts @@ -21,6 +21,12 @@ import localDevBypassAuthPermissions from './auth/local-dev/permissions.js' import MutableOrgDS from './model/MutableOrganizationDataSource.js' import UserDataSource from './model/UserDataSource.js' import BulkImportDataSource from './model/BulkImportDataSource.js' +import { googleCloudWebHookRecieverWithValidator } from './google-cloud/push-subscriber.js' +import { GCS_ENABLE_SERVICES, GCS_MEDIA_HOOK_URL } from './google-cloud/index.js' +import { gcsTopicSubscription, handleMessageOnChannel } from './google-cloud/pull-subscriber.js' +import { logger } from './logger.js' +import { validateGoogleJWT } from './google-cloud/google-auth.js' +import uploadRouter from './google-cloud/mock-storage-upload.js' /** * Create a GraphQL server @@ -57,6 +63,26 @@ export async function createServer (): Promise<{ app: express.Application, serve const context = process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthContext : createContext + // Look at the readme to see how google cloud services may interact with the API + // Express processes routes in the order they are defined. By placing the webhook route definition first, + // when a POST request comes in on the GCS_MEDIA_HOOK_URL, Express will match it to the defined route + // and execute the reciever function. If the request doesn't match any of the explicitly defined routes, + // it will then fall through to the Apollo Server middleware (mounted at /). + if (GCS_ENABLE_SERVICES) { + if (GCS_MEDIA_HOOK_URL !== undefined) { + logger.info(`Setting up webhook at ${GCS_MEDIA_HOOK_URL}`) + const handler = googleCloudWebHookRecieverWithValidator(validateGoogleJWT) + app.post(GCS_MEDIA_HOOK_URL, bodyParser.json(), (req, res) => { void handler(req, res).catch(logger.error) }) + } else { + logger.info('Setting up a pull notification on the GCS bucket') + // todo: uhh does the gc clean this up at the end of scope? 
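+      // (Assumption, answering the todo above: the Subscription holds an open streaming pull for the
+      // life of the process and is not garbage collected while its listeners are attached; if graceful
+      // shutdown matters, keep a reference and call `subscription.close()`.)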
+ gcsTopicSubscription().on('message', (msg) => { handleMessageOnChannel(msg).then().catch(logger.warn) }) + } + } else { + logger.info('Mock upload is enabled') + app.use('/rest', uploadRouter) + } + app.use('/', bodyParser.json({ limit: '10mb' }), cors(), diff --git a/yarn.lock b/yarn.lock index c19a69ad..a49d1876 100644 --- a/yarn.lock +++ b/yarn.lock @@ -438,6 +438,13 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@emnapi/runtime@^1.4.0": + version "1.4.1" + resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.4.1.tgz#79e54e3cc7f1955b3080f6e1e0e111e06114e114" + integrity sha512-LMshMVP0ZhACNjQNYXiU1iZJ6QCcv0lUdPDPugqGvCGXt5xtRVBPdtA0qU12pEXZzpWAhWlZYptfdAFq10DOVQ== + dependencies: + tslib "^2.4.0" + "@eslint-community/eslint-utils@^4.2.0": version "4.4.1" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz#d1145bf2c20132d6400495d6df4bf59362fd9d56" @@ -478,16 +485,59 @@ arrify "^2.0.0" extend "^3.0.2" +"@google-cloud/paginator@^5.0.0": + version "5.0.2" + resolved "https://registry.yarnpkg.com/@google-cloud/paginator/-/paginator-5.0.2.tgz#86ad773266ce9f3b82955a8f75e22cd012ccc889" + integrity sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg== + dependencies: + arrify "^2.0.0" + extend "^3.0.2" + +"@google-cloud/precise-date@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@google-cloud/precise-date/-/precise-date-4.0.0.tgz#e179893a3ad628b17a6fabdfcc9d468753aac11a" + integrity sha512-1TUx3KdaU3cN7nfCdNf+UVqA/PSX29Cjcox3fZZBtINlRrXVTmUkQnCKv2MbBUbCopbK4olAT1IHl76uZyCiVA== + "@google-cloud/projectify@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@google-cloud/projectify/-/projectify-3.0.0.tgz#302b25f55f674854dce65c2532d98919b118a408" integrity sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA== +"@google-cloud/projectify@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@google-cloud/projectify/-/projectify-4.0.0.tgz#d600e0433daf51b88c1fa95ac7f02e38e80a07be" + integrity sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA== + "@google-cloud/promisify@^3.0.0": version "3.0.1" resolved "https://registry.yarnpkg.com/@google-cloud/promisify/-/promisify-3.0.1.tgz#8d724fb280f47d1ff99953aee0c1669b25238c2e" integrity sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA== +"@google-cloud/promisify@~4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@google-cloud/promisify/-/promisify-4.0.0.tgz#a906e533ebdd0f754dca2509933334ce58b8c8b1" + integrity sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g== + +"@google-cloud/pubsub@^4.11.0": + version "4.11.0" + resolved "https://registry.yarnpkg.com/@google-cloud/pubsub/-/pubsub-4.11.0.tgz#cafbd135a66585b32a124d388169a06992e75c9c" + integrity sha512-xWxJAlyUGd6OPp97u8maMcI3xVXuHjxfwh6Dr7P/P+6NK9o446slJobsbgsmK0xKY4nTK8m5uuJrhEKapfZSmQ== + dependencies: + "@google-cloud/paginator" "^5.0.0" + "@google-cloud/precise-date" "^4.0.0" + "@google-cloud/projectify" "^4.0.0" + "@google-cloud/promisify" "~4.0.0" + "@opentelemetry/api" "~1.9.0" + "@opentelemetry/semantic-conventions" "~1.30.0" + arrify "^2.0.0" + extend "^3.0.2" + google-auth-library 
"^9.3.0" + google-gax "^4.3.3" + heap-js "^2.2.0" + is-stream-ended "^0.1.4" + lodash.snakecase "^4.1.1" + p-defer "^3.0.0" + "@google-cloud/storage@^6.9.5": version "6.12.0" resolved "https://registry.yarnpkg.com/@google-cloud/storage/-/storage-6.12.0.tgz#a5d3093cc075252dca5bd19a3cfda406ad3a9de1" @@ -590,6 +640,24 @@ resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== +"@grpc/grpc-js@^1.10.9": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.13.2.tgz#376543c23eedc03ea019ff37050dc0b0936bfe8f" + integrity sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g== + dependencies: + "@grpc/proto-loader" "^0.7.13" + "@js-sdsl/ordered-map" "^4.4.2" + +"@grpc/proto-loader@^0.7.13": + version "0.7.13" + resolved "https://registry.yarnpkg.com/@grpc/proto-loader/-/proto-loader-0.7.13.tgz#f6a44b2b7c9f7b609f5748c6eac2d420e37670cf" + integrity sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw== + dependencies: + lodash.camelcase "^4.3.0" + long "^5.0.0" + protobufjs "^7.2.5" + yargs "^17.7.2" + "@humanwhocodes/config-array@^0.13.0": version "0.13.0" resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.13.0.tgz#fb907624df3256d04b9aa2df50d7aa97ec648748" @@ -609,6 +677,124 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz#4a2868d75d6d6963e423bcf90b7fd1be343409d3" integrity sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA== +"@img/sharp-darwin-arm64@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.1.tgz#e79a4756bea9a06a7aadb4391ee53cb154a4968c" + integrity sha512-pn44xgBtgpEbZsu+lWf2KNb6OAf70X68k+yk69Ic2Xz11zHR/w24/U49XT7AeRwJ0Px+mhALhU5LPci1Aymk7A== + optionalDependencies: + "@img/sharp-libvips-darwin-arm64" "1.1.0" + +"@img/sharp-darwin-x64@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.1.tgz#f1f1d386719f6933796415d84937502b7199a744" + integrity sha512-VfuYgG2r8BpYiOUN+BfYeFo69nP/MIwAtSJ7/Zpxc5QF3KS22z8Pvg3FkrSFJBPNQ7mmcUcYQFBmEQp7eu1F8Q== + optionalDependencies: + "@img/sharp-libvips-darwin-x64" "1.1.0" + +"@img/sharp-libvips-darwin-arm64@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.1.0.tgz#843f7c09c7245dc0d3cfec2b3c83bb08799a704f" + integrity sha512-HZ/JUmPwrJSoM4DIQPv/BfNh9yrOA8tlBbqbLz4JZ5uew2+o22Ik+tHQJcih7QJuSa0zo5coHTfD5J8inqj9DA== + +"@img/sharp-libvips-darwin-x64@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.1.0.tgz#1239c24426c06a8e833815562f78047a3bfbaaf8" + integrity sha512-Xzc2ToEmHN+hfvsl9wja0RlnXEgpKNmftriQp6XzY/RaSfwD9th+MSh0WQKzUreLKKINb3afirxW7A0fz2YWuQ== + +"@img/sharp-libvips-linux-arm64@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.1.0.tgz#20d276cefd903ee483f0441ba35961679c286315" + integrity sha512-IVfGJa7gjChDET1dK9SekxFFdflarnUB8PwW8aGwEoF3oAsSDuNUTYS+SKDOyOJxQyDC1aPFMuRYLoDInyV9Ew== + +"@img/sharp-libvips-linux-arm@1.1.0": + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.1.0.tgz#067c0b566eae8063738cf1b1db8f8a8573b5465c" + integrity sha512-s8BAd0lwUIvYCJyRdFqvsj+BJIpDBSxs6ivrOPm/R7piTs5UIwY5OjXrP2bqXC9/moGsyRa37eYWYCOGVXxVrA== + +"@img/sharp-libvips-linux-ppc64@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.1.0.tgz#682334595f2ca00e0a07a675ba170af165162802" + integrity sha512-tiXxFZFbhnkWE2LA8oQj7KYR+bWBkiV2nilRldT7bqoEZ4HiDOcePr9wVDAZPi/Id5fT1oY9iGnDq20cwUz8lQ== + +"@img/sharp-libvips-linux-s390x@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.1.0.tgz#82fcd68444b3666384235279c145c2b28d8ee302" + integrity sha512-xukSwvhguw7COyzvmjydRb3x/09+21HykyapcZchiCUkTThEQEOMtBj9UhkaBRLuBrgLFzQ2wbxdeCCJW/jgJA== + +"@img/sharp-libvips-linux-x64@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.1.0.tgz#65b2b908bf47156b0724fde9095676c83a18cf5a" + integrity sha512-yRj2+reB8iMg9W5sULM3S74jVS7zqSzHG3Ol/twnAAkAhnGQnpjj6e4ayUz7V+FpKypwgs82xbRdYtchTTUB+Q== + +"@img/sharp-libvips-linuxmusl-arm64@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.1.0.tgz#72accf924e80b081c8db83b900b444a67c203f01" + integrity sha512-jYZdG+whg0MDK+q2COKbYidaqW/WTz0cc1E+tMAusiDygrM4ypmSCjOJPmFTvHHJ8j/6cAGyeDWZOsK06tP33w== + +"@img/sharp-libvips-linuxmusl-x64@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.1.0.tgz#1fa052737e203f46bf44192acd01f9faf11522d7" + integrity sha512-wK7SBdwrAiycjXdkPnGCPLjYb9lD4l6Ze2gSdAGVZrEL05AOUJESWU2lhlC+Ffn5/G+VKuSm6zzbQSzFX/P65A== + +"@img/sharp-linux-arm64@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.1.tgz#c36ef964499b8cfc2d2ed88fe68f27ce41522c80" + integrity sha512-kX2c+vbvaXC6vly1RDf/IWNXxrlxLNpBVWkdpRq5Ka7OOKj6nr66etKy2IENf6FtOgklkg9ZdGpEu9kwdlcwOQ== + optionalDependencies: + "@img/sharp-libvips-linux-arm64" "1.1.0" + +"@img/sharp-linux-arm@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.1.tgz#c96e38ff028d645912bb0aa132a7178b96997866" + integrity sha512-anKiszvACti2sGy9CirTlNyk7BjjZPiML1jt2ZkTdcvpLU1YH6CXwRAZCA2UmRXnhiIftXQ7+Oh62Ji25W72jA== + optionalDependencies: + "@img/sharp-libvips-linux-arm" "1.1.0" + +"@img/sharp-linux-s390x@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.1.tgz#8ac58d9a49dcb08215e76c8d450717979b7815c3" + integrity sha512-7s0KX2tI9mZI2buRipKIw2X1ufdTeaRgwmRabt5bi9chYfhur+/C1OXg3TKg/eag1W+6CCWLVmSauV1owmRPxA== + optionalDependencies: + "@img/sharp-libvips-linux-s390x" "1.1.0" + +"@img/sharp-linux-x64@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.1.tgz#3d8652efac635f0dba39d5e3b8b49515a2b2dee1" + integrity sha512-wExv7SH9nmoBW3Wr2gvQopX1k8q2g5V5Iag8Zk6AVENsjwd+3adjwxtp3Dcu2QhOXr8W9NusBU6XcQUohBZ5MA== + optionalDependencies: + "@img/sharp-libvips-linux-x64" "1.1.0" + +"@img/sharp-linuxmusl-arm64@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.1.tgz#b267e6a3e06f9e4d345cde471e5480c5c39e6969" + integrity 
sha512-DfvyxzHxw4WGdPiTF0SOHnm11Xv4aQexvqhRDAoD00MzHekAj9a/jADXeXYCDFH/DzYruwHbXU7uz+H+nWmSOQ== + optionalDependencies: + "@img/sharp-libvips-linuxmusl-arm64" "1.1.0" + +"@img/sharp-linuxmusl-x64@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.1.tgz#a8dee4b6227f348c4bbacaa6ac3dc584a1a80391" + integrity sha512-pax/kTR407vNb9qaSIiWVnQplPcGU8LRIJpDT5o8PdAx5aAA7AS3X9PS8Isw1/WfqgQorPotjrZL3Pqh6C5EBg== + optionalDependencies: + "@img/sharp-libvips-linuxmusl-x64" "1.1.0" + +"@img/sharp-wasm32@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-wasm32/-/sharp-wasm32-0.34.1.tgz#f7dfd66b6c231269042d3d8750c90f28b9ddcba1" + integrity sha512-YDybQnYrLQfEpzGOQe7OKcyLUCML4YOXl428gOOzBgN6Gw0rv8dpsJ7PqTHxBnXnwXr8S1mYFSLSa727tpz0xg== + dependencies: + "@emnapi/runtime" "^1.4.0" + +"@img/sharp-win32-ia32@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.1.tgz#4bc293705df76a5f0a02df66ca3dc12e88f61332" + integrity sha512-WKf/NAZITnonBf3U1LfdjoMgNO5JYRSlhovhRhMxXVdvWYveM4kM3L8m35onYIdh75cOMCo1BexgVQcCDzyoWw== + +"@img/sharp-win32-x64@0.34.1": + version "0.34.1" + resolved "https://registry.yarnpkg.com/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.1.tgz#8a7922fec949f037c204c79f6b83238d2482384b" + integrity sha512-hw1iIAHpNE8q3uMIRCgGOeDoz9KtFNarFLQclLxr/LK1VBkj8nby18RjFvr6aP7USRYAjTZW6yisnBWMX571Tw== + "@isaacs/cliui@^8.0.2": version "8.0.2" resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" @@ -861,6 +1047,11 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" +"@js-sdsl/ordered-map@^4.4.2": + version "4.4.2" + resolved "https://registry.yarnpkg.com/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz#9299f82874bab9e4c7f9c48d865becbfe8d6907c" + integrity sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw== + "@mongodb-js/saslprep@^1.1.0", "@mongodb-js/saslprep@^1.1.9": version "1.1.9" resolved "https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.1.9.tgz#e974bab8eca9faa88677d4ea4da8d09a52069004" @@ -894,6 +1085,16 @@ resolved "https://registry.yarnpkg.com/@openbeta/sandbag/-/sandbag-0.0.51.tgz#21ce618d2414dc0b8d4f31ef260ac2ebad5a43c8" integrity sha512-qMVohgqRdFjXH8a3aSEZa6zemwSpak/HMttR/pqvclDIXqgPKzWvjFRA3o/YDGieI/19P4dtizLo91TKx0smGQ== +"@opentelemetry/api@~1.9.0": + version "1.9.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe" + integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg== + +"@opentelemetry/semantic-conventions@~1.30.0": + version "1.30.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.30.0.tgz#3a42c4c475482f2ec87c12aad98832dc0087dc9a" + integrity sha512-4VlGgo32k2EQ2wcCY3vEU28A0O13aOtHz3Xt2/2U5FAh9EfhD6t6DqL5Z6yAnRCntbTFDU4YfbpyzSlHNWycPw== + "@panva/asn1.js@^1.0.0": version "1.0.0" resolved "https://registry.yarnpkg.com/@panva/asn1.js/-/asn1.js-1.0.0.tgz#dd55ae7b8129e02049f009408b97c61ccf9032f6" @@ -981,6 +1182,20 @@ dependencies: "@sinonjs/commons" "^3.0.0" +"@tokenizer/inflate@^0.2.6": + version "0.2.7" + resolved "https://registry.yarnpkg.com/@tokenizer/inflate/-/inflate-0.2.7.tgz#32dd9dfc9abe457c89b3d9b760fc0690c85a103b" + integrity 
sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg== + dependencies: + debug "^4.4.0" + fflate "^0.8.2" + token-types "^6.0.0" + +"@tokenizer/token@^0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@tokenizer/token/-/token-0.3.0.tgz#fe98a93fe789247e998c75e74e9c7c63217aa276" + integrity sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A== + "@tootallnate/once@2": version "2.0.0" resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" @@ -1099,6 +1314,11 @@ "@types/connect" "*" "@types/node" "*" +"@types/caseless@*": + version "0.12.5" + resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.5.tgz#db9468cb1b1b5a925b8f34822f1669df0c5472f5" + integrity sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg== + "@types/connect@*": version "3.4.38" resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.38.tgz#5ba7f3bc4fbbdeaff8dded952e5ff2cc53f8d858" @@ -1194,6 +1414,14 @@ dependencies: "@types/node" "*" +"@types/jsonwebtoken@^9.0.9": + version "9.0.9" + resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-9.0.9.tgz#a4c3a446c0ebaaf467a58398382616f416345fb3" + integrity sha512-uoe+GxEuHbvy12OUQct2X9JenKM3qAscquYymuQN4fMWG9DBQtykrQEFcAbVACF7qaLw9BePSodUL0kquqBJpQ== + dependencies: + "@types/ms" "*" + "@types/node" "*" + "@types/lodash@^4.14.175": version "4.17.13" resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.13.tgz#786e2d67cfd95e32862143abe7463a7f90c300eb" @@ -1214,6 +1442,11 @@ resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.5.tgz#1ef302e01cf7d2b5a0fa526790c9123bf1d06690" integrity sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w== +"@types/ms@*": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@types/ms/-/ms-2.1.0.tgz#052aa67a48eccc4309d7f0191b7e41434b90bb78" + integrity sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA== + "@types/node-fetch@^2.6.1": version "2.6.12" resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.12.tgz#8ab5c3ef8330f13100a7479e2cd56d3386830a03" @@ -1229,6 +1462,13 @@ dependencies: undici-types "~6.20.0" +"@types/node@>=13.7.0": + version "22.14.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.14.0.tgz#d3bfa3936fef0dbacd79ea3eb17d521c628bb47e" + integrity sha512-Kmpl+z84ILoG+3T/zQFyAJsU6EPTmOCj8/2+83fSN6djd6I4o7uOuGIH6vq3PrjY5BGitSbFuMN18j3iknubbA== + dependencies: + undici-types "~6.21.0" + "@types/node@^18.13.0": version "18.19.66" resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.66.tgz#0937a47904ceba5994eedf5cf4b6d503d8d6136c" @@ -1246,6 +1486,16 @@ resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.7.tgz#50ae4353eaaddc04044279812f52c8c65857dbcb" integrity sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ== +"@types/request@^2.48.8": + version "2.48.12" + resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.12.tgz#0f590f615a10f87da18e9790ac94c29ec4c5ef30" + integrity sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw== + dependencies: + "@types/caseless" "*" + "@types/node" "*" + "@types/tough-cookie" "*" + form-data "^2.5.0" + "@types/semver@^7.3.12": version "7.5.8" resolved 
"https://registry.yarnpkg.com/@types/semver/-/semver-7.5.8.tgz#8268a8c57a3e4abd25c165ecd36237db7948a55e" @@ -1290,6 +1540,11 @@ dependencies: "@types/superagent" "*" +"@types/tough-cookie@*": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.5.tgz#cb6e2a691b70cb177c6e3ae9c1d2e8b2ea8cd304" + integrity sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA== + "@types/underscore@^1.11.4": version "1.13.0" resolved "https://registry.yarnpkg.com/@types/underscore/-/underscore-1.13.0.tgz#dd8c034a92e5b8e24650c31af43d807c5340cee4" @@ -1465,6 +1720,11 @@ agent-base@^7.0.2: dependencies: debug "^4.3.4" +agent-base@^7.1.2: + version "7.1.3" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.3.tgz#29435eb821bc4194633a5b89e5bc4703bafc25a1" + integrity sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw== + ajv@^6.12.4: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" @@ -1671,6 +1931,11 @@ asynckit@^0.4.0: resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== +atob-lite@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/atob-lite/-/atob-lite-2.0.0.tgz#0fef5ad46f1bd7a8502c65727f0367d5ee43d696" + integrity sha512-LEeSAWeh2Gfa2FtlQE1shxQ8zi5F9GHarrGKz08TMdODD5T4eH6BMsvtnhbWZ+XQn+Gb6om/917ucvRu7l7ukw== + atomic-sleep@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" @@ -1779,40 +2044,12 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -bare-events@^2.0.0, bare-events@^2.2.0: +bare-events@^2.2.0: version "2.5.0" resolved "https://registry.yarnpkg.com/bare-events/-/bare-events-2.5.0.tgz#305b511e262ffd8b9d5616b056464f8e1b3329cc" integrity sha512-/E8dDe9dsbLyh2qrZ64PEPadOQ0F4gbl1sUJOrmph7xOiIxfY8vwab/4bFLh4Y88/Hk/ujKcrQKc+ps0mv873A== -bare-fs@^2.1.1: - version "2.3.5" - resolved "https://registry.yarnpkg.com/bare-fs/-/bare-fs-2.3.5.tgz#05daa8e8206aeb46d13c2fe25a2cd3797b0d284a" - integrity sha512-SlE9eTxifPDJrT6YgemQ1WGFleevzwY+XAP1Xqgl56HtcrisC2CHCZ2tq6dBpcH2TnNxwUEUGhweo+lrQtYuiw== - dependencies: - bare-events "^2.0.0" - bare-path "^2.0.0" - bare-stream "^2.0.0" - -bare-os@^2.1.0: - version "2.4.4" - resolved "https://registry.yarnpkg.com/bare-os/-/bare-os-2.4.4.tgz#01243392eb0a6e947177bb7c8a45123d45c9b1a9" - integrity sha512-z3UiI2yi1mK0sXeRdc4O1Kk8aOa/e+FNWZcTiPB/dfTWyLypuE99LibgRaQki914Jq//yAWylcAt+mknKdixRQ== - -bare-path@^2.0.0, bare-path@^2.1.0: - version "2.1.3" - resolved "https://registry.yarnpkg.com/bare-path/-/bare-path-2.1.3.tgz#594104c829ef660e43b5589ec8daef7df6cedb3e" - integrity sha512-lh/eITfU8hrj9Ru5quUp0Io1kJWIk1bTjzo7JH1P5dWmQ2EL4hFUlfI8FonAhSlgIfhn63p84CDY/x+PisgcXA== - dependencies: - bare-os "^2.1.0" - -bare-stream@^2.0.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/bare-stream/-/bare-stream-2.4.2.tgz#5a4241ff8a3bdd6d037fc459ab3e41189d2f2576" - integrity sha512-XZ4ln/KV4KT+PXdIWTKjsLY+quqCaEtqqtgGJVPw9AoM73By03ij64YjepK0aQvHSWDb6AfAZwqKaFu68qkrdA== - dependencies: - streamx "^2.20.0" - 
-base64-js@^1.3.0, base64-js@^1.3.1: +base64-js@^1.3.0: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== @@ -1829,14 +2066,10 @@ bignumber.js@^9.0.0: resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.2.tgz#b7c4242259c008903b13707983b5f4bbd31eda0c" integrity sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug== -bl@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" +bmp-js@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/bmp-js/-/bmp-js-0.1.0.tgz#e05a63f796a6c1ff25f4771ec7adadc148c07233" + integrity sha512-vHdS19CnY3hwiNdkaqk93DvjVLfbEcI8mys4UjuWrlX1haDmroo8o4xCzh4wD6DGV6HxRCyauwhHRqMTfERtjw== body-parser@1.20.3, body-parser@^1.20.2: version "1.20.3" @@ -1927,13 +2160,10 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== -buffer@^5.5.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" - integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" +buffer-to-uint8array@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/buffer-to-uint8array/-/buffer-to-uint8array-1.1.0.tgz#cf6f41287c022f458da752c391c1a8d535ec5f72" + integrity sha512-JVTSbtA6YuOGdu5NL0ffizsBwuwbTXfV7OC91FhazMz9UKP/KlDS+Z7wuiSRClbnTQz52fJgVXI9YDXQRVl2sQ== builtins@^5.0.1: version "5.1.0" @@ -1947,6 +2177,14 @@ bytes@3.1.2: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== +call-bind-apply-helpers@^1.0.1, call-bind-apply-helpers@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz#4b5428c222be985d79c3d82657479dbe0b59b2d6" + integrity sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + call-bind@^1.0.2, call-bind@^1.0.5, call-bind@^1.0.6, call-bind@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" @@ -2021,11 +2259,6 @@ char-regex@^1.0.2: resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - ci-info@^3.2.0: version "3.9.0" resolved 
"https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" @@ -2274,23 +2507,18 @@ debug@^3.2.7: dependencies: ms "^2.1.1" -decompress-response@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" - integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== +debug@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.0.tgz#2b3f2aea2ffeb776477460267377dc8710faba8a" + integrity sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA== dependencies: - mimic-response "^3.1.0" + ms "^2.1.3" dedent@^1.0.0: version "1.5.3" resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.5.3.tgz#99aee19eb9bae55a67327717b6e848d0bf777e5a" integrity sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ== -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - deep-is@^0.1.3: version "0.1.4" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" @@ -2301,7 +2529,7 @@ deepmerge@^3.2.0: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-3.3.0.tgz#d3c47fd6f3a93d517b14426b0628a17b0125f5f7" integrity sha512-GRQOafGHwMHpjPx9iCvTgpu9NojZ49q794EEL94JVEw6VaeA8XTUyBKvAkOOjBX9oJNiV6G3P+T+tihFjo2TqA== -deepmerge@^4.2.2: +deepmerge@^4.2.2, deepmerge@^4.3.1: version "4.3.1" resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== @@ -2339,7 +2567,7 @@ destroy@1.2.0: resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== -detect-libc@^2.0.0, detect-libc@^2.0.2: +detect-libc@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.3.tgz#f0cd503b40f9939b894697d19ad50895e30cf700" integrity sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw== @@ -2438,6 +2666,20 @@ dotenv@^16.4.4: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== +dtype@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/dtype/-/dtype-2.0.0.tgz#cd052323ce061444ecd2e8f5748f69a29be28434" + integrity sha512-s2YVcLKdFGS0hpFqJaTwscsyt0E8nNFdmo73Ocd81xNPj4URI4rj6D60A+vFMIw7BXWlb4yRkEwfBqcZzPGiZg== + +dunder-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/dunder-proto/-/dunder-proto-1.0.1.tgz#d7ae667e1dc83482f8b70fd0f6eefc50da30f58a" + integrity sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A== + dependencies: + call-bind-apply-helpers "^1.0.1" + es-errors "^1.3.0" + gopd "^1.2.0" + duplexify@^4.0.0, duplexify@^4.1.1: version "4.1.3" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-4.1.3.tgz#a07e1c0d0a2c001158563d32592ba58bddb0236f" @@ -2587,6 
+2829,11 @@ es-define-property@^1.0.0: dependencies: get-intrinsic "^1.2.4" +es-define-property@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.1.tgz#983eb2f9a6724e9303f61addf011c72e09e0b0fa" + integrity sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g== + es-errors@^1.2.1, es-errors@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" @@ -2620,6 +2867,13 @@ es-object-atoms@^1.0.0: dependencies: es-errors "^1.3.0" +es-object-atoms@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz#1c4f2c4837327597ce69d2ca190a7fdd172338c1" + integrity sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA== + dependencies: + es-errors "^1.3.0" + es-set-tostringtag@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz#8bb60f0a440c2e4281962428438d58545af39777" @@ -2629,6 +2883,16 @@ es-set-tostringtag@^2.0.3: has-tostringtag "^1.0.2" hasown "^2.0.1" +es-set-tostringtag@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz#f31dbbe0c183b00a6d26eb6325c810c0fd18bd4d" + integrity sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA== + dependencies: + es-errors "^1.3.0" + get-intrinsic "^1.2.6" + has-tostringtag "^1.0.2" + hasown "^2.0.2" + es-shim-unscopables@^1.0.0, es-shim-unscopables@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz#1f6942e71ecc7835ed1c8a83006d8771a63a3763" @@ -2937,11 +3201,6 @@ exit@^0.1.2: resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== -expand-template@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" - integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== - expect@^29.0.0, expect@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" @@ -3067,6 +3326,11 @@ fb-watchman@^2.0.0: dependencies: bser "2.1.1" +fflate@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.8.2.tgz#fc8631f5347812ad6028bbe4a2308b2792aa1dea" + integrity sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A== + file-entry-cache@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" @@ -3074,6 +3338,21 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" +file-type@^10.9.0: + version "10.11.0" + resolved "https://registry.yarnpkg.com/file-type/-/file-type-10.11.0.tgz#2961d09e4675b9fb9a3ee6b69e9cd23f43fd1890" + integrity sha512-uzk64HRpUZyTGZtVuvrjP0FYxzQrBf4rojot6J65YMEbwBLB0CWm0CLojVpwpmFmxcE/lkvYICgfcGozbBq6rw== + +file-type@^20.4.1: + version "20.4.1" + resolved "https://registry.yarnpkg.com/file-type/-/file-type-20.4.1.tgz#8a58cf0922c6098af0ca5d84d5cf859c0c0f56a5" + integrity 
sha512-hw9gNZXUfZ02Jo0uafWLaFVPter5/k2rfcrjFJJHX/77xtSDOfJuEFb6oKlFV86FLP1SuyHMW1PSk0U9M5tKkQ== + dependencies: + "@tokenizer/inflate" "^0.2.6" + strtok3 "^10.2.0" + token-types "^6.0.0" + uint8array-extras "^1.4.0" + filelist@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" @@ -3160,6 +3439,13 @@ flatted@^3.2.9: resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.2.tgz#adba1448a9841bec72b42c532ea23dbbedef1a27" integrity sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA== +flatten-vertex-data@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/flatten-vertex-data/-/flatten-vertex-data-1.0.2.tgz#889fd60bea506006ca33955ee1105175fb620219" + integrity sha512-BvCBFK2NZqerFTdMDgqfHBwxYWnxeCkwONsw6PvBMcUXqo8U/KDWwmXhqx1x2kLIg7DqIsJfOaJFOmlua3Lxuw== + dependencies: + dtype "^2.0.0" + follow-redirects@^1.15.6, follow-redirects@^1.15.9: version "1.15.9" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" @@ -3180,6 +3466,17 @@ foreground-child@^3.1.0: cross-spawn "^7.0.0" signal-exit "^4.0.1" +form-data@^2.5.0: + version "2.5.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.3.tgz#f9bcf87418ce748513c0c3494bb48ec270c97acc" + integrity sha512-XHIrMD0NpDrNM/Ckf7XJiBbLl57KEhT3+i3yY+eWm+cqYZJQTZrKo8Y8AWKnuV5GT4scfuUGt9LzNoIx3dU1nQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + es-set-tostringtag "^2.1.0" + mime-types "^2.1.35" + safe-buffer "^5.2.1" + form-data@^3.0.1: version "3.0.2" resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.2.tgz#83ad9ced7c03feaad97e293d6f6091011e1659c8" @@ -3218,11 +3515,6 @@ fresh@0.5.2: resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -3263,6 +3555,17 @@ gaxios@^5.0.0, gaxios@^5.0.1: is-stream "^2.0.0" node-fetch "^2.6.9" +gaxios@^6.0.0, gaxios@^6.1.1: + version "6.7.1" + resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-6.7.1.tgz#ebd9f7093ede3ba502685e73390248bb5b7f71fb" + integrity sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ== + dependencies: + extend "^3.0.2" + https-proxy-agent "^7.0.1" + is-stream "^2.0.0" + node-fetch "^2.6.9" + uuid "^9.0.1" + gcp-metadata@^5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-5.3.0.tgz#6f45eb473d0cb47d15001476b48b663744d25408" @@ -3271,6 +3574,15 @@ gcp-metadata@^5.3.0: gaxios "^5.0.0" json-bigint "^1.0.0" +gcp-metadata@^6.1.0: + version "6.1.1" + resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-6.1.1.tgz#f65aa69f546bc56e116061d137d3f5f90bdec494" + integrity sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A== + dependencies: + gaxios "^6.1.1" + google-logging-utils "^0.0.2" + json-bigint "^1.0.0" + gensync@^1.0.0-beta.2: 
version "1.0.0-beta.2" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" @@ -3292,11 +3604,35 @@ get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.3, get-intrinsic@ has-symbols "^1.0.3" hasown "^2.0.0" +get-intrinsic@^1.2.6: + version "1.3.0" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz#743f0e3b6964a93a5491ed1bffaae054d7f98d01" + integrity sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ== + dependencies: + call-bind-apply-helpers "^1.0.2" + es-define-property "^1.0.1" + es-errors "^1.3.0" + es-object-atoms "^1.1.1" + function-bind "^1.1.2" + get-proto "^1.0.1" + gopd "^1.2.0" + has-symbols "^1.1.0" + hasown "^2.0.2" + math-intrinsics "^1.1.0" + get-package-type@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== +get-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/get-proto/-/get-proto-1.0.1.tgz#150b3f2743869ef3e851ec0c49d15b1d14d00ee1" + integrity sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g== + dependencies: + dunder-proto "^1.0.1" + es-object-atoms "^1.0.0" + get-stdin@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" @@ -3316,11 +3652,6 @@ get-symbol-description@^1.0.2: es-errors "^1.3.0" get-intrinsic "^1.2.4" -github-from-package@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" - integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== - glob-parent@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" @@ -3406,6 +3737,41 @@ google-auth-library@^8.0.1: jws "^4.0.0" lru-cache "^6.0.0" +google-auth-library@^9.15.1, google-auth-library@^9.3.0: + version "9.15.1" + resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-9.15.1.tgz#0c5d84ed1890b2375f1cd74f03ac7b806b392928" + integrity sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng== + dependencies: + base64-js "^1.3.0" + ecdsa-sig-formatter "^1.0.11" + gaxios "^6.1.1" + gcp-metadata "^6.1.0" + gtoken "^7.0.0" + jws "^4.0.0" + +google-gax@^4.3.3: + version "4.4.1" + resolved "https://registry.yarnpkg.com/google-gax/-/google-gax-4.4.1.tgz#95a9cf7ee7777ac22d1926a45b5f886dd8beecae" + integrity sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg== + dependencies: + "@grpc/grpc-js" "^1.10.9" + "@grpc/proto-loader" "^0.7.13" + "@types/long" "^4.0.0" + abort-controller "^3.0.0" + duplexify "^4.0.0" + google-auth-library "^9.3.0" + node-fetch "^2.7.0" + object-hash "^3.0.0" + proto3-json-serializer "^2.0.2" + protobufjs "^7.3.2" + retry-request "^7.0.0" + uuid "^9.0.1" + +google-logging-utils@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/google-logging-utils/-/google-logging-utils-0.0.2.tgz#5fd837e06fa334da450433b9e3e1870c1594466a" + integrity sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ== + 
google-p12-pem@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-4.0.1.tgz#82841798253c65b7dc2a4e5fe9df141db670172a" @@ -3420,6 +3786,11 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" +gopd@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.2.0.tgz#89f56b8217bdbc8802bd299df6d7f1081d7e51a1" + integrity sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg== + graceful-fs@^4.1.15, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" @@ -3474,6 +3845,14 @@ gtoken@^6.1.0: google-p12-pem "^4.0.0" jws "^4.0.0" +gtoken@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-7.1.0.tgz#d61b4ebd10132222817f7222b1e6064bd463fc26" + integrity sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw== + dependencies: + gaxios "^6.0.0" + jws "^4.0.0" + has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -3501,6 +3880,11 @@ has-symbols@^1.0.2, has-symbols@^1.0.3: resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== +has-symbols@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.1.0.tgz#fc9c6a783a084951d0b971fe1018de813707a338" + integrity sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ== + has-tostringtag@^1.0.0, has-tostringtag@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz#2cdc42d40bef2e5b4eeab7c01a73c54ce7ab5abc" @@ -3515,6 +3899,11 @@ hasown@^2.0.0, hasown@^2.0.1, hasown@^2.0.2: dependencies: function-bind "^1.1.2" +heap-js@^2.2.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/heap-js/-/heap-js-2.6.0.tgz#72a2fc9efdb8b7b103c351b6e936d18325104a15" + integrity sha512-trFMIq3PATiFRiQmNNeHtsrkwYRByIXUbYNbotiY9RLVfMkdwZdd2eQ38mGt7BRiCKBaj1DyBAIHmm7mmXPuuw== + hexoid@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/hexoid/-/hexoid-1.0.0.tgz#ad10c6573fb907de23d9ec63a711267d9dc9bc18" @@ -3563,6 +3952,14 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" +https-proxy-agent@^7.0.1: + version "7.0.6" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz#da8dfeac7da130b05c2ba4b59c9b6cd66611a6b9" + integrity sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw== + dependencies: + agent-base "^7.1.2" + debug "4" + https-proxy-agent@^7.0.5: version "7.0.5" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz#9e8b5013873299e11fab6fd548405da2d6c602b2" @@ -3595,7 +3992,7 @@ iconv-lite@0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" -ieee754@^1.1.13: +ieee754@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== @@ -3605,6 +4002,37 @@ ignore@^5.1.1, ignore@^5.2.0: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.2.tgz#3cd40e729f3643fd87cb04e50bf0eb722bc596f5" integrity 
sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g== +image-decode@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/image-decode/-/image-decode-1.2.2.tgz#4ca74fc534e7133c5096ed712d8c5bceb5ba1fc6" + integrity sha512-WJSWrPNmEnWVYGkY22bA6206MKYjfGl01fdlwIp6ovdnPL2qGJyC3wQj8QqYbdblchzwgglFg8Jcb/1f6fhaTQ== + dependencies: + bmp-js "^0.1.0" + buffer-to-uint8array "^1.1.0" + image-type "^3.0.0" + jpeg-js "^0.3.4" + omggif "^1.0.9" + pngjs "^3.3.3" + to-array-buffer "^3.0.0" + utif "^2.0.1" + +image-dimensions@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/image-dimensions/-/image-dimensions-2.3.0.tgz#ddb549de9e0825a4df2568bafae9a6dd306354d4" + integrity sha512-8Ar3lsO6+/JLfnUeHnR8Jp/IyQR85Jut5t4Swy1yiXNwj/xM9h5V53v5KE/m/ZSMG4qGRopnSy37uPzKyQCv0A== + +image-size@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/image-size/-/image-size-2.0.2.tgz#84a7b43704db5736f364bf0d1b029821299b4bdc" + integrity sha512-IRqXKlaXwgSMAMtpNzZa1ZAe8m+Sa1770Dhk8VkSsP9LS+iHD62Zd8FQKs8fbPiagBE7BzoFX23cxFnwshpV6w== + +image-type@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/image-type/-/image-type-3.1.0.tgz#9148944c40c16d657174af2bd1af39d767c2c29f" + integrity sha512-edYRXKQ3WD2yHXFGUbwoJVn5v7j1A6Z505uZUYIfzCwOOhPGLYSc3VOucF9fqbsaUbgb37DdjOU+WV4uo7ZooQ== + dependencies: + file-type "^10.9.0" + immer@^9.0.15: version "9.0.21" resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.21.tgz#1e025ea31a40f24fb064f1fef23e931496330176" @@ -3644,11 +4072,6 @@ inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== -ini@~1.3.0: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - internal-slot@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.7.tgz#c06dcca3ed874249881007b0a5523b172a190802" @@ -3696,6 +4119,11 @@ is-async-function@^2.0.0: dependencies: has-tostringtag "^1.0.0" +is-base64@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-base64/-/is-base64-0.1.0.tgz#a6f20610c6ef4863a51cba32bc0222544b932622" + integrity sha512-WRRyllsGXJM7ZN7gPTCCQ/6wNPTRDwiWdPK66l5sJzcU/oOzcIcRRf0Rux8bkpox/1yjt0F6VJRsQOIG2qz5sg== + is-bigint@^1.0.1: version "1.0.4" resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" @@ -3703,6 +4131,11 @@ is-bigint@^1.0.1: dependencies: has-bigints "^1.0.1" +is-blob@^2.0.1: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-blob/-/is-blob-2.1.0.tgz#e36cd82c90653f1e1b930f11baf9c64216a05385" + integrity sha512-SZ/fTft5eUhQM6oF/ZaASFDEdbFVe89Imltn9uZr03wdKMcWNVYSMjQPFtg05QuNkt5l5c135ElvXEQG0rk4tw== + is-boolean-object@^1.1.0: version "1.1.2" resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" @@ -3832,6 +4265,11 @@ is-shared-array-buffer@^1.0.2, is-shared-array-buffer@^1.0.3: dependencies: call-bind "^1.0.7" +is-stream-ended@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-stream-ended/-/is-stream-ended-0.1.4.tgz#f50224e95e06bce0e356d440a4827cd35b267eda" + 
integrity sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw== + is-stream@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" @@ -4356,6 +4794,11 @@ jose@^4.14.6: resolved "https://registry.yarnpkg.com/jose/-/jose-4.15.9.tgz#9b68eda29e9a0614c042fa29387196c7dd800100" integrity sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA== +jpeg-js@^0.3.4: + version "0.3.7" + resolved "https://registry.yarnpkg.com/jpeg-js/-/jpeg-js-0.3.7.tgz#471a89d06011640592d314158608690172b1028d" + integrity sha512-9IXdWudL61npZjvLuVe/ktHiA41iE8qFyLB+4VDTblEsWBzeg8WQTlktdUK4CdncUqtUgUg0bbOmTE2bKBKaBQ== + "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" @@ -4625,6 +5068,11 @@ lodash-es@^4.17.21: resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== +lodash.camelcase@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== + lodash.clonedeep@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" @@ -4680,6 +5128,11 @@ lodash.once@^4.0.0: resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== +lodash.snakecase@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" + integrity sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw== + lodash.sortby@^4.7.0: version "4.7.0" resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" @@ -4705,6 +5158,11 @@ long@^4.0.0: resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== +long@^5.0.0: + version "5.3.1" + resolved "https://registry.yarnpkg.com/long/-/long-5.3.1.tgz#9d4222d3213f38a5ec809674834e0f0ab21abe96" + integrity sha512-ka87Jz3gcx/I7Hal94xaN2tZEOPoUOEVftkQqZx2EeQRN7LGdfLlI3FvZ+7WDplm+vK2Urx9ULrvSowtdCieng== + loose-envify@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" @@ -4782,6 +5240,11 @@ makeerror@1.0.12: dependencies: tmpl "1.0.5" +math-intrinsics@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9" + integrity sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g== + media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" @@ -4830,7 +5293,7 @@ mime-db@1.52.0: resolved 
"https://registry.yarnpkg.com/mime-db/-/mime-db-1.53.0.tgz#3cb63cd820fc29896d9d4e8c32ab4fcd74ccb447" integrity sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg== -mime-types@^2.0.8, mime-types@^2.1.12, mime-types@~2.1.24, mime-types@~2.1.34: +mime-types@^2.0.8, mime-types@^2.1.12, mime-types@^2.1.35, mime-types@~2.1.24, mime-types@~2.1.34: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== @@ -4857,11 +5320,6 @@ mimic-fn@^2.1.0: resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== -mimic-response@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" - integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== - minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" @@ -4883,7 +5341,7 @@ minimatch@^9.0.4: dependencies: brace-expansion "^2.0.1" -minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.6: +minimist@^1.2.0, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== @@ -4893,11 +5351,6 @@ minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.6: resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== -mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - mongodb-connection-string-url@^2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/mongodb-connection-string-url/-/mongodb-connection-string-url-2.6.0.tgz#57901bf352372abdde812c81be47b75c6b2ec5cf" @@ -5017,10 +5470,10 @@ nanoid@^3.3.7: resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf" integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w== -napi-build-utils@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806" - integrity sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg== +nanoid@^5.1.5: + version "5.1.5" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-5.1.5.tgz#f7597f9d9054eb4da9548cdd53ca70f1790e87de" + integrity sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw== natural-compare-lite@^1.4.0: version "1.4.0" @@ -5058,24 +5511,12 @@ nock@^13.3.0: json-stringify-safe "^5.0.1" propagate "^2.0.0" -node-abi@^3.3.0: - version "3.71.0" - resolved 
"https://registry.yarnpkg.com/node-abi/-/node-abi-3.71.0.tgz#52d84bbcd8575efb71468fbaa1f9a49b2c242038" - integrity sha512-SZ40vRiy/+wRTf21hxkkEjPJZpARzUMVcJoQse2EF8qkUWbbO2z7vd5oA/H6bVH6SZQ5STGcu0KRDS7biNRfxw== - dependencies: - semver "^7.3.5" - node-abort-controller@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.1.1.tgz#a94377e964a9a37ac3976d848cb5c765833b8548" integrity sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ== -node-addon-api@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" - integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== - -node-fetch@2, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9: +node-fetch@2, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: version "2.7.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== @@ -5176,6 +5617,11 @@ object.values@^1.1.6, object.values@^1.2.0: define-properties "^1.2.1" es-object-atoms "^1.0.0" +omggif@^1.0.9: + version "1.0.10" + resolved "https://registry.yarnpkg.com/omggif/-/omggif-1.0.10.tgz#ddaaf90d4a42f532e9e7cb3a95ecdd47f17c7b19" + integrity sha512-LMJTtvgc/nugXj0Vcrrs68Mn2D1r0zf630VNtqtpI1FEO7e+O9FP4gqs9AcnBaSEeoHIPm28u6qgPR0oyEpGSw== + on-exit-leak-free@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz#fed195c9ebddb7d9e4c3842f93f281ac8dadd3b8" @@ -5214,6 +5660,11 @@ optionator@^0.9.3: type-check "^0.4.0" word-wrap "^1.2.5" +p-defer@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-3.0.0.tgz#d1dceb4ee9b2b604b1d94ffec83760175d4e6f83" + integrity sha512-ugZxsxmtTln604yeYd29EGrNhazN2lywetzpKhfmQjW/VJmhpDmWbiX+h0zL8V91R0UXkhb3KtPmyq9PZw3aYw== + p-limit@^2.0.0, p-limit@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" @@ -5273,6 +5724,11 @@ package-json-from-dist@^1.0.0: resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505" integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== +pako@^1.0.5: + version "1.0.11" + resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" + integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== + param-case@^1.1.0: version "1.1.2" resolved "https://registry.yarnpkg.com/param-case/-/param-case-1.1.2.tgz#dcb091a43c259b9228f1c341e7b6a44ea0bf9743" @@ -5378,6 +5834,11 @@ path-type@^4.0.0: resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== +peek-readable@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/peek-readable/-/peek-readable-7.0.0.tgz#c6e4e78ec76f7005e5f6b51ffc93fdb91ede6512" + integrity sha512-nri2TO5JE3/mRryik9LlHFT53cgHfRK0Lt0BAZQXku/AW3E6XLt2GaY8siWi7dvW/m1z0ecn+J+bpDa9ZN3IsQ== + pend@~1.2.0: version "1.2.0" resolved 
"https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" @@ -5487,6 +5948,11 @@ pkg-dir@^4.1.0, pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" +pngjs@^3.3.3: + version "3.4.0" + resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-3.4.0.tgz#99ca7d725965fb655814eaf65f38f12bbdbf555f" + integrity sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w== + point-in-polygon@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/point-in-polygon/-/point-in-polygon-1.1.0.tgz#b0af2616c01bdee341cbf2894df643387ca03357" @@ -5506,24 +5972,6 @@ postcss@^8.3.11: picocolors "^1.1.1" source-map-js "^1.2.1" -prebuild-install@^7.1.1: - version "7.1.2" - resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.2.tgz#a5fd9986f5a6251fbc47e1e5c65de71e68c0a056" - integrity sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ== - dependencies: - detect-libc "^2.0.0" - expand-template "^2.0.3" - github-from-package "0.0.0" - minimist "^1.2.3" - mkdirp-classic "^0.5.3" - napi-build-utils "^1.0.1" - node-abi "^3.3.0" - pump "^3.0.0" - rc "^1.2.7" - simple-get "^4.0.0" - tar-fs "^2.0.0" - tunnel-agent "^0.6.0" - prelude-ls@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" @@ -5575,6 +6023,31 @@ property-expr@^2.0.4, property-expr@^2.0.5: resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-2.0.6.tgz#f77bc00d5928a6c748414ad12882e83f24aec1e8" integrity sha512-SVtmxhRE/CGkn3eZY1T6pC8Nln6Fr/lu1mKSgRud0eC73whjGfoAogbn78LkD8aFL0zz3bAFerKSnOl7NlErBA== +proto3-json-serializer@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz#5b705203b4d58f3880596c95fad64902617529dd" + integrity sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ== + dependencies: + protobufjs "^7.2.5" + +protobufjs@^7.2.5, protobufjs@^7.3.2: + version "7.4.0" + resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.4.0.tgz#7efe324ce9b3b61c82aae5de810d287bc08a248a" + integrity sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/node" ">=13.7.0" + long "^5.0.0" + proxy-addr@~2.0.7: version "2.0.7" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" @@ -5676,16 +6149,6 @@ rbush@^3.0.1: dependencies: quickselect "^2.0.0" -rc@^1.2.7: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - react-is@^16.13.1: version "16.13.1" resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" @@ -5696,7 +6159,7 @@ react-is@^18.0.0: resolved 
"https://registry.yarnpkg.com/react-is/-/react-is-18.3.1.tgz#e83557dc12eae63a99e003a46388b1dcbb44db7e" integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg== -"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: +"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.1.1, readable-stream@^3.6.0: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== @@ -5806,6 +6269,15 @@ retry-request@^5.0.0: debug "^4.1.1" extend "^3.0.2" +retry-request@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/retry-request/-/retry-request-7.0.2.tgz#60bf48cfb424ec01b03fca6665dee91d06dd95f3" + integrity sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w== + dependencies: + "@types/request" "^2.48.8" + extend "^3.0.2" + teeny-request "^9.0.0" + retry@0.13.1, retry@^0.13.1: version "0.13.1" resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" @@ -5845,7 +6317,7 @@ safe-array-concat@^1.1.2: has-symbols "^1.0.3" isarray "^2.0.5" -safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -5891,11 +6363,16 @@ semver@^6.0.0, semver@^6.3.0, semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.0.0, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3: +semver@^7.0.0, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3: version "7.6.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== +semver@^7.7.1: + version "7.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.1.tgz#abd5098d82b18c6c81f6074ff2647fd3e7220c9f" + integrity sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA== + send@0.19.0: version "0.19.0" resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" @@ -5967,19 +6444,35 @@ sha.js@^2.4.11: inherits "^2.0.1" safe-buffer "^5.0.1" -sharp@^0.32.0: - version "0.32.6" - resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.32.6.tgz#6ad30c0b7cd910df65d5f355f774aa4fce45732a" - integrity sha512-KyLTWwgcR9Oe4d9HwCwNM2l7+J0dUQwn/yf7S0EnTtb0eVS4RxO0eUSvxPtzT4F3SY+C4K6fqdv/DO27sJ/v/w== +sharp@^0.34.1: + version "0.34.1" + resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.34.1.tgz#e5922894b0cc7ddf159eeabc6d5668e4e8b11d61" + integrity sha512-1j0w61+eVxu7DawFJtnfYcvSv6qPFvfTaqzTQ2BLknVhHTwGS8sc63ZBF4rzkWMBVKybo4S5OBtDdZahh2A1xg== dependencies: color "^4.2.3" - detect-libc "^2.0.2" - node-addon-api "^6.1.0" - prebuild-install "^7.1.1" - semver "^7.5.4" - simple-get "^4.0.1" 
- tar-fs "^3.0.4" - tunnel-agent "^0.6.0" + detect-libc "^2.0.3" + semver "^7.7.1" + optionalDependencies: + "@img/sharp-darwin-arm64" "0.34.1" + "@img/sharp-darwin-x64" "0.34.1" + "@img/sharp-libvips-darwin-arm64" "1.1.0" + "@img/sharp-libvips-darwin-x64" "1.1.0" + "@img/sharp-libvips-linux-arm" "1.1.0" + "@img/sharp-libvips-linux-arm64" "1.1.0" + "@img/sharp-libvips-linux-ppc64" "1.1.0" + "@img/sharp-libvips-linux-s390x" "1.1.0" + "@img/sharp-libvips-linux-x64" "1.1.0" + "@img/sharp-libvips-linuxmusl-arm64" "1.1.0" + "@img/sharp-libvips-linuxmusl-x64" "1.1.0" + "@img/sharp-linux-arm" "0.34.1" + "@img/sharp-linux-arm64" "0.34.1" + "@img/sharp-linux-s390x" "0.34.1" + "@img/sharp-linux-x64" "0.34.1" + "@img/sharp-linuxmusl-arm64" "0.34.1" + "@img/sharp-linuxmusl-x64" "0.34.1" + "@img/sharp-wasm32" "0.34.1" + "@img/sharp-win32-ia32" "0.34.1" + "@img/sharp-win32-x64" "0.34.1" shebang-command@^2.0.0: version "2.0.0" @@ -6018,20 +6511,6 @@ signal-exit@^4.0.1: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== -simple-concat@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" - integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== - -simple-get@^4.0.0, simple-get@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" - integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== - dependencies: - decompress-response "^6.0.0" - once "^1.3.1" - simple-concat "^1.0.0" - simple-swizzle@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" @@ -6165,7 +6644,7 @@ stream-shift@^1.0.2: resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.3.tgz#85b8fab4d71010fc3ba8772e8046cc49b8a3864b" integrity sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ== -streamx@^2.15.0, streamx@^2.20.0: +streamx@^2.15.0: version "2.20.2" resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.20.2.tgz#6a8911959d6f307c19781a1d19ecd94b5f042d78" integrity sha512-aDGDLU+j9tJcUdPGOaHmVF1u/hhI+CsGkT02V3OKlHDV7IukOI+nTWAGkiZEKCO35rWN1wIr4tS7YFr1f4qSvA== @@ -6184,6 +6663,14 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" +string-to-arraybuffer@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-to-arraybuffer/-/string-to-arraybuffer-1.0.2.tgz#161147fbadea02e28b0935002cec4c40f1ca7f0a" + integrity sha512-DaGZidzi93dwjQen5I2osxR9ERS/R7B1PFyufNMnzhj+fmlDQAc1DSDIJVJhgI8Oq221efIMbABUBdPHDRt43Q== + dependencies: + atob-lite "^2.0.0" + is-base64 "^0.1.0" + "string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" @@ -6313,16 +6800,19 @@ strip-json-comments@^3.1.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -strip-json-comments@~2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== - strnum@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db" integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== +strtok3@^10.2.0: + version "10.2.2" + resolved "https://registry.yarnpkg.com/strtok3/-/strtok3-10.2.2.tgz#a4c6d78d15db02c5eb20d92af3eedf81edaf09d2" + integrity sha512-Xt18+h4s7Z8xyZ0tmBoRmzxcop97R4BAh+dXouUDCYn+Em+1P3qpkUfI5ueWLT8ynC5hZ+q4iPEmGG1urvQGBg== + dependencies: + "@tokenizer/token" "^0.3.0" + peek-readable "^7.0.0" + stubs@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/stubs/-/stubs-3.0.0.tgz#e8d2ba1fa9c90570303c030b6900f7d5f89abe5b" @@ -6396,39 +6886,7 @@ swap-case@^1.1.0: lower-case "^1.1.1" upper-case "^1.1.1" -tar-fs@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - -tar-fs@^3.0.4: - version "3.0.6" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.6.tgz#eaccd3a67d5672f09ca8e8f9c3d2b89fa173f217" - integrity sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w== - dependencies: - pump "^3.0.0" - tar-stream "^3.1.5" - optionalDependencies: - bare-fs "^2.1.1" - bare-path "^2.1.0" - -tar-stream@^2.1.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - -tar-stream@^3.1.5, tar-stream@^3.1.7: +tar-stream@^3.1.7: version "3.1.7" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b" integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ== @@ -6448,6 +6906,17 @@ teeny-request@^8.0.0: stream-events "^1.0.5" uuid "^9.0.0" +teeny-request@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/teeny-request/-/teeny-request-9.0.0.tgz#18140de2eb6595771b1b02203312dfad79a4716d" + integrity sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g== + dependencies: + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.0" + node-fetch "^2.6.9" + stream-events "^1.0.5" + uuid "^9.0.0" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -6505,6 +6974,15 @@ tmpl@1.0.5: resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== +to-array-buffer@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/to-array-buffer/-/to-array-buffer-3.2.0.tgz#cb684dd691a7368c3b249c2348d75227f7d4dbb4" + integrity 
sha512-zN33mwi0gpL+7xW1ITLfJ48CEj6ZQW0ZAP0MU+2W3kEY0PAIncyuxmD4OqkUVhPAbTP7amq9j/iwvZKYS+lzSQ== + dependencies: + flatten-vertex-data "^1.0.2" + is-blob "^2.0.1" + string-to-arraybuffer "^1.0.0" + to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" @@ -6517,6 +6995,14 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== +token-types@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/token-types/-/token-types-6.0.0.tgz#1ab26be1ef9c434853500c071acfe5c8dd6544a3" + integrity sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA== + dependencies: + "@tokenizer/token" "^0.3.0" + ieee754 "^1.2.1" + toposort@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330" @@ -6606,13 +7092,6 @@ tsutils@^3.21.0: dependencies: tslib "^1.8.1" -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== - dependencies: - safe-buffer "^5.0.1" - type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" @@ -6711,6 +7190,11 @@ typesense@^1.8.2: axios "^1.6.0" loglevel "^1.8.1" +uint8array-extras@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/uint8array-extras/-/uint8array-extras-1.4.0.tgz#e42a678a6dd335ec2d21661333ed42f44ae7cc74" + integrity sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ== + unbox-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" @@ -6736,6 +7220,11 @@ undici-types@~6.20.0: resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.20.0.tgz#8171bf22c1f588d1554d55bf204bc624af388433" integrity sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg== +undici-types@~6.21.0: + version "6.21.0" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.21.0.tgz#691d00af3909be93a7faa13be61b3a5b50ef12cb" + integrity sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ== + unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" @@ -6768,6 +7257,13 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" +utif@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/utif/-/utif-2.0.1.tgz#9e1582d9bbd20011a6588548ed3266298e711759" + integrity sha512-Z/S1fNKCicQTf375lIP9G8Sa1H/phcysstNrrSdZKj1f9g58J4NMgb5IgiEZN9/nLMPDwF0W7hdOe9Qq2IYoLg== + dependencies: + pako "^1.0.5" + util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -6795,7 +7291,7 @@ uuid@^8.0.0, uuid@^8.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity 
sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -uuid@^9.0.0: +uuid@^9.0.0, uuid@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== @@ -7003,7 +7499,7 @@ yargs-parser@^21.1.1: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== -yargs@^17.3.1: +yargs@^17.3.1, yargs@^17.7.2: version "17.7.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==