From 6eeefebe8a54f4db9e4826cba4a0847ae8ec2d14 Mon Sep 17 00:00:00 2001
From: Travis Vachon
Date: Thu, 4 Apr 2024 13:39:55 -0700
Subject: [PATCH] feat: lock w3up uploads feature switch open (#2556)

commenting code out here because I'll make a cleanup pass to remove the
switch entirely once this is stable in production

---------

Co-authored-by: Benjamin Goering <171782+gobengo@users.noreply.github.com>
---
 packages/api/src/routes/nfts-upload.js |  14 +-
 packages/api/test/nfts-upload.spec.js  | 294 ++++++++++++-------------
 2 files changed, 153 insertions(+), 155 deletions(-)

diff --git a/packages/api/src/routes/nfts-upload.js b/packages/api/src/routes/nfts-upload.js
index bafc622365..d13f42021a 100644
--- a/packages/api/src/routes/nfts-upload.js
+++ b/packages/api/src/routes/nfts-upload.js
@@ -117,13 +117,13 @@ export async function nftUpload(event, ctx) {
  * @param {string} event.user.email - email address of user associated with event
  */
 function w3upFeatureSwitchEnabled(context, event) {
-  const { W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS = '[]' } = context
-  const allowedEmails = JSON.parse(W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS)
-  if (!Array.isArray(allowedEmails)) return false
-  const eventHasAllowedEmail = allowedEmails.find(
-    (allowed) => allowed === event.user.email
-  )
-  return eventHasAllowedEmail
+  // const { W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS = '[]' } = context
+  // const allowedEmails = JSON.parse(W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS)
+  // if (!Array.isArray(allowedEmails)) return false
+  // const eventHasAllowedEmail = allowedEmails.find(
+  //   (allowed) => allowed === event.user.email
+  // )
+  return true
 }
 
 /**
diff --git a/packages/api/test/nfts-upload.spec.js b/packages/api/test/nfts-upload.spec.js
index 781412ef8c..a187266843 100644
--- a/packages/api/test/nfts-upload.spec.js
+++ b/packages/api/test/nfts-upload.spec.js
@@ -173,8 +173,8 @@ test.serial('should forward uploads to W3UP_URL', async (t) => {
       headers: { Authorization: `Bearer ${client2.token}` },
       body: file,
     })
-    // should not have incremented
-    t.is(mockW3upStoreAddCount, storeAddCountBeforeClient2)
+    // should have incremented because feature switch allows w3up for all uploaders
+    t.is(mockW3upStoreAddCount, storeAddCountBeforeClient2 + 1)
   }
 })
 
@@ -330,92 +330,91 @@ test.serial('should upload a single CAR file', async (t) => {
   t.is(data.content.dag_size, 15, 'correct dag size')
 })
 
-test.serial(
-  'should check dag completness with linkdex-api for partial CAR',
-  async (t) => {
-    const client = await createClientWithUser(t)
-    const config = getTestServiceConfig(t)
-    const mf = getMiniflareContext(t)
+// TODO verify with @alanshaw that we don't need to do this in the new upload flow
+// TODO remove this once we remove legacy uploads
+test.skip('should check dag completeness with linkdex-api for partial CAR', async (t) => {
+  const client = await createClientWithUser(t)
+  const config = getTestServiceConfig(t)
+  const mf = getMiniflareContext(t)
 
-    const leaf1 = await Block.encode({
-      value: pb.prepare({ Data: 'leaf1' }),
-      codec: pb,
-      hasher: sha256,
-    })
-    const leaf2 = await Block.encode({
-      value: pb.prepare({ Data: 'leaf2' }),
-      codec: pb,
-      hasher: sha256,
-    })
-    const parent = await Block.encode({
-      value: pb.prepare({ Links: [leaf1.cid, leaf2.cid] }),
-      codec: pb,
-      hasher: sha256,
-    })
-    const cid = parent.cid.toString()
-    const { writer, out } = CarWriter.create(parent.cid)
-    writer.put(parent)
-    writer.put(leaf1)
-    // leave out leaf2 to make patial car
-    writer.close()
-    const carBytes = []
-    for await (const 
chunk of out) {
-      carBytes.push(chunk)
-    }
-    const body = new Blob(carBytes)
+  const leaf1 = await Block.encode({
+    value: pb.prepare({ Data: 'leaf1' }),
+    codec: pb,
+    hasher: sha256,
+  })
+  const leaf2 = await Block.encode({
+    value: pb.prepare({ Data: 'leaf2' }),
+    codec: pb,
+    hasher: sha256,
+  })
+  const parent = await Block.encode({
+    value: pb.prepare({ Links: [leaf1.cid, leaf2.cid] }),
+    codec: pb,
+    hasher: sha256,
+  })
+  const cid = parent.cid.toString()
+  const { writer, out } = CarWriter.create(parent.cid)
+  writer.put(parent)
+  writer.put(leaf1)
+  // leave out leaf2 to make partial car
+  writer.close()
+  const carBytes = []
+  for await (const chunk of out) {
+    carBytes.push(chunk)
+  }
+  const body = new Blob(carBytes)
 
-    if (!config.LINKDEX_URL) {
-      throw new Error('LINDEX_URL should be set in test config')
-    }
+  if (!config.LINKDEX_URL) {
+    throw new Error('LINKDEX_URL should be set in test config')
+  }
 
-    const linkdexMock = getLinkdexMock(t)
-    mockLinkdexResponse(linkdexMock, 'Complete')
+  const linkdexMock = getLinkdexMock(t)
+  mockLinkdexResponse(linkdexMock, 'Complete')
 
-    const res = await mf.dispatchFetch('http://miniflare.test/upload', {
-      method: 'POST',
-      headers: {
-        Authorization: `Bearer ${client.token}`,
-        'Content-Type': 'application/car',
-      },
-      body,
-    })
+  const res = await mf.dispatchFetch('http://miniflare.test/upload', {
+    method: 'POST',
+    headers: {
+      Authorization: `Bearer ${client.token}`,
+      'Content-Type': 'application/car',
+    },
+    body,
+  })
 
-    t.truthy(res, 'Server responded')
-    t.true(res.ok, 'Server response ok')
-    const { ok, value } = await res.json()
-    t.truthy(ok, 'Server response payload has `ok` property')
-    t.is(value.cid, cid, 'Server responded with expected CID')
-    t.is(value.type, 'application/car', 'type should match blob mime-type')
+  t.truthy(res, 'Server responded')
+  t.true(res.ok, 'Server response ok')
+  const { ok, value } = await res.json()
+  t.truthy(ok, 'Server response payload has `ok` property')
+  t.is(value.cid, cid, 'Server responded with expected CID')
+  t.is(value.type, 'application/car', 'type should match blob mime-type')
 
-    const db = getRawClient(config)
+  const db = getRawClient(config)
 
-    const { data: upload } = await db
-      .from('upload')
-      .select('*')
-      .match({ source_cid: cid, user_id: client.userId })
-      .single()
+  const { data: upload } = await db
+    .from('upload')
+    .select('*')
+    .match({ source_cid: cid, user_id: client.userId })
+    .single()
 
-    // @ts-ignore
-    t.is(upload.source_cid, cid)
-    t.is(upload.deleted_at, null)
+  // @ts-ignore
+  t.is(upload.source_cid, cid)
+  t.is(upload.deleted_at, null)
 
-    // wait for the call to mock linkdex-api to complete
-    await res.waitUntil()
-    const { data: pin } = await db
-      .from('pin')
-      .select('*')
-      .match({ content_cid: cid, service: 'ElasticIpfs' })
-      .single()
+  // wait for the call to mock linkdex-api to complete
+  await res.waitUntil()
+  const { data: pin } = await db
+    .from('pin')
+    .select('*')
+    .match({ content_cid: cid, service: 'ElasticIpfs' })
+    .single()
 
-    t.is(
-      pin.status,
-      'Pinned',
-      "Status should be pinned when linkdex-api returns 'Complete'"
-    )
-    t.is(pin.service, 'ElasticIpfs')
-    t.is(pin.status, 'Pinned')
-  }
-)
+  t.is(
+    pin.status,
+    'Pinned',
+    "Status should be pinned when linkdex-api returns 'Complete'"
+  )
+  t.is(pin.service, 'ElasticIpfs')
+  t.is(pin.status, 'Pinned')
+})
 
 test.serial('should allow a CAR with unsupported hash function', async (t) => {
   const client = await createClientWithUser(t)
@@ -600,7 +599,8 @@ test.serial('should upload 
to elastic ipfs', async (t) => { t.is(data.content.pin[0].service, 'ElasticIpfs') }) -test.serial('should create S3 & R2 backups', async (t) => { +// TODO: remove once we have fully removed legacy upload path +test.skip('should create S3 & R2 backups', async (t) => { const client = await createClientWithUser(t) const config = getTestServiceConfig(t) const mf = getMiniflareContext(t) @@ -634,79 +634,75 @@ test.serial('should create S3 & R2 backups', async (t) => { t.is(backup_urls[1], expectedR2BackupUrl(config, carCid)) }) -test.serial( - 'should backup chunked uploads, preserving backup_urls for each chunk', - async (t) => { - t.timeout(10_000) - const client = await createClientWithUser(t) - const config = getTestServiceConfig(t) - const mf = getMiniflareContext(t) - const chunkSize = 1024 - const nChunks = 5 - - const files = [] - for (let i = 0; i < nChunks; i++) { - files.push({ - path: `/dir/file-${i}.bin`, - content: getRandomBytes(chunkSize), - }) - } - - const { root, car } = await packToBlob({ - input: files, - maxChunkSize: chunkSize, +// TODO: remove once legacy codepath is fully removed +test.skip('should backup chunked uploads, preserving backup_urls for each chunk', async (t) => { + t.timeout(10_000) + const client = await createClientWithUser(t) + const config = getTestServiceConfig(t) + const mf = getMiniflareContext(t) + const chunkSize = 1024 + const nChunks = 5 + + const files = [] + for (let i = 0; i < nChunks; i++) { + files.push({ + path: `/dir/file-${i}.bin`, + content: getRandomBytes(chunkSize), }) - const splitter = await TreewalkCarSplitter.fromBlob(car, chunkSize) - const linkdexMock = getLinkdexMock(t) - // respond with 'Partial' 5 times, then 'Complete' once. - mockLinkdexResponse(linkdexMock, 'Partial', 5) - mockLinkdexResponse(linkdexMock, 'Complete', 1) - - const backupUrls = [] - for await (const chunk of splitter.cars()) { - const carParts = [] - for await (const part of chunk) { - carParts.push(part) - } - const carFile = new Blob(carParts, { type: 'application/car' }) - const res = await mf.dispatchFetch('http://miniflare.test/upload', { - method: 'POST', - headers: { Authorization: `Bearer ${client.token}` }, - body: carFile, - }) + } - const { value } = await res.json() - t.is(root.toString(), value.cid) - const carCid = await getCarCid( - new Uint8Array(await carFile.arrayBuffer()) - ) - const carHash = await getHash(new Uint8Array(await carFile.arrayBuffer())) - backupUrls.push(expectedS3BackupUrl(config, root, client.userId, carHash)) - backupUrls.push(expectedR2BackupUrl(config, carCid)) + const { root, car } = await packToBlob({ + input: files, + maxChunkSize: chunkSize, + }) + const splitter = await TreewalkCarSplitter.fromBlob(car, chunkSize) + const linkdexMock = getLinkdexMock(t) + // respond with 'Partial' 5 times, then 'Complete' once. 
+ mockLinkdexResponse(linkdexMock, 'Partial', 5) + mockLinkdexResponse(linkdexMock, 'Complete', 1) + + const backupUrls = [] + for await (const chunk of splitter.cars()) { + const carParts = [] + for await (const part of chunk) { + carParts.push(part) } + const carFile = new Blob(carParts, { type: 'application/car' }) + const res = await mf.dispatchFetch('http://miniflare.test/upload', { + method: 'POST', + headers: { Authorization: `Bearer ${client.token}` }, + body: carFile, + }) - const upload = await client.client.getUpload(root.toString(), client.userId) - t.truthy(upload) - t.truthy(upload?.backup_urls) - const backup_urls = upload?.backup_urls || [] - t.truthy(backup_urls.length >= nChunks) // using >= to account for CAR / UnixFS overhead - t.is( - backup_urls.length, - backupUrls.length, - `expected ${backupUrls.length} backup urls, got: ${backup_urls.length}` - ) + const { value } = await res.json() + t.is(root.toString(), value.cid) + const carCid = await getCarCid(new Uint8Array(await carFile.arrayBuffer())) + const carHash = await getHash(new Uint8Array(await carFile.arrayBuffer())) + backupUrls.push(expectedS3BackupUrl(config, root, client.userId, carHash)) + backupUrls.push(expectedR2BackupUrl(config, carCid)) + } - /** @type string[] */ - // @ts-expect-error upload.backup_urls has type unknown[], but it's really string[] - const resultUrls = upload.backup_urls - for (const url of resultUrls) { - t.true( - backupUrls.includes(url), - `upload is missing expected backup url ${url}` - ) - } + const upload = await client.client.getUpload(root.toString(), client.userId) + t.truthy(upload) + t.truthy(upload?.backup_urls) + const backup_urls = upload?.backup_urls || [] + t.truthy(backup_urls.length >= nChunks) // using >= to account for CAR / UnixFS overhead + t.is( + backup_urls.length, + backupUrls.length, + `expected ${backupUrls.length} backup urls, got: ${backup_urls.length}` + ) + + /** @type string[] */ + // @ts-expect-error upload.backup_urls has type unknown[], but it's really string[] + const resultUrls = upload.backup_urls + for (const url of resultUrls) { + t.true( + backupUrls.includes(url), + `upload is missing expected backup url ${url}` + ) } -) +}) test.serial('should upload a single file using ucan', async (t) => { const client = await createClientWithUser(t) @@ -857,7 +853,8 @@ test.serial('should update a single file', async (t) => { t.is(uploadData.name, name) }) -test.serial('should write satnav index', async (t) => { +// TODO: remove once legacy upload flow is fully removed +test.skip('should write satnav index', async (t) => { const client = await createClientWithUser(t) const config = getTestServiceConfig(t) const mf = getMiniflareContext(t) @@ -898,7 +895,8 @@ test.serial('should write satnav index', async (t) => { ) }) -test.serial('should write dudewhere index', async (t) => { +// TODO remove once legacy upload path is removed +test.skip('should write dudewhere index', async (t) => { const client = await createClientWithUser(t) const config = getTestServiceConfig(t) const mf = getMiniflareContext(t)
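
Context for the switch being locked open (an illustrative restatement, not part of the change): the retired check parsed a JSON array of email addresses from the W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS environment variable and returned the result of Array.prototype.find — the matched email string (truthy) or undefined — rather than a strict boolean, so callers relied on truthiness. A self-contained sketch of that behavior, with made-up example values:

function w3upFeatureSwitchEnabledLegacy(context, event) {
  const { W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS = '[]' } = context
  const allowedEmails = JSON.parse(W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS)
  if (!Array.isArray(allowedEmails)) return false
  // .find yields the matched email (a truthy string) or undefined, not a boolean
  return allowedEmails.find((allowed) => allowed === event.user.email)
}

const exampleContext = { W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS: '["alice@example.com"]' }
console.log(w3upFeatureSwitchEnabledLegacy(exampleContext, { user: { email: 'alice@example.com' } })) // 'alice@example.com' (truthy)
console.log(w3upFeatureSwitchEnabledLegacy(exampleContext, { user: { email: 'bob@example.com' } })) // undefined (falsy)

With the switch hardwired to return true, every authenticated uploader takes the w3up path, which is why the W3UP_URL forwarding test above now expects the store/add count to increment for client2 as well.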
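
A sketch of the cleanup pass promised in the commit message, assuming a call site of roughly this shape (uploadToW3up and legacyUpload are hypothetical names for illustration; the real call sites in nfts-upload.js may differ): once w3up uploads are stable in production, w3upFeatureSwitchEnabled, its JSDoc, and the W3_NFTSTORAGE_ENABLE_W3UP_FOR_EMAILS variable can be deleted outright.

// Before: every upload consults the switch, which now always returns true.
async function nftUploadBefore(event, ctx) {
  if (w3upFeatureSwitchEnabled(ctx, event)) {
    return uploadToW3up(event, ctx) // hypothetical helper
  }
  return legacyUpload(event, ctx) // hypothetical helper
}

// After: the switch is gone and w3up is the only code path.
async function nftUploadAfter(event, ctx) {
  return uploadToW3up(event, ctx)
}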