diff --git a/packages/auto-dag-data/src/ipld/builders.ts b/packages/auto-dag-data/src/ipld/builders.ts
index d799b3d8..8d148ff4 100644
--- a/packages/auto-dag-data/src/ipld/builders.ts
+++ b/packages/auto-dag-data/src/ipld/builders.ts
@@ -14,10 +14,10 @@ import {
 export interface Builders {
   inlink: (links: CID[], size: number, linkDepth: number, chunkSize: number) => PBNode
-  chunk: (data: Buffer) => PBNode
+  chunk: (data: Buffer, maxNodeSize?: number) => PBNode
   root: (
     links: CID[],
-    size: number,
+    size: bigint,
     linkDepth: number,
     name?: string,
     maxNodeSize?: number,
diff --git a/packages/auto-dag-data/src/ipld/chunker.ts b/packages/auto-dag-data/src/ipld/chunker.ts
index b188c6f8..7a54d0e1 100644
--- a/packages/auto-dag-data/src/ipld/chunker.ts
+++ b/packages/auto-dag-data/src/ipld/chunker.ts
@@ -3,41 +3,71 @@ import type { AwaitIterable } from 'interface-store'
 import { CID } from 'multiformats'
 import { cidOfNode } from '../cid/index.js'
 import { decodeIPLDNodeData, FileUploadOptions, OffchainMetadata } from '../metadata/index.js'
+import { stringifyMetadata } from '../utils/metadata.js'
 import { Builders, fileBuilders, metadataBuilders } from './builders.js'
 import { createFolderInlinkIpldNode, createFolderIpldNode } from './nodes.js'
 import { chunkBuffer, encodeNode, PBNode } from './utils.js'
 
 type ChunkerLimits = {
-  maxChunkSize: number
+  maxNodeSize: number
   maxLinkPerNode: number
 }
 
 type ChunkerOptions = ChunkerLimits & FileUploadOptions
 
-export const DEFAULT_MAX_CHUNK_SIZE = 64 * 1024
+const DEFAULT_NODE_MAX_SIZE = 65535
 
-const ESTIMATED_LINK_SIZE_IN_BYTES = 64
-export const DEFAULT_MAX_LINK_PER_NODE = DEFAULT_MAX_CHUNK_SIZE / ESTIMATED_LINK_SIZE_IN_BYTES
+// u8 -> 1 byte (may grow in the future, but unlikely beyond 255)
+const NODE_TYPE_SIZE = 1
+// u32 -> 4 bytes
+const NODE_LINK_DEPTH_SIZE = 4
+// u64 -> 8 bytes
+const NODE_SIZE_SIZE = 8
+// Limit names to 255 characters (the macOS filename limit)
+const MAX_NAME_SIZE = 255
+const END_OF_STRING_BYTE = 1
+const NODE_NAME_SIZE = MAX_NAME_SIZE + END_OF_STRING_BYTE
+// Upload options may be extended in the future
+const NODE_UPLOAD_OPTIONS_SIZE = 100
+// Reserve 100 bytes for future use
+const NODE_RESERVED_SIZE = 100
+
+export const NODE_METADATA_SIZE =
+  NODE_TYPE_SIZE +
+  NODE_LINK_DEPTH_SIZE +
+  NODE_SIZE_SIZE +
+  NODE_NAME_SIZE +
+  NODE_RESERVED_SIZE +
+  NODE_UPLOAD_OPTIONS_SIZE
+
+export const DEFAULT_MAX_CHUNK_SIZE = DEFAULT_NODE_MAX_SIZE - NODE_METADATA_SIZE
+
+export const LINK_SIZE_IN_BYTES = 40
+export const DEFAULT_MAX_LINK_PER_NODE = Math.floor(DEFAULT_MAX_CHUNK_SIZE / LINK_SIZE_IN_BYTES)
 
 export const processFileToIPLDFormat = (
   blockstore: BaseBlockstore,
   file: AwaitIterable<Buffer>,
-  totalSize: number,
+  totalSize: bigint,
   filename?: string,
   {
-    maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize = DEFAULT_MAX_CHUNK_SIZE,
     maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
    encryption = undefined,
     compression = undefined,
   }: Partial<ChunkerOptions> = {
-    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize: DEFAULT_MAX_CHUNK_SIZE,
     maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
     encryption: undefined,
     compression: undefined,
   },
 ): Promise<CID> => {
+  if (filename && filename.length > MAX_NAME_SIZE) {
+    throw new Error(`Filename is too long: ${filename.length} > ${MAX_NAME_SIZE}`)
+  }
+
   return processBufferToIPLDFormat(blockstore, file, filename, totalSize, fileBuilders, {
-    maxChunkSize,
+    maxNodeSize,
     maxLinkPerNode,
     encryption,
     compression,
@@ -47,20 +77,24 @@ export const processFileToIPLDFormat = (
 export const processMetadataToIPLDFormat = async (
   blockstore: BaseBlockstore,
   metadata: OffchainMetadata,
-  limits: { maxChunkSize: number; maxLinkPerNode: number } = {
-    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
+  limits: { maxNodeSize: number; maxLinkPerNode: number } = {
+    maxNodeSize: DEFAULT_MAX_CHUNK_SIZE,
     maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
   },
 ): Promise<CID> => {
-  const buffer = Buffer.from(JSON.stringify(metadata))
-  const name = `${metadata.name}.metadata.json`
+  if (metadata.name && metadata.name.length > MAX_NAME_SIZE) {
+    throw new Error(`Filename is too long: ${metadata.name.length} > ${MAX_NAME_SIZE}`)
+  }
+
+  const buffer = Buffer.from(stringifyMetadata(metadata))
+
   return processBufferToIPLDFormat(
     blockstore,
     (async function* () {
       yield buffer
     })(),
-    name,
-    buffer.byteLength,
+    metadata.name,
+    BigInt(buffer.byteLength),
     metadataBuilders,
     limits,
   )
@@ -70,21 +104,25 @@ const processBufferToIPLDFormat = async (
   blockstore: BaseBlockstore,
   buffer: AwaitIterable<Buffer>,
   filename: string | undefined,
-  totalSize: number,
+  totalSize: bigint,
   builders: Builders,
   {
-    maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize = DEFAULT_MAX_CHUNK_SIZE,
     maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
     encryption = undefined,
     compression = undefined,
   }: ChunkerOptions = {
-    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize: DEFAULT_MAX_CHUNK_SIZE,
     maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
     encryption: undefined,
     compression: undefined,
   },
 ): Promise<CID> => {
-  const bufferChunks = chunkBuffer(buffer, { maxChunkSize: maxChunkSize })
+  if (filename && filename.length > MAX_NAME_SIZE) {
+    throw new Error(`Filename is too long: ${filename.length} > ${MAX_NAME_SIZE}`)
+  }
+
+  const bufferChunks = chunkBuffer(buffer, { maxChunkSize: maxNodeSize - NODE_METADATA_SIZE })
 
   let CIDs: CID[] = []
   for await (const chunk of bufferChunks) {
@@ -96,7 +134,7 @@ const processBufferToIPLDFormat = async (
 
   return processBufferToIPLDFormatFromChunks(blockstore, CIDs, filename, totalSize, builders, {
     maxLinkPerNode,
-    maxChunkSize,
+    maxNodeSize,
     encryption,
     compression,
   })
@@ -106,20 +144,24 @@ export const processBufferToIPLDFormatFromChunks = async (
   blockstore: BaseBlockstore,
   chunks: AwaitIterable<CID>,
   filename: string | undefined,
-  totalSize: number,
+  totalSize: bigint,
   builders: Builders,
   {
-    maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize = DEFAULT_MAX_CHUNK_SIZE,
     maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
     encryption = undefined,
     compression = undefined,
   }: Partial<ChunkerOptions> = {
-    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize: DEFAULT_MAX_CHUNK_SIZE,
     maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
     encryption: undefined,
     compression: undefined,
   },
 ): Promise<CID> => {
+  if (filename && filename.length > MAX_NAME_SIZE) {
+    throw new Error(`Filename is too long: ${filename.length} > ${MAX_NAME_SIZE}`)
+  }
+
   let chunkCount = 0
   let CIDs: CID[] = []
   for await (const chunk of chunks) {
@@ -147,7 +189,7 @@ export const processBufferToIPLDFormatFromChunks = async (
     for (let i = 0; i < CIDs.length; i += maxLinkPerNode) {
       const chunk = CIDs.slice(i, i + maxLinkPerNode)
-      const node = builders.inlink(chunk, chunk.length, depth, maxChunkSize)
+      const node = builders.inlink(chunk, chunk.length, depth, maxNodeSize)
       const cid = cidOfNode(node)
       await blockstore.put(cid, encodeNode(node))
       newCIDs.push(cid)
@@ -155,7 +197,7 @@ export const processBufferToIPLDFormatFromChunks = async (
     depth++
     CIDs = newCIDs
   }
-  const head = builders.root(CIDs, totalSize, depth, filename, maxChunkSize, {
+  const head = builders.root(CIDs, totalSize, depth, filename, maxNodeSize, {
     compression,
     encryption,
   })
 }
@@ -169,19 +211,23 @@ export const processFolderToIPLDFormat = async (
   blockstore: BaseBlockstore,
   children: CID[],
   name: string,
-  size: number,
+  size: bigint,
   {
     maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
-    maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize = DEFAULT_MAX_CHUNK_SIZE,
     compression = undefined,
     encryption = undefined,
   }: Partial<ChunkerOptions> = {
     maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
-    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
+    maxNodeSize: DEFAULT_MAX_CHUNK_SIZE,
     compression: undefined,
     encryption: undefined,
   },
 ): Promise<CID> => {
+  if (name.length > MAX_NAME_SIZE) {
+    throw new Error(`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`)
+  }
+
   let cids = children
   let depth = 0
   while (cids.length > maxLinkPerNode) {
@@ -197,7 +243,7 @@ export const processFolderToIPLDFormat = async (
     depth++
   }
 
-  const node = createFolderIpldNode(cids, name, depth, size, maxChunkSize, {
+  const node = createFolderIpldNode(cids, name, depth, size, maxNodeSize, {
     compression,
     encryption,
   })
@@ -215,12 +261,15 @@ export const processChunksToIPLDFormat = async (
   blockstore: BaseBlockstore,
   chunks: AwaitIterable<Buffer>,
   builders: Builders,
-  { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE }: { maxChunkSize?: number },
+  { maxNodeSize = DEFAULT_MAX_CHUNK_SIZE }: { maxNodeSize?: number },
 ): Promise<Buffer> => {
-  const bufferChunks = chunkBuffer(chunks, { maxChunkSize, ignoreLastChunk: false })
+  const bufferChunks = chunkBuffer(chunks, {
+    maxChunkSize: maxNodeSize - NODE_METADATA_SIZE,
+    ignoreLastChunk: false,
+  })
 
   for await (const chunk of bufferChunks) {
-    if (chunk.byteLength < maxChunkSize) {
+    if (chunk.byteLength < maxNodeSize) {
       return chunk
     }
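Note on the derived defaults above: every node reserves NODE_METADATA_SIZE bytes of its 65535-byte envelope for encoded metadata, and only the remainder is payload. A quick sketch of the arithmetic using the constants from this file (the totals are computed here, not stated in the diff):

// NODE_METADATA_SIZE       = 1 + 4 + 8 + (255 + 1) + 100 + 100 = 469 bytes
// DEFAULT_MAX_CHUNK_SIZE   = 65535 - 469 = 65066 bytes of payload per node
// DEFAULT_MAX_LINK_PER_NODE = Math.floor(65066 / 40) = 1626 links per node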
diff --git a/packages/auto-dag-data/src/ipld/nodes.ts b/packages/auto-dag-data/src/ipld/nodes.ts
index cc7a5cf6..eb31e3c0 100644
--- a/packages/auto-dag-data/src/ipld/nodes.ts
+++ b/packages/auto-dag-data/src/ipld/nodes.ts
@@ -1,19 +1,26 @@
 import { CID } from 'multiformats/cid'
 import { FileUploadOptions, OffchainMetadata } from '../metadata/index.js'
 import { encodeIPLDNodeData, MetadataType } from '../metadata/onchain/index.js'
+import { stringifyMetadata } from '../utils/metadata.js'
 import { DEFAULT_MAX_CHUNK_SIZE, ensureNodeMaxSize } from './chunker.js'
 import { createNode, PBNode } from './index.js'
 
 /// Creates a file chunk ipld node
-export const createFileChunkIpldNode = (data: Buffer): PBNode =>
-  createNode(
-    encodeIPLDNodeData({
-      type: MetadataType.FileChunk,
-      size: data.length,
-      linkDepth: 0,
-      data,
-    }),
-    [],
+export const createFileChunkIpldNode = (
+  data: Buffer,
+  maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
+): PBNode =>
+  ensureNodeMaxSize(
+    createNode(
+      encodeIPLDNodeData({
+        type: MetadataType.FileChunk,
+        size: BigInt(data.length).valueOf(),
+        linkDepth: 0,
+        data,
+      }),
+      [],
+    ),
+    maxNodeSize,
   )
 
 // Creates a file ipld node
@@ -21,7 +28,7 @@ export const createFileChunkIpldNode = (data: Buffer): PBNode =>
 // @todo: add the file's metadata
 export const createChunkedFileIpldNode = (
   links: CID[],
-  size: number,
+  size: bigint,
   linkDepth: number,
   name?: string,
   maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
@@ -52,7 +59,7 @@ export const createFileInlinkIpldNode = (
   createNode(
     encodeIPLDNodeData({
       type: MetadataType.FileInlink,
-      size,
+      size: BigInt(size).valueOf(),
       linkDepth,
     }),
     links.map((cid) => ({ Hash: cid })),
@@ -67,17 +74,21 @@ export const createSingleFileIpldNode = (
   data: Buffer,
   name?: string,
   uploadOptions?: FileUploadOptions,
+  maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
 ): PBNode =>
-  createNode(
-    encodeIPLDNodeData({
-      type: MetadataType.File,
-      name,
-      size: data.length,
-      linkDepth: 0,
-      data,
-      uploadOptions,
-    }),
-    [],
+  ensureNodeMaxSize(
+    createNode(
+      encodeIPLDNodeData({
+        type: MetadataType.File,
+        name,
+        size: BigInt(data.length).valueOf(),
+        linkDepth: 0,
+        data,
+        uploadOptions,
+      }),
+      [],
+    ),
+    maxNodeSize,
   )
 
 // Creates a file ipld node
@@ -93,7 +104,7 @@ export const createMetadataInlinkIpldNode = (
   createNode(
     encodeIPLDNodeData({
       type: MetadataType.FileInlink,
-      size,
+      size: BigInt(size).valueOf(),
       linkDepth,
     }),
     links.map((cid) => ({ Hash: cid })),
@@ -109,26 +120,32 @@ export const createSingleMetadataIpldNode = (data: Buffer, name?: string): PBNode =>
     encodeIPLDNodeData({
       type: MetadataType.Metadata,
       name,
-      size: data.length,
+      size: BigInt(data.length).valueOf(),
       linkDepth: 0,
       data,
     }),
     [],
   )
 
-export const createMetadataChunkIpldNode = (data: Buffer): PBNode =>
-  createNode(
-    encodeIPLDNodeData({
-      type: MetadataType.MetadataChunk,
-      size: data.length,
-      linkDepth: 0,
-      data,
-    }),
+export const createMetadataChunkIpldNode = (
+  data: Buffer,
+  maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
+): PBNode =>
+  ensureNodeMaxSize(
+    createNode(
+      encodeIPLDNodeData({
+        type: MetadataType.MetadataChunk,
+        size: BigInt(data.length).valueOf(),
+        linkDepth: 0,
+        data,
+      }),
+    ),
+    maxNodeSize,
   )
 
 export const createChunkedMetadataIpldNode = (
   links: CID[],
-  size: number,
+  size: bigint,
   linkDepth: number,
   name?: string,
   maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
@@ -153,7 +170,7 @@ export const createFolderIpldNode = (
   links: CID[],
   name: string,
   linkDepth: number,
-  size: number,
+  size: bigint,
   maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
   uploadOptions?: FileUploadOptions,
 ): PBNode =>
@@ -192,7 +209,7 @@ export const createMetadataNode = (
   metadata: OffchainMetadata,
   maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
 ): PBNode => {
-  const data = Buffer.from(JSON.stringify(metadata))
+  const data = Buffer.from(stringifyMetadata(metadata))
 
   return ensureNodeMaxSize(
     createNode(
diff --git a/packages/auto-dag-data/src/metadata/offchain/file.ts b/packages/auto-dag-data/src/metadata/offchain/file.ts
index 1c9c7a49..641075fc 100644
--- a/packages/auto-dag-data/src/metadata/offchain/file.ts
+++ b/packages/auto-dag-data/src/metadata/offchain/file.ts
@@ -6,21 +6,21 @@ export type OffchainFileMetadata = {
   dataCid: string
   name?: string
   mimeType?: string
-  totalSize: number
+  totalSize: bigint
   totalChunks: number
   chunks: ChunkInfo[]
   uploadOptions?: FileUploadOptions
 }
 
 export interface ChunkInfo {
-  size: number
+  size: bigint
   cid: string
 }
 
 export const fileMetadata = (
   headCID: CID,
   chunks: ChunkInfo[],
-  totalSize: number,
+  totalSize: bigint,
   name?: string | null,
   mimeType?: string | null,
   uploadOptions: FileUploadOptions = {
diff --git a/packages/auto-dag-data/src/metadata/offchain/folder.ts b/packages/auto-dag-data/src/metadata/offchain/folder.ts
index 495581d1..d2f0c38d 100644
--- a/packages/auto-dag-data/src/metadata/offchain/folder.ts
+++ b/packages/auto-dag-data/src/metadata/offchain/folder.ts
@@ -7,14 +7,14 @@ interface ChildrenMetadata {
   type: 'folder' | 'file'
   name?: string
   cid: string
-  totalSize: number
+  totalSize: bigint
 }
 
 export type OffchainFolderMetadata = {
   type: 'folder'
   dataCid: string
   name?: string
-  totalSize: number
+  totalSize: bigint
   totalFiles: number
   children: ChildrenMetadata[]
   uploadOptions: FileUploadOptions
@@ -29,7 +29,7 @@ export const childrenMetadataFromNode = (node: PBNode): ChildrenMetadata => {
   return {
     type: ipldData.type === MetadataType.File ? 'file' : 'folder',
     cid: cidToString(cidOfNode(node)),
-    totalSize: ipldData.size ?? 0,
+    totalSize: ipldData.size ?? BigInt(0).valueOf(),
     name: ipldData.name,
   }
 }
@@ -44,7 +44,7 @@ export const folderMetadata = (
 
   return {
     dataCid: cid,
-    totalSize: children.reduce((acc, child) => acc + child.totalSize, 0),
+    totalSize: children.reduce((acc, child) => acc + child.totalSize, BigInt(0).valueOf()),
     totalFiles: children.length,
     children,
     type: 'folder',
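A note on the BigInt(0) seed in the reduce above: JavaScript refuses to mix bigint and number in arithmetic, so once child.totalSize is a bigint the accumulator must start as one too. A minimal illustration (the array literal is hypothetical, not from the diff):

const sizes = [BigInt(10), BigInt(20)]
sizes.reduce((acc, s) => acc + s, 0)         // TypeError: Cannot mix BigInt and other types
sizes.reduce((acc, s) => acc + s, BigInt(0)) // 30n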
diff --git a/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.proto b/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.proto
index 3a62fc38..e5796859 100644
--- a/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.proto
+++ b/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.proto
@@ -1,12 +1,13 @@
 syntax = "proto3";
 
 message IPLDNodeData {
-  MetadataType type = 1;
-  int32 linkDepth = 2;
-  optional int32 size = 3;
-  optional string name = 4;
-  optional bytes data = 5;
-  optional FileUploadOptions uploadOptions = 6;
+  MetadataType type = 1; // maxLength = 1
+  int32 linkDepth = 2; // maxLength = 4
+  optional int64 size = 3; // maxLength = 8
+  optional string name = 4; // maxLength = 256
+  optional bytes data = 5; // maxLength = XXX
+  optional FileUploadOptions uploadOptions = 6; // maxLength = 100
+  /// Reserve 100 bytes for future use
 }
 
 // MetadataType defines the possible types of metadata.
diff --git a/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.ts b/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.ts
index 1fae439c..ce9d7e48 100644
--- a/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.ts
+++ b/packages/auto-dag-data/src/metadata/onchain/protobuf/OnchainMetadata.ts
@@ -10,7 +10,7 @@ import type { Uint8ArrayList } from 'uint8arraylist'
 export interface IPLDNodeData {
   type: MetadataType
   linkDepth: number
-  size?: number
+  size?: bigint
   name?: string
   data?: Uint8Array
   uploadOptions?: FileUploadOptions
@@ -38,7 +38,7 @@ export namespace IPLDNodeData {
 
     if (obj.size != null) {
       w.uint32(24)
-      w.int32(obj.size)
+      w.int64(obj.size)
     }
 
     if (obj.name != null) {
@@ -80,7 +80,7 @@ export namespace IPLDNodeData {
           break
         }
         case 3: {
-          obj.size = reader.int32()
+          obj.size = reader.int64()
           break
         }
         case 4: {
diff --git a/packages/auto-dag-data/src/utils/metadata.ts b/packages/auto-dag-data/src/utils/metadata.ts
new file mode 100644
index 00000000..f5dc87b8
--- /dev/null
+++ b/packages/auto-dag-data/src/utils/metadata.ts
@@ -0,0 +1,6 @@
+import { OffchainMetadata } from '../metadata/index.js'
+
+export const stringifyMetadata = (metadata: OffchainMetadata): string =>
+  JSON.stringify(metadata, (_, v) =>
+    typeof v === 'bigint' || v instanceof BigInt ? v.toString() : v,
+  )
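The replacer in stringifyMetadata exists because JSON.stringify has no native bigint support and throws on bigint values. The typeof check does the real work here; a primitive bigint is never an instanceof BigInt (only boxed Object(bigint) values are). A rough usage sketch (the object literal is illustrative only):

JSON.stringify({ totalSize: BigInt(1000) }) // TypeError: Do not know how to serialize a BigInt
stringifyMetadata(metadata)                 // bigint fields are emitted as decimal strings, e.g. "1000"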
diff --git a/packages/auto-dag-data/tests/chunker.spec.ts b/packages/auto-dag-data/tests/chunker.spec.ts
index 2bd8d844..2b45307f 100644
--- a/packages/auto-dag-data/tests/chunker.spec.ts
+++ b/packages/auto-dag-data/tests/chunker.spec.ts
@@ -1,12 +1,14 @@
 import { BaseBlockstore, MemoryBlockstore } from 'blockstore-core'
-import { cidOfNode, cidToString } from '../src'
+import { cidOfNode, cidToString, createSingleFileIpldNode } from '../src'
 import {
+  LINK_SIZE_IN_BYTES,
+  NODE_METADATA_SIZE,
   processFileToIPLDFormat,
   processFolderToIPLDFormat,
   processMetadataToIPLDFormat,
 } from '../src/ipld/chunker'
 import { createNode, decodeNode, PBNode } from '../src/ipld/utils'
-import { IPLDNodeData, MetadataType, OffchainMetadata } from '../src/metadata'
+import { decodeIPLDNodeData, IPLDNodeData, MetadataType, OffchainMetadata } from '../src/metadata'
 
 describe('chunker', () => {
   describe('file creation', () => {
@@ -16,7 +18,12 @@ describe('chunker', () => {
       const name = 'test.txt'
       const blockstore = new MemoryBlockstore()
-      await processFileToIPLDFormat(blockstore, bufferToIterable(Buffer.from(text)), size, name)
+      await processFileToIPLDFormat(
+        blockstore,
+        bufferToIterable(Buffer.from(text)),
+        BigInt(size),
+        name,
+      )
 
       const nodes = await nodesFromBlockstore(blockstore)
       expect(nodes.length).toBe(1)
@@ -31,7 +38,7 @@ describe('chunker', () => {
       expect(decoded.type).toBe(MetadataType.File)
       expect(Buffer.from(decoded.data ?? '').toString()).toBe(text)
       expect(decoded.linkDepth).toBe(0)
-      expect(decoded.size).toBe(text.length)
+      expect(decoded.size?.toString()).toBe(text.length.toString())
 
       /// Check no links
       expect(node?.Links.length).toBe(0)
@@ -52,10 +59,10 @@ describe('chunker', () => {
       const headCID = await processFileToIPLDFormat(
         blockstore,
         bufferToIterable(Buffer.from(text)),
-        size,
+        BigInt(size),
         name,
         {
-          maxChunkSize,
+          maxNodeSize: maxChunkSize + NODE_METADATA_SIZE,
           maxLinkPerNode: maxChunkSize / 64,
         },
       )
@@ -72,7 +79,7 @@ describe('chunker', () => {
       expect(decoded.name).toBe(name)
       expect(decoded.type).toBe(MetadataType.File)
       expect(decoded.linkDepth).toBe(1)
-      expect(decoded.size).toBe(text.length)
+      expect(decoded.size!.toString()).toBe(text.length.toString())
 
       nodes.forEach((node) => {
         if (cidToString(cidOfNode(node)) !== cidToString(headCID)) {
@@ -82,9 +89,10 @@ describe('chunker', () => {
     it('create a file dag with inlinks', async () => {
-      const maxChunkSize = 1000
+      const chunkLength = 1000
+      const maxNodeSize = chunkLength + NODE_METADATA_SIZE
       const chunkNum = 10
-      const chunk = 'h'.repeat(maxChunkSize)
+      const chunk = 'h'.repeat(chunkLength)
       const name = 'test.txt'
       const text = chunk.repeat(chunkNum)
       const size = text.length
@@ -96,10 +104,10 @@ describe('chunker', () => {
       const headCID = await processFileToIPLDFormat(
         blockstore,
         bufferToIterable(Buffer.from(text)),
-        size,
+        BigInt(size),
         name,
         {
-          maxChunkSize,
+          maxNodeSize,
           maxLinkPerNode: 4,
         },
       )
@@ -136,7 +144,7 @@ describe('chunker', () => {
       const name = 'folder'
       const size = 1000
       const blockstore = new MemoryBlockstore()
-      const headCID = processFolderToIPLDFormat(blockstore, links, name, size, {
+      const headCID = processFolderToIPLDFormat(blockstore, links, name, BigInt(size), {
         maxLinkPerNode: 4,
       })
@@ -149,7 +157,7 @@ describe('chunker', () => {
       expect(decoded.name).toBe(name)
       expect(decoded.type).toBe(MetadataType.Folder)
       expect(decoded.linkDepth).toBe(0)
-      expect(decoded.size).toBe(size)
+      expect(decoded.size!.toString()).toBe(size.toString())
     })
 
     it('create a folder dag with inlinks', async () => {
@@ -163,7 +171,7 @@ describe('chunker', () => {
       const EXPECTED_NODE_COUNT = 4
       const blockstore = new MemoryBlockstore()
-      const headCID = processFolderToIPLDFormat(blockstore, links, name, size, {
+      const headCID = processFolderToIPLDFormat(blockstore, links, name, BigInt(size), {
         maxLinkPerNode: 4,
       })
@@ -195,7 +203,7 @@ describe('chunker', () => {
       dataCid: 'test',
       name: 'test',
       mimeType: 'text/plain',
-      totalSize: 1000,
+      totalSize: BigInt(1000),
       totalChunks: 10,
       chunks: [],
     }
@@ -211,15 +219,15 @@ describe('chunker', () => {
       type: 'file',
       dataCid: 'test',
       name: 'test',
-      mimeType: 'text/plain'.repeat(100),
-      totalSize: 1000,
+      mimeType: 'text/plain'.repeat(1000),
+      totalSize: BigInt(10000),
       totalChunks: 10,
       chunks: [],
     }
 
     const blockstore = new MemoryBlockstore()
     const headCID = await processMetadataToIPLDFormat(blockstore, metadata, {
-      maxChunkSize: 200,
+      maxNodeSize: 2000,
       maxLinkPerNode: 2,
     })
     const nodes = await nodesFromBlockstore(blockstore)
@@ -235,13 +243,13 @@ describe('chunker', () => {
     const singleBufferCID = await processFileToIPLDFormat(
       blockstore,
       bufferToIterable(buffer),
-      buffer.length,
+      BigInt(buffer.length),
       'test.txt',
     )
     const chunkedBufferCID = await processFileToIPLDFormat(
       chunkedBlockstore,
       separateBufferToIterable(buffer, 5),
-      buffer.length,
+      BigInt(buffer.length),
       'test.txt',
     )
@@ -255,19 +263,74 @@ describe('chunker', () => {
     const singleBufferCID = await processFileToIPLDFormat(
       blockstore,
       bufferToIterable(buffer),
-      buffer.length,
+      BigInt(buffer.length),
       'test.txt',
     )
     const chunkedBufferCID = await processFileToIPLDFormat(
       chunkedBlockstore,
       separateBufferToIterable(buffer, 5),
-      buffer.length,
+      BigInt(buffer.length),
       'test.txt',
     )
 
     expect(singleBufferCID).toEqual(chunkedBufferCID)
   })
   })
+
+  describe('nodes sizes', () => {
+    it('file root node with inlinks', async () => {
+      const maxNodeSize = 1000
+      const maxChunkSize = maxNodeSize - NODE_METADATA_SIZE
+      const maxLinkPerNode = Math.floor(maxChunkSize / LINK_SIZE_IN_BYTES)
+      const buffer = Buffer.from('h'.repeat(maxChunkSize).repeat(maxLinkPerNode ** 3))
+
+      const blockstore = new MemoryBlockstore()
+
+      await processFileToIPLDFormat(
+        blockstore,
+        bufferToIterable(buffer),
+        BigInt(buffer.length),
+        'test.txt',
+        {
+          maxNodeSize,
+          maxLinkPerNode,
+        },
+      )
+
+      const nodes = await nodesFromBlockstore(blockstore)
+
+      const inlinks = nodes.filter(
+        (node) =>
+          IPLDNodeData.decode(node.Data ?? new Uint8Array()).type === MetadataType.FileInlink,
+      )
+      inlinks.map((e) => e.Links.length).forEach((e) => expect(e).toBe(maxLinkPerNode))
+    })
+
+    it('folder root node with inlinks', async () => {
+      const maxLinkPerNode = 4
+      const maxNodeSize = maxLinkPerNode * LINK_SIZE_IN_BYTES + NODE_METADATA_SIZE
+      const links = Array.from({ length: 16 }, () =>
+        cidOfNode(createNode(Buffer.from(Math.random().toString()))),
+      )
+
+      const blockstore = new MemoryBlockstore()
+      await processFolderToIPLDFormat(blockstore, links, 'test', BigInt(1000), {
+        maxLinkPerNode,
+        maxNodeSize,
+      })
+
+      const nodes = await nodesFromBlockstore(blockstore)
+      for (const node of nodes) {
+        expect(node.Data?.length).toBeLessThanOrEqual(maxNodeSize)
+      }
+
+      const inlinks = nodes.filter(
+        (node) =>
+          IPLDNodeData.decode(node.Data ?? new Uint8Array()).type === MetadataType.FolderInlink,
+      )
+      inlinks.map((e) => e.Links.length).forEach((e) => expect(e).toBe(maxLinkPerNode))
+    })
+  })
 })
 
 const bufferToIterable = (buffer: Buffer): AsyncIterable<Buffer> => {
diff --git a/packages/auto-dag-data/tests/nodes.spec.ts b/packages/auto-dag-data/tests/nodes.spec.ts
index 1844ce60..55411f73 100644
--- a/packages/auto-dag-data/tests/nodes.spec.ts
+++ b/packages/auto-dag-data/tests/nodes.spec.ts
@@ -4,7 +4,7 @@ import {
   createFileChunkIpldNode,
   createSingleFileIpldNode,
 } from '../src/index.js'
-import { createNode } from '../src/ipld/index.js'
+import { createNode, DEFAULT_MAX_CHUNK_SIZE } from '../src/ipld/index.js'
 import { IPLDNodeData, MetadataType } from '../src/metadata/onchain/protobuf/OnchainMetadata.js'
 
 describe('node creation', () => {
@@ -15,7 +15,7 @@ describe('node creation', () => {
     const node = createSingleFileIpldNode(buffer, filename)
     const decoded = IPLDNodeData.decode(node.Data ?? new Uint8Array())
     expect(decoded.name).toBe(filename)
-    expect(decoded.size).toBe(buffer.length)
+    expect(decoded.size!.toString()).toBe(buffer.length.toString())
     expect(Buffer.from(decoded.data ?? '').toString()).toBe(buffer.toString())
   })
 
@@ -25,23 +25,29 @@ describe('node creation', () => {
     const decoded = IPLDNodeData.decode(node.Data ?? new Uint8Array())
     expect(decoded.type).toBe(MetadataType.File)
     expect(decoded.name).toBeUndefined()
-    expect(decoded.size).toBe(buffer.length)
+    expect(decoded.size!.toString()).toBe(buffer.length.toString())
     expect(Buffer.from(decoded.data ?? '').toString()).toBe(buffer.toString())
   })
 
+  it('single file root node | buffer too large', () => {
+    const maxNodeSize = DEFAULT_MAX_CHUNK_SIZE
+    const buffer = Buffer.from('h'.repeat(maxNodeSize))
+    expect(() => createSingleFileIpldNode(buffer, 'test.txt')).toThrow()
+  })
+
   it('chunked file root node | correctly params setup', () => {
     const links = Array.from({ length: 10 }, () =>
       cidOfNode(createNode(Buffer.from(Math.random().toString()))),
     )
-    const size = 1000
+    const size = BigInt(1000)
     const linkDepth = 1
     const filename = 'test.txt'
-    const node = createChunkedFileIpldNode(links, size, linkDepth, filename)
+    const node = createChunkedFileIpldNode(links, BigInt(size), linkDepth, filename)
     const decoded = IPLDNodeData.decode(node.Data ?? new Uint8Array())
     expect(decoded.type).toBe(MetadataType.File)
     expect(decoded.name).toBe(filename)
-    expect(decoded.size).toBe(size)
+    expect(decoded.size!.toString()).toBe(size.toString())
     expect(decoded.linkDepth).toBe(linkDepth)
   })
 
@@ -49,14 +55,14 @@ describe('node creation', () => {
     const links = Array.from({ length: 10 }, () =>
       cidOfNode(createNode(Buffer.from(Math.random().toString()))),
     )
-    const size = 1000
+    const size = BigInt(1000)
     const linkDepth = 1
     const node = createChunkedFileIpldNode(links, size, linkDepth)
     const decoded = IPLDNodeData.decode(node.Data ?? new Uint8Array())
     expect(decoded.type).toBe(MetadataType.File)
     expect(decoded.name).toBeUndefined()
-    expect(decoded.size).toBe(size)
+    expect(decoded.size!.toString()).toBe(size.toString())
     expect(decoded.linkDepth).toBe(linkDepth)
   })
 
@@ -67,7 +73,7 @@ describe('node creation', () => {
     const decoded = IPLDNodeData.decode(node.Data ?? new Uint8Array())
     expect(decoded.type).toBe(MetadataType.FileChunk)
     expect(decoded.name).toBeUndefined()
-    expect(decoded.size).toBe(buffer.length)
+    expect(decoded.size!.toString()).toBe(buffer.length.toString())
     expect(decoded.linkDepth).toBe(0)
   })
 })
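Taken together, callers migrating to this API now pass sizes as bigint and budget by total node size (maxNodeSize) rather than by payload size. A minimal sketch under those assumptions (the file name and contents are hypothetical, and bufferToIterable is the test helper shown above):

const blockstore = new MemoryBlockstore()
const data = Buffer.from('hello world')
const headCID = await processFileToIPLDFormat(
  blockstore,
  bufferToIterable(data),
  BigInt(data.length),  // totalSize is now a bigint
  'hello.txt',          // names longer than 255 characters now throw
  { maxNodeSize: 65535, maxLinkPerNode: 1626 },
)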