Merge pull request #177 from autonomys/test/retrievability
Add more testing to `@autonomys/auto-dag-data`
clostao authored Nov 19, 2024
2 parents c2dff28 + 9bc7dae commit 4b2dde9
Showing 19 changed files with 611 additions and 24 deletions.
2 changes: 1 addition & 1 deletion lerna.json
@@ -1,5 +1,5 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "1.0.7",
"version": "1.0.8",
"npmClient": "yarn"
}
4 changes: 2 additions & 2 deletions packages/auto-consensus/package.json
@@ -1,6 +1,6 @@
{
"name": "@autonomys/auto-consensus",
"version": "1.0.7",
"version": "1.0.8",
"license": "MIT",
"main": "dist/index.js",
"scripts": {
@@ -32,5 +32,5 @@
"ts-jest": "^29.1.4",
"typescript": "^5.4.5"
},
"gitHead": "2ee93dd0867c16e0a42f2f47b2fa66bc41ba29cf"
"gitHead": "88b1b90467db7dd1e301387451ed727189f9808c"
}
1 change: 1 addition & 0 deletions packages/auto-dag-data/jest.config.ts
@@ -14,4 +14,5 @@ module.exports = {
},
],
},
coveragePathIgnorePatterns: ['./src/metadata/onchain/protobuf'],
}
4 changes: 2 additions & 2 deletions packages/auto-dag-data/package.json
@@ -1,7 +1,7 @@
{
"name": "@autonomys/auto-dag-data",
"packageManager": "[email protected]",
"version": "1.0.7",
"version": "1.0.8",
"license": "MIT",
"main": "dist/index.js",
"repository": {
@@ -48,5 +48,5 @@
"protons": "^7.6.0",
"protons-runtime": "^5.5.0"
},
"gitHead": "2ee93dd0867c16e0a42f2f47b2fa66bc41ba29cf"
"gitHead": "88b1b90467db7dd1e301387451ed727189f9808c"
}
2 changes: 1 addition & 1 deletion packages/auto-dag-data/src/compression/index.ts
@@ -49,7 +49,7 @@ export async function* decompressFile(
compressedFile: AwaitIterable<Buffer>,
{
chunkSize = COMPRESSION_CHUNK_SIZE,
algorithm = CompressionAlgorithm.ZLIB,
algorithm,
level = 9,
}: PickPartial<CompressionOptions, 'algorithm'>,
): AsyncIterable<Buffer> {
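With this change `decompressFile` no longer falls back to ZLIB on its own, so callers are expected to state the algorithm on both sides of a round trip. The sketch below is a minimal example, assuming `compressFile`, `decompressFile`, and `CompressionAlgorithm` are importable from the package root as they are from `../src` in the tests further down.

```ts
import { compressFile, CompressionAlgorithm, decompressFile } from '@autonomys/auto-dag-data'

// Round-trip a buffer; both functions accept an AwaitIterable<Buffer>,
// so a plain single-element array is enough for a small file.
const roundTrip = async (file: Buffer): Promise<Buffer> => {
  const compressed = compressFile([file], {
    algorithm: CompressionAlgorithm.ZLIB,
    level: 9,
  })
  let out = Buffer.alloc(0)
  for await (const chunk of decompressFile(compressed, {
    algorithm: CompressionAlgorithm.ZLIB, // now stated explicitly rather than assumed
  })) {
    out = Buffer.concat([out, chunk])
  }
  return out
}
```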
5 changes: 3 additions & 2 deletions packages/auto-dag-data/src/encryption/index.ts
@@ -1,5 +1,6 @@
import { Crypto } from '@peculiar/webcrypto'
import { randomBytes } from 'crypto'
import { AwaitIterable } from 'interface-store'
import { EncryptionAlgorithm, EncryptionOptions } from '../metadata/index.js'
import { asyncByChunk } from '../utils/async.js'
import type { PickPartial } from '../utils/types.js'
@@ -41,7 +42,7 @@ export const getKeyFromPassword = async ({ password, salt }: PasswordGenerationO
}

export const encryptFile = async function* (
file: AsyncIterable<Buffer>,
file: AwaitIterable<Buffer>,
password: string,
{ chunkSize = ENCRYPTING_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
): AsyncIterable<Buffer> {
@@ -62,7 +63,7 @@ export const encryptFile = async function* (
}

export const decryptFile = async function* (
file: AsyncIterable<Buffer>,
file: AwaitIterable<Buffer>,
password: string,
{ chunkSize = ENCRYPTED_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
): AsyncIterable<Buffer> {
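Widening the input type from `AsyncIterable<Buffer>` to `AwaitIterable<Buffer>` means `encryptFile` and `decryptFile` now accept either a plain array of buffers or an async generator, which is how the new tests drive them. A minimal sketch under that assumption; the `AES_256_GCM` member name is illustrative, since the enum's values are not shown in this diff.

```ts
import { decryptFile, encryptFile, EncryptionAlgorithm } from '@autonomys/auto-dag-data'

// A plain array now satisfies AwaitIterable<Buffer>...
const fromArray = encryptFile([Buffer.from('hello')], 'password', {
  algorithm: EncryptionAlgorithm.AES_256_GCM, // illustrative member name
})

// ...and so does an async generator that yields chunks lazily.
async function* chunks() {
  yield Buffer.from('hello ')
  yield Buffer.from('world')
}

const roundTrip = async (): Promise<Buffer> => {
  const encrypted = encryptFile(chunks(), 'password', {
    algorithm: EncryptionAlgorithm.AES_256_GCM, // illustrative member name
  })
  let plain = Buffer.alloc(0)
  for await (const chunk of decryptFile(encrypted, 'password', {
    algorithm: EncryptionAlgorithm.AES_256_GCM, // illustrative member name
  })) {
    plain = Buffer.concat([plain, chunk])
  }
  return plain
}
```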
2 changes: 1 addition & 1 deletion packages/auto-dag-data/src/ipld/chunker.ts
@@ -24,7 +24,7 @@ const NODE_LINK_DEPTH_SIZE = 4
// u64 -> 8 bytes
const NODE_SIZE_SIZE = 8
// Limit at 255 string length (Mac Limit)
const MAX_NAME_SIZE = 255
export const MAX_NAME_SIZE = 255
const END_OF_STRING_BYTE = 1
const NODE_NAME_SIZE = MAX_NAME_SIZE + END_OF_STRING_BYTE
// Upload options may be amplified in the future
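Exporting `MAX_NAME_SIZE` lets callers check a filename up front instead of catching the `Filename is too long` error that the new tests exercise. A minimal sketch, assuming both names are importable from the package root (the tests import them from `../src/ipld/chunker`).

```ts
import { MemoryBlockstore } from 'blockstore-core'
import { MAX_NAME_SIZE, processFileToIPLDFormat } from '@autonomys/auto-dag-data'

// Validate the name before chunking; the chunker itself would throw
// `Filename is too long: ${name.length} > ${MAX_NAME_SIZE}` otherwise.
const storeSmallFile = (name: string, content: Buffer) => {
  if (name.length > MAX_NAME_SIZE) {
    throw new Error(`"${name}" is longer than ${MAX_NAME_SIZE} characters`)
  }
  const blockstore = new MemoryBlockstore()
  return processFileToIPLDFormat(blockstore, [content], BigInt(content.length), name)
}
```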
1 change: 1 addition & 0 deletions packages/auto-dag-data/src/ipld/nodes.ts
@@ -218,6 +218,7 @@ export const createMetadataNode = (
name: metadata.name,
linkDepth: 0,
data,
size: BigInt(data.length).valueOf(),
}),
),
maxNodeSize,
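Recording `size` on the metadata node means consumers can read the payload length straight from the decoded `IPLDNodeData`, which the new chunker test checks on the head node. A small sketch following the same decode pattern as the tests; the imports are assumed to be re-exported at the package root (the tests use `../src/ipld/utils` and `../src/metadata`).

```ts
import { decodeNode, IPLDNodeData } from '@autonomys/auto-dag-data'

// Given a node's encoded bytes (e.g. fetched from a blockstore),
// decode it and read the size recorded in its IPLD metadata.
const readNodeSize = (encoded: Uint8Array): bigint | undefined => {
  const node = decodeNode(encoded)
  const ipldMetadata = IPLDNodeData.decode(node?.Data ?? new Uint8Array())
  return ipldMetadata.size
}
```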
137 changes: 133 additions & 4 deletions packages/auto-dag-data/tests/chunker.spec.ts
@@ -1,14 +1,24 @@
import { BaseBlockstore, MemoryBlockstore } from 'blockstore-core'
import { cidOfNode, cidToString, createSingleFileIpldNode } from '../src'
import {
cidOfNode,
cidToString,
createFileChunkIpldNode,
createSingleFileIpldNode,
fileBuilders,
} from '../src'
import {
DEFAULT_MAX_CHUNK_SIZE,
LINK_SIZE_IN_BYTES,
MAX_NAME_SIZE,
NODE_METADATA_SIZE,
processBufferToIPLDFormatFromChunks,
processChunksToIPLDFormat,
processFileToIPLDFormat,
processFolderToIPLDFormat,
processMetadataToIPLDFormat,
} from '../src/ipld/chunker'
import { createNode, decodeNode, PBNode } from '../src/ipld/utils'
import { decodeIPLDNodeData, IPLDNodeData, MetadataType, OffchainMetadata } from '../src/metadata'
import { createNode, decodeNode, encodeNode, PBNode } from '../src/ipld/utils'
import { fileMetadata, IPLDNodeData, MetadataType, OffchainMetadata } from '../src/metadata'

describe('chunker', () => {
describe('file creation', () => {
@@ -88,6 +98,22 @@ describe('chunker', () => {
})
})

it('create a file with long name should throw an error', async () => {
const name = 'a'.repeat(MAX_NAME_SIZE + 1)
const blockstore = new MemoryBlockstore()
expect(() =>
processFileToIPLDFormat(blockstore, [Buffer.from('hello')], BigInt(5), name),
).toThrow(`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`)
})

it('create a file with long name from buffer should throw an error', async () => {
const name = 'a'.repeat(MAX_NAME_SIZE + 1)
const blockstore = new MemoryBlockstore()
await expect(
processBufferToIPLDFormatFromChunks(blockstore, [], name, BigInt(5), fileBuilders),
).rejects.toThrow(`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`)
})

it('create a file dag with inlinks', async () => {
const chunkLength = 1000
const maxNodeSize = chunkLength + NODE_METADATA_SIZE
@@ -194,6 +220,89 @@
expect(rootCount).toBe(1)
expect(inlinkCount).toBe(3)
})

it('create a folder with long name should throw an error', async () => {
const name = 'a'.repeat(MAX_NAME_SIZE + 1)
const blockstore = new MemoryBlockstore()
await expect(processFolderToIPLDFormat(blockstore, [], name, BigInt(1000))).rejects.toThrow(
`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`,
)
})
})

describe('asynchronous file creation', () => {
it('process chunks to IPLD format should return the leftover buffer', async () => {
const filename = 'test.txt'
const chunkSize = DEFAULT_MAX_CHUNK_SIZE
const chunksCount = 1.5
const buffer = Buffer.from(
Array.from({ length: chunkSize * chunksCount })
.map(() => Math.floor(Math.random() * 16).toString(16))
.join(''),
)

const leftoverSize = buffer.length % chunkSize
const blockstore = new MemoryBlockstore()
const leftover = await processChunksToIPLDFormat(blockstore, [buffer], fileBuilders)
expect(leftover.length).toBe(leftoverSize)
})

it('process chunks with exact chunk size len(leftover)=0', async () => {
const filename = 'test.txt'
const chunkSize = DEFAULT_MAX_CHUNK_SIZE
const chunksCount = 4
const buffer = Buffer.from(
Array.from({ length: chunkSize * chunksCount })
.map(() => Math.floor(Math.random() * 16).toString(16))
.join(''),
)

const blockstore = new MemoryBlockstore()
const leftover = await processChunksToIPLDFormat(blockstore, [buffer], fileBuilders)

expect(leftover.length).toBe(0)
})

it('process file by chunks', async () => {
const filename = 'test.txt'
const chunkSize = DEFAULT_MAX_CHUNK_SIZE
const chunksCount = 4.5
const buffer = Buffer.from(
Array.from({ length: chunkSize * chunksCount })
.map(() => Math.floor(Math.random() * 16).toString(16))
.join(''),
)

const blockstore = new MemoryBlockstore()
const leftover = await processChunksToIPLDFormat(blockstore, [buffer], fileBuilders)
const leftoverCid = createFileChunkIpldNode(leftover)
await blockstore.put(cidOfNode(leftoverCid), encodeNode(leftoverCid))

const mapCIDs = (async function* () {
for await (const { cid } of blockstore.getAll()) {
yield cid
}
})()

const headCID = await processBufferToIPLDFormatFromChunks(
blockstore,
mapCIDs,
filename,
BigInt(buffer.length),
fileBuilders,
)

const headNode = decodeNode(await blockstore.get(headCID))
expect(headNode?.Links.length).toBe(Math.ceil(chunksCount))
expect(cidToString(headNode?.Links[headNode.Links.length - 1].Hash)).toEqual(
cidToString(cidOfNode(leftoverCid)),
)
const ipldMetadata = IPLDNodeData.decode(headNode?.Data ?? new Uint8Array())
expect(ipldMetadata.name).toBe(filename)
expect(ipldMetadata.type).toBe(MetadataType.File)
expect(ipldMetadata.linkDepth).toBe(1)
expect(ipldMetadata.size!.toString()).toBe(buffer.length.toString())
})
})

describe('metadata creation', () => {
@@ -209,11 +318,31 @@
}

const blockstore = new MemoryBlockstore()
const headCID = await processMetadataToIPLDFormat(blockstore, metadata)
await processMetadataToIPLDFormat(blockstore, metadata)
const nodes = await nodesFromBlockstore(blockstore)
expect(nodes.length).toBe(1)
})

it('create a metadata dag with long name should throw an error', async () => {
const name = 'a'.repeat(MAX_NAME_SIZE + 1)
const metadata = fileMetadata(
cidOfNode(createNode(Buffer.from(Math.random().toString()))),
[
{
cid: cidToString(cidOfNode(createNode(Buffer.from(Math.random().toString())))),
size: BigInt(1000),
},
],
BigInt(1000),
name,
)

const blockstore = new MemoryBlockstore()
await expect(processMetadataToIPLDFormat(blockstore, metadata)).rejects.toThrow(
`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`,
)
})

it('large metadata dag represented into multiple nodes', async () => {
const metadata: OffchainMetadata = {
type: 'file',
81 changes: 81 additions & 0 deletions packages/auto-dag-data/tests/compression.spec.ts
@@ -1,5 +1,10 @@
import { AwaitIterable } from 'interface-store'
import { compressFile, COMPRESSION_CHUNK_SIZE, CompressionAlgorithm, decompressFile } from '../src'

const awaitIterable = async (it: AwaitIterable<Buffer>) => {
for await (const _ of it);
}

describe('compression', () => {
it('compresses and decompresses a file with default options', async () => {
const file = Buffer.from('hello'.repeat(1000))
@@ -55,4 +60,80 @@ describe('compression', () => {

expect(decompressedBuffer.toString()).toBe(file.toString())
})

it('asynchronously iterates over the compressed file for chunked compression', async () => {
const chunkSize = COMPRESSION_CHUNK_SIZE
const chunks = 5
const chunk = Buffer.from('hello'.repeat(chunkSize))
const compressed = compressFile(
(async function* () {
for (let i = 0; i < chunks; i++) {
yield chunk
await new Promise((resolve) => setTimeout(resolve, 50))
}
})(),
{
level: 9,
algorithm: CompressionAlgorithm.ZLIB,
},
)

await awaitIterable(compressed)
}, 10_000)

it('throws an error if the compression algorithm is not supported', async () => {
await expect(
awaitIterable(compressFile([Buffer.from('hello')], { algorithm: 'efwhhgfew' as any })),
).rejects.toThrow('Unsupported compression algorithm')
})

it('throws an error if the compression level is invalid', async () => {
await expect(
awaitIterable(
compressFile([Buffer.from('hello')], {
algorithm: CompressionAlgorithm.ZLIB,
level: -1 as any,
}),
),
).rejects.toThrow('Invalid compression level')
})

it('throws an error if the chunk size is invalid', async () => {
await expect(
awaitIterable(
compressFile([Buffer.from('hello')], {
algorithm: CompressionAlgorithm.ZLIB,
chunkSize: 0,
}),
),
).rejects.toThrow('Invalid chunk size')
})

it('throws an error if the decompression algorithm is not supported', async () => {
await expect(
awaitIterable(decompressFile([Buffer.from('hello')], { algorithm: 'efwhhgfew' as any })),
).rejects.toThrow('Unsupported compression algorithm')
})

it('throws an error if the decompression chunk size is invalid', async () => {
await expect(
awaitIterable(
decompressFile([Buffer.from('hello')], {
chunkSize: 0,
algorithm: CompressionAlgorithm.ZLIB,
}),
),
).rejects.toThrow('Invalid chunk size')
})

it('throws an error if the decompression level is invalid', async () => {
await expect(
awaitIterable(
decompressFile([Buffer.from('hello')], {
level: -1 as any,
algorithm: CompressionAlgorithm.ZLIB,
}),
),
).rejects.toThrow('Invalid compression level')
})
})
21 changes: 21 additions & 0 deletions packages/auto-dag-data/tests/encryption.spec.ts
@@ -1,5 +1,10 @@
import { AwaitIterable } from 'interface-store'
import { decryptFile, encryptFile, EncryptionAlgorithm } from '../src'

const awaitIterable = async (it: AwaitIterable<Buffer>) => {
for await (const _ of it);
}

describe('encryption', () => {
it('encrypts and decrypts a file with default chunk size', async () => {
const chunk = 'hello'
@@ -101,4 +106,20 @@
decryptedBuffer = Buffer.concat([decryptedBuffer, chunk])
}
})

it('throws an error if the encryption algorithm is not supported', async () => {
await expect(
awaitIterable(
encryptFile([Buffer.from('hello')], 'password', { algorithm: 'efwhhgfew' as any }),
),
).rejects.toThrow('Unsupported encryption algorithm')
})

it('throws an error if the decryption algorithm is not supported', async () => {
await expect(
awaitIterable(
decryptFile([Buffer.from('hello')], 'password', { algorithm: 'efwhhgfew' as any }),
),
).rejects.toThrow('Unsupported encryption algorithm')
})
})