Commit
Merge pull request #17 from DIG-Network/release/v0.0.1-alpha.17
Release/v0.0.1 alpha.17
MichaelTaylor3D authored Sep 16, 2024
2 parents d146d30 + 9b57b89 commit cf452ea
Showing 5 changed files with 50 additions and 21 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,13 @@

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

### [0.0.1-alpha.17](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.16...v0.0.1-alpha.17) (2024-09-16)


### Features

* add the ability to request chunks of data from integrity tree ([cd578a8](https://github.com/DIG-Network/dig-chia-sdk/commit/cd578a86e46aab9236999ed1f011a482f14870de))

### [0.0.1-alpha.16](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.15...v0.0.1-alpha.16) (2024-09-10)


4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@dignetwork/dig-sdk",
"version": "0.0.1-alpha.16",
"version": "0.0.1-alpha.17",
"description": "",
"type": "commonjs",
"main": "./dist/index.js",
32 changes: 29 additions & 3 deletions src/DataIntegrityTree/DataIntegrityTree.ts
@@ -495,7 +495,12 @@ class DataIntegrityTree {
* @param rootHash - The root hash of the tree. Defaults to the latest root hash.
* @returns The readable stream for the file.
*/
getValueStream(hexKey: string, rootHash: string | null = null): Readable {
getValueStream(
hexKey: string,
rootHash: string | null = null,
byteOffset: number | null = null,
length: number | null = null
): Readable {
if (!isHexString(hexKey)) {
throw new Error("key must be a valid hex string");
}
@@ -526,8 +531,29 @@
throw new Error(`File at path ${filePath} does not exist`);
}

// Create a read stream and pipe it through a decompression stream using the same algorithm (gzip)
const readStream = fs.createReadStream(filePath);
const fileSize = fs.statSync(filePath).size;

// Validate offset and length
if (byteOffset !== null && length !== null) {
if (byteOffset + length > fileSize) {
throw new Error(
`Offset (${byteOffset}) and length (${length}) exceed the file size (${fileSize}).`
);
}
}

// Create the read stream with optional byte range
const options: { start?: number; end?: number } = {};

if (byteOffset !== null) {
options.start = byteOffset;
}

if (length !== null && byteOffset !== null) {
options.end = byteOffset + length - 1; // `end` is inclusive, hence `byteOffset + length - 1`
}

const readStream = fs.createReadStream(filePath, options);
const decompressStream = zlib.createGunzip();

// Return the combined stream as a generic Readable stream
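For orientation, here is a minimal sketch of how the byte-range parameters introduced in this hunk might be consumed from calling code. It is illustrative only: it assumes DataIntegrityTree is exported from src/DataIntegrityTree/DataIntegrityTree, that hexKey names an existing entry, and (per the diff) that the start/end range is applied to the on-disk gzip-compressed file before it is piped through zlib.createGunzip().

import { DataIntegrityTree } from "./src/DataIntegrityTree/DataIntegrityTree";

// Illustrative helper: read one chunk of a stored value into a Buffer.
// Passing null for rootHash falls back to the latest root hash; byteOffset
// and length map to createReadStream's start/end options in the new code.
async function readChunk(
  tree: DataIntegrityTree,
  hexKey: string,
  byteOffset: number,
  length: number
): Promise<Buffer> {
  const chunks: Buffer[] = [];
  const stream = tree.getValueStream(hexKey, null, byteOffset, length);
  for await (const chunk of stream) {
    chunks.push(chunk as Buffer);
  }
  return Buffer.concat(chunks);
}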
26 changes: 11 additions & 15 deletions src/utils/directoryUtils.ts
@@ -8,27 +8,28 @@
const limitConcurrency = async (
concurrencyLimit: number,
tasks: (() => Promise<void>)[]
) => {
const results = [];
const executing: Promise<void>[] = [];

for (const task of tasks) {
const p = task();
results.push(p);

// Once the limit is reached, wait for one to complete
if (executing.length >= concurrencyLimit) {
await Promise.race(executing);
}

// Add the new task to the executing array
executing.push(p);

// When a task completes, remove it from the executing array
p.finally(() => executing.splice(executing.indexOf(p), 1));
const cleanup = p.finally(() => {
executing.splice(executing.indexOf(cleanup), 1);
});

// Once the limit is reached, wait for one to complete
if (executing.length >= concurrencyLimit) {
await new Promise<void>((resolve) => setTimeout(resolve, 100));
await Promise.race(executing);
}
}

// Wait for all remaining tasks to complete
return Promise.all(results);
return Promise.all(executing);
};

export const addDirectory = async (
@@ -70,16 +71,11 @@ export const addDirectory = async (
const stream = fs.createReadStream(filePath);
datalayer
.upsertKey(stream, Buffer.from(relativePath).toString("hex"))
.then(async () => {
await new Promise<void>((resolve) => setTimeout(resolve, 100));
resolve();
})
.then(() => resolve())
.catch(reject);
})
);
}

await new Promise<void>((resolve) => setTimeout(resolve, 100));
}

// Run tasks with limited concurrency (set the concurrency limit as needed)
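A side note on the pattern adjusted above: the promise pool behind limitConcurrency can also be expressed with a Set keyed on the settled promise, which avoids the executing.indexOf / splice bookkeeping. The sketch below is illustrative only (the name runWithLimit is made up) and is not part of this commit.

// Minimal promise-pool sketch: start tasks until the limit is reached, then
// wait for any in-flight task to settle before starting the next one.
async function runWithLimit(
  concurrencyLimit: number,
  tasks: (() => Promise<void>)[]
): Promise<void> {
  const executing = new Set<Promise<void>>();

  for (const task of tasks) {
    // Start the task and drop it from the live set once it settles.
    const p = task().finally(() => {
      executing.delete(p);
    });
    executing.add(p);

    // At the limit, block until at least one in-flight task settles.
    if (executing.size >= concurrencyLimit) {
      await Promise.race(executing);
    }
  }

  // Drain whatever is still in flight.
  await Promise.all(executing);
}

Because Promise.race resolves as soon as any tracked promise settles, no fixed setTimeout delay is needed to yield between iterations.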
