From 05efba5a0582871e39fca9de06865b4033f9961c Mon Sep 17 00:00:00 2001 From: Michael Taylor Date: Thu, 31 Oct 2024 08:48:24 -0400 Subject: [PATCH 1/3] feat: allow partial download sync of stores and picking back up --- src/DigNetwork/PropagationServer.ts | 81 +++++++++++------------------ 1 file changed, 31 insertions(+), 50 deletions(-) diff --git a/src/DigNetwork/PropagationServer.ts b/src/DigNetwork/PropagationServer.ts index 316f106..76e792f 100644 --- a/src/DigNetwork/PropagationServer.ts +++ b/src/DigNetwork/PropagationServer.ts @@ -832,7 +832,8 @@ export class PropagationServer { } /** - * Static function to handle downloading multiple files from a DataStore based on file paths. + * Downloads a store from the specified IP address, saving directly to the main directory, + * skipping files that already exist. */ static async downloadStore( storeId: string, @@ -844,7 +845,7 @@ export class PropagationServer { // Initialize wallet await propagationServer.initializeWallet(); - // Check if the store exists + // Verify the store and rootHash existence const { storeExists, rootHashExists } = await propagationServer.checkStoreExists(rootHash); if (!storeExists || !rootHashExists) { @@ -855,77 +856,57 @@ export class PropagationServer { const datFileContent = await propagationServer.fetchFile(`${rootHash}.dat`); const root = JSON.parse(datFileContent.toString()); - // Prepare download tasks - const downloadTasks = []; - - for (const [fileKey, fileData] of Object.entries(root.files)) { - const dataPath = getFilePathFromSha256( - root.files[fileKey].sha256, - "data" - ); - const label = Buffer.from(fileKey, "hex").toString("utf-8"); - downloadTasks.push({ label, dataPath }); - } - - // Limit the number of concurrent downloads - const concurrencyLimit = 10; // Adjust this number as needed - - // Create a temporary directory - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "downloadStore-")); + // Prepare download tasks, skipping existing files + const downloadTasks = Object.entries(root.files) + .map(([fileKey, fileData]: [any, any]) => { + const dataPath = getFilePathFromSha256(fileData.sha256, "data"); + const label = Buffer.from(fileKey, "hex").toString("utf-8"); + const destinationPath = path.join(STORE_PATH, storeId, dataPath); + return fs.existsSync(destinationPath) + ? 
null + : { label, dataPath: destinationPath }; + }) + .filter((task) => task !== null); + + // Limit concurrent downloads + const concurrencyLimit = 10; try { - // Download files to the temporary directory + // Download missing files directly to the main directory await asyncPool(concurrencyLimit, downloadTasks, async (task) => { await propagationServer.downloadFile( task.label, task.dataPath, rootHash, - tempDir + path.dirname(task.dataPath) ); }); - // Save the rootHash.dat file to the temporary directory - if (!fs.existsSync(tempDir)) { - fs.mkdirSync(tempDir, { recursive: true }); + // Save the rootHash.dat file if it doesn’t exist + const datFilePath = path.join(STORE_PATH, storeId, `${rootHash}.dat`); + if (!fs.existsSync(datFilePath)) { + fs.writeFileSync(datFilePath, datFileContent); } - fs.writeFileSync(path.join(tempDir, `${rootHash}.dat`), datFileContent); - - // Integrity check for the downloaded files was done during the download - // Here we want to make sure we got all the files or we reject the download session + // Verify all files are present for (const [fileKey, fileData] of Object.entries(root.files)) { - const dataPath = getFilePathFromSha256( - root.files[fileKey].sha256, - "data" - ); - - if (!fs.existsSync(path.join(tempDir, dataPath))) { - if (!fs.existsSync(path.join(STORE_PATH, storeId, dataPath))) { - throw new Error( - `Missing file: ${Buffer.from(fileKey, "hex")}, aborting session.` - ); - } + // @ts-ignore + const dataPath = getFilePathFromSha256(fileData.sha256, "data"); + if (!fs.existsSync(path.join(STORE_PATH, storeId, dataPath))) { + throw new Error( + `Missing file: ${Buffer.from(fileKey, "hex")}, aborting session.` + ); } } - // After all downloads are complete, copy from temp directory to the main directory - const destinationDir = path.join(STORE_PATH, storeId); - fsExtra.copySync(tempDir, destinationDir, { - overwrite: false, // Prevents overwriting existing files - errorOnExist: false, // No error if file already exists - }); - // Generate the manifest file in the main directory const dataStore = DataStore.from(storeId); await dataStore.cacheStoreCreationHeight(); await dataStore.generateManifestFile(); console.log(green(`✔ All files have been downloaded to ${storeId}.`)); - } catch (error) { + } catch (error: any) { console.log(red("✖ Error downloading files:"), error); - } finally { - // Clean up the temporary directory - fsExtra.removeSync(tempDir); } } } From 3a6d1f1f328880e4245b56a17b298902aa017331 Mon Sep 17 00:00:00 2001 From: Michael Taylor Date: Thu, 31 Oct 2024 08:52:15 -0400 Subject: [PATCH 2/3] feat: udi hex getters --- src/utils/Udi.ts | 219 ++++++++++++++++++++++++----------------------- 1 file changed, 114 insertions(+), 105 deletions(-) diff --git a/src/utils/Udi.ts b/src/utils/Udi.ts index cf662a0..6d8f438 100644 --- a/src/utils/Udi.ts +++ b/src/utils/Udi.ts @@ -3,139 +3,148 @@ import { createHash } from 'crypto'; import { encode as base32Encode, decode as base32Decode } from 'hi-base32'; // -// This class encapsulates the concept of a Universal Data Identifier (UDI) which is a +// This class encapsulates the concept of a Universal Data Identifier (UDI), which is a // standardized way to identify resources across the distributed DIG mesh network. -// The UDI is formatted as follows: -// urn:dig:chainName:storeId:rootHash/resourceKey -// The UDI can be used to uniquely identify resources across the DIG network. 
+// The UDI format: urn:dig:chainName:storeId:rootHash/resourceKey +// This allows unique resource identification across the DIG network. // class Udi { - readonly chainName: string; - readonly storeId: Buffer; - readonly rootHash: Buffer | null; - readonly resourceKey: string | null; - static readonly nid: string = "dig"; - static readonly namespace: string = `urn:${Udi.nid}`; - - constructor( - chainName: string, - storeId: string | Buffer, - rootHash: string | Buffer | null = null, - resourceKey: string | null = null - ) { - if (!storeId) { - throw new Error("storeId cannot be empty"); - } - this.chainName = chainName || "chia"; - this.storeId = Udi.convertToBuffer(storeId); - this.rootHash = rootHash ? Udi.convertToBuffer(rootHash) : null; - this.resourceKey = resourceKey; + readonly chainName: string; + private readonly _storeId: Buffer; + private readonly _rootHash: Buffer | null; + readonly resourceKey: string | null; + static readonly nid: string = "dig"; + static readonly namespace: string = `urn:${Udi.nid}`; + + constructor( + chainName: string, + storeId: string | Buffer, + rootHash: string | Buffer | null = null, + resourceKey: string | null = null + ) { + if (!storeId) { + throw new Error("storeId cannot be empty"); } - - static convertToBuffer(input: string | Buffer): Buffer { - if (Buffer.isBuffer(input)) { - return input; - } - - if (Udi.isHex(input)) { - return Buffer.from(input, 'hex'); - } - - if (Udi.isBase32(input)) { - return Buffer.from(base32Decode(input, false)); // Decode as UTF-8 - } - - throw new Error("Invalid input encoding. Must be 32-byte hex or Base32 string."); + this.chainName = chainName || "chia"; + this._storeId = Udi.convertToBuffer(storeId); + this._rootHash = rootHash ? Udi.convertToBuffer(rootHash) : null; + this.resourceKey = resourceKey; + } + + static convertToBuffer(input: string | Buffer): Buffer { + if (Buffer.isBuffer(input)) { + return input; } - static isHex(input: string): boolean { - return /^[a-fA-F0-9]{64}$/.test(input); + if (Udi.isHex(input)) { + return Buffer.from(input, 'hex'); } - static isBase32(input: string): boolean { - return /^[a-z2-7]{52}$/.test(input.toLowerCase()); + if (Udi.isBase32(input)) { + return Buffer.from(base32Decode(input, false)); // Decode as UTF-8 } - withRootHash(rootHash: string | Buffer | null): Udi { - return new Udi(this.chainName, this.storeId, rootHash, this.resourceKey); - } - - withResourceKey(resourceKey: string | null): Udi { - return new Udi(this.chainName, this.storeId, this.rootHash, resourceKey); - } + throw new Error("Invalid input encoding. 
Must be 32-byte hex or Base32 string."); + } - static fromUrn(urn: string): Udi { - const parsedUrn = urns.parseURN(urn); - if (parsedUrn.nid.toLowerCase() !== Udi.nid) { - throw new Error(`Invalid nid: ${parsedUrn.nid}`); - } + static isHex(input: string): boolean { + return /^[a-fA-F0-9]{64}$/.test(input); + } - const parts = parsedUrn.nss.split(':'); - if (parts.length < 2) { - throw new Error(`Invalid UDI format: ${parsedUrn.nss}`); - } + static isBase32(input: string): boolean { + return /^[a-z2-7]{52}$/.test(input.toLowerCase()); + } - const chainName = parts[0]; - const storeId = parts[1].split('/')[0]; + withRootHash(rootHash: string | Buffer | null): Udi { + return new Udi(this.chainName, this._storeId, rootHash, this.resourceKey); + } - let rootHash: string | null = null; - if (parts.length > 2) { - rootHash = parts[2].split('/')[0]; - } + withResourceKey(resourceKey: string | null): Udi { + return new Udi(this.chainName, this._storeId, this._rootHash, resourceKey); + } - const pathParts = parsedUrn.nss.split('/'); - let resourceKey: string | null = null; - if (pathParts.length > 1) { - resourceKey = pathParts.slice(1).join('/'); - } - - return new Udi(chainName, storeId, rootHash, resourceKey); + static fromUrn(urn: string): Udi { + const parsedUrn = urns.parseURN(urn); + if (parsedUrn.nid.toLowerCase() !== Udi.nid) { + throw new Error(`Invalid nid: ${parsedUrn.nid}`); } - toUrn(encoding: 'hex' | 'base32' = 'hex'): string { - const storeIdStr = this.bufferToString(this.storeId, encoding); - let urn = `${Udi.namespace}:${this.chainName}:${storeIdStr}`; - - if (this.rootHash) { - const rootHashStr = this.bufferToString(this.rootHash, encoding); - urn += `:${rootHashStr}`; - } + const parts = parsedUrn.nss.split(':'); + if (parts.length < 2) { + throw new Error(`Invalid UDI format: ${parsedUrn.nss}`); + } - if (this.resourceKey) { - urn += `/${this.resourceKey}`; - } + const chainName = parts[0]; + const storeId = parts[1].split('/')[0]; - return urn; + let rootHash: string | null = null; + if (parts.length > 2) { + rootHash = parts[2].split('/')[0]; } - bufferToString(buffer: Buffer, encoding: 'hex' | 'base32'): string { - return encoding === 'hex' - ? buffer.toString('hex') - : base32Encode(buffer).toLowerCase().replace(/=+$/, ''); + const pathParts = parsedUrn.nss.split('/'); + let resourceKey: string | null = null; + if (pathParts.length > 1) { + resourceKey = pathParts.slice(1).join('/'); } - equals(other: Udi): boolean { - return ( - this.storeId.equals(other.storeId) && - this.chainName === other.chainName && - (this.rootHash && other.rootHash ? 
this.rootHash.equals(other.rootHash) : this.rootHash === other.rootHash) && - this.resourceKey === other.resourceKey - ); - } + return new Udi(chainName, storeId, rootHash, resourceKey); + } - toString(): string { - return this.toUrn(); - } + toUrn(encoding: 'hex' | 'base32' = 'hex'): string { + const storeIdStr = this.bufferToString(this._storeId, encoding); + let urn = `${Udi.namespace}:${this.chainName}:${storeIdStr}`; - clone(): Udi { - return new Udi(this.chainName, this.storeId, this.rootHash, this.resourceKey); + if (this._rootHash) { + const rootHashStr = this.bufferToString(this._rootHash, encoding); + urn += `:${rootHashStr}`; } - hashCode(): string { - const hash = createHash('sha256'); - hash.update(this.toUrn()); - return hash.digest('hex'); + if (this.resourceKey) { + urn += `/${this.resourceKey}`; } + + return urn; + } + + bufferToString(buffer: Buffer, encoding: 'hex' | 'base32'): string { + return encoding === 'hex' + ? buffer.toString('hex') + : base32Encode(buffer).toLowerCase().replace(/=+$/, ''); + } + + equals(other: Udi): boolean { + return ( + this._storeId.equals(other._storeId) && + this.chainName === other.chainName && + (this._rootHash && other._rootHash ? this._rootHash.equals(other._rootHash) : this._rootHash === other._rootHash) && + this.resourceKey === other.resourceKey + ); + } + + toString(): string { + return this.toUrn(); + } + + clone(): Udi { + return new Udi(this.chainName, this._storeId, this._rootHash, this.resourceKey); + } + + hashCode(): string { + const hash = createHash('sha256'); + hash.update(this.toUrn()); + return hash.digest('hex'); + } + + // Getter for storeId as a hex string + get storeId(): string { + return this._storeId.toString('hex'); + } + + // Getter for rootHash as a hex string + get rootHash(): string | null { + return this._rootHash ? this._rootHash.toString('hex') : null; + } } export { Udi }; From 6ae760834794a50de02450593a6807df465bf055 Mon Sep 17 00:00:00 2001 From: Michael Taylor Date: Thu, 31 Oct 2024 08:53:26 -0400 Subject: [PATCH 3/3] chore(release): 0.0.1-alpha.178 --- CHANGELOG.md | 8 ++++++++ package-lock.json | 4 ++-- package.json | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 167597b..439702a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,14 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [0.0.1-alpha.178](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.177...v0.0.1-alpha.178) (2024-10-31) + + +### Features + +* allow partial download sync of stores and picking back up ([05efba5](https://github.com/DIG-Network/dig-chia-sdk/commit/05efba5a0582871e39fca9de06865b4033f9961c)) +* udi hex getters ([3a6d1f1](https://github.com/DIG-Network/dig-chia-sdk/commit/3a6d1f1f328880e4245b56a17b298902aa017331)) + ### [0.0.1-alpha.177](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.176...v0.0.1-alpha.177) (2024-10-30) ### [0.0.1-alpha.176](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.175...v0.0.1-alpha.176) (2024-10-30) diff --git a/package-lock.json b/package-lock.json index f18ec4d..c958ffa 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@dignetwork/dig-sdk", - "version": "0.0.1-alpha.177", + "version": "0.0.1-alpha.178", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@dignetwork/dig-sdk", - "version": "0.0.1-alpha.177", + "version": "0.0.1-alpha.178", "license": "ISC", "dependencies": { "@dignetwork/datalayer-driver": "^0.1.29", diff --git a/package.json b/package.json index 1218393..ea94ce7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@dignetwork/dig-sdk", - "version": "0.0.1-alpha.177", + "version": "0.0.1-alpha.178", "description": "", "type": "commonjs", "main": "./dist/index.js",
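
Patch 1's resumability comes from one change: instead of staging into a temp
directory and copying over at the end, downloadStore now writes straight into
the store directory and drops any task whose destination already exists, so
re-running downloadStore after an interruption simply picks up the missing
files. A minimal TypeScript sketch of that task-building step; pendingTasks,
pathFor, and the RootDat shape are illustrative stand-ins for the patch's
inline code and for getFilePathFromSha256:

    import * as fs from "fs";
    import * as path from "path";

    interface RootDat {
      files: Record<string, { sha256: string }>;
    }

    // Build the download list for a store, dropping anything a previous
    // (possibly interrupted) run already wrote to disk.
    function pendingTasks(
      root: RootDat,
      storeDir: string,
      pathFor: (sha256: string) => string // stand-in for getFilePathFromSha256
    ): { label: string; destinationPath: string }[] {
      return Object.entries(root.files)
        .map(([fileKey, fileData]) => ({
          // file keys in the .dat manifest are hex-encoded labels
          label: Buffer.from(fileKey, "hex").toString("utf-8"),
          destinationPath: path.join(storeDir, pathFor(fileData.sha256)),
        }))
        .filter((task) => !fs.existsSync(task.destinationPath));
    }

Filtering on fs.existsSync directly, rather than mapping misses to null and
filtering them out afterwards as the patch does, has a side benefit: before
TypeScript 5.5, a plain (task) => task !== null filter does not narrow the
element type without an explicit type predicate.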
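
Patch 2 makes storeId and rootHash private Buffers (_storeId/_rootHash) and
exposes them through hex-string getters. A short sketch of how that reads at a
call site; the import path and the 64-character ids are placeholders:

    import { Udi } from "./src/utils/Udi";

    const storeId = "a".repeat(64);  // placeholder 32-byte store id, hex
    const rootHash = "b".repeat(64); // placeholder root hash, hex

    const udi = new Udi("chia", storeId, rootHash, "index.html");

    // The getters return hex strings now, not the underlying Buffers.
    console.log(udi.storeId);  // 64 hex chars
    console.log(udi.rootHash); // 64 hex chars

    // Round-trip through the URN form.
    const urn = udi.toUrn(); // urn:dig:chia:<storeId>:<rootHash>/index.html
    console.log(Udi.fromUrn(urn).equals(udi)); // true

    // The with* helpers hand back new instances; the original is untouched.
    const pinned = new Udi("chia", storeId).withRootHash(rootHash);
    console.log(pinned.rootHash); // same hex string as rootHash

Since the Buffers are no longer reachable from outside the class, a caller
that needs raw bytes can reconstruct them with Buffer.from(udi.storeId, "hex").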
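
The two ID encodings convertToBuffer accepts correspond to the isHex check
(exactly 64 hex characters) and the isBase32 check (exactly 52 characters once
padding is stripped). A quick demonstration with the same hi-base32 package
the class already imports; the id value is a placeholder:

    import { encode as base32Encode } from "hi-base32";

    const id = Buffer.from("ab".repeat(32), "hex"); // placeholder 32-byte id

    const hex = id.toString("hex");
    // Apply the same normalization toUrn uses on its base32 path:
    // lowercase, trailing "=" padding removed.
    const b32 = base32Encode(id).toLowerCase().replace(/=+$/, "");

    console.log(hex.length); // 64 -> accepted by Udi.isHex
    console.log(b32.length); // 52 -> accepted by Udi.isBase32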
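
One caveat worth flagging for a follow-up: convertToBuffer decodes base32
input via hi-base32's string decoder (base32Decode(input, false)), which
interprets the decoded bytes as UTF-8 text before Buffer.from re-encodes them.
Arbitrary 32-byte hashes are usually not valid UTF-8, so that round-trip can
be lossy; hi-base32's decode.asBytes would preserve the raw bytes.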