From 1369d0a1920dc516bf3de44b39aec7451a0a4917 Mon Sep 17 00:00:00 2001 From: Michael Taylor Date: Tue, 10 Sep 2024 09:55:52 -0400 Subject: [PATCH] feat: setup uploadHead and skipData on download --- scripts/release.sh | 3 ++ src/DigNetwork/DigNetwork.ts | 96 +++++++++++++++++++++++++++++------- src/blockchain/DataStore.ts | 48 +++++++++++++++++- tests/integration/test.js | 2 +- 4 files changed, 129 insertions(+), 20 deletions(-) diff --git a/scripts/release.sh b/scripts/release.sh index d34adb5..0c3e5dd 100644 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -13,6 +13,9 @@ if [ "$CURRENT_BRANCH" != "develop" ]; then exit 1 fi +# First run the build to make sure it compiles +npm run build + # Run standard-version for version bumping npx standard-version --prerelease alpha diff --git a/src/DigNetwork/DigNetwork.ts b/src/DigNetwork/DigNetwork.ts index c989dbb..0fd82ff 100644 --- a/src/DigNetwork/DigNetwork.ts +++ b/src/DigNetwork/DigNetwork.ts @@ -66,6 +66,59 @@ export class DigNetwork { return { generationIndex, lastLocalRootHash }; } + public async uploadStoreHead(digPeer: DigPeer): Promise<void> { + // First make sure that the remote store is up to date. + const rootHistory = await this.dataStore.getRootHistory(); + const localManifestHashes = await this.dataStore.getManifestHashes(); + const remoteManifestFile = await digPeer.propagationServer.getStoreData( + "manifest.dat" + ); + + const remoteManifestHashes = remoteManifestFile.split("\n").filter(Boolean); + const onChainRootHashes = rootHistory.map((root) => root.root_hash); + + // Check that remote manifest is one behind on-chain root hashes + if (remoteManifestHashes.length !== onChainRootHashes.length - 1) { + throw new Error( + "Remote manifest should be one behind the on-chain root. Cannot push head." 
+ ); + } + + // Compare each remote manifest hash with the corresponding on-chain root hash + for (let i = 0; i < remoteManifestHashes.length; i++) { + if (remoteManifestHashes[i] !== onChainRootHashes[i]) { + throw new Error( + `Remote manifest does not match on-chain root at index ${i}. Cannot push head.` + ); + } + } + + // Get the files for the latest local manifest hash + const filesToUpload = await this.dataStore.getFileSetForRootHash( + localManifestHashes[localManifestHashes.length - 1] + ); + + if (!filesToUpload.length) { + console.log("No files to upload."); + return; + } + + // Upload files to the remote peer with a progress bar + await this.runProgressBar( + filesToUpload.length, + "Store Data", + async (progress) => { + for (const filePath of filesToUpload) { + const relativePath = path + .relative(this.storeDir, filePath) + .replace(/\\/g, "/"); + await digPeer.propagationServer.pushFile(filePath, relativePath); + progress.increment(); + } + } + ); + } + // Uploads the store to a specific peer public async uploadStore(digPeer: DigPeer): Promise<void> { const { generationIndex } = await this.uploadPreflight(digPeer); @@ -88,7 +141,6 @@ export class DigNetwork { "Store Data", async (progress) => { for (const filePath of filesToUpload) { - console.log(`Uploading ${filePath}...`); const relativePath = path .relative(this.storeDir, filePath) .replace(/\\/g, "/"); @@ -108,14 +160,17 @@ export class DigNetwork { } public static unsubscribeFromStore(storeId: string): void { - fs.rmdirSync(path.join(DIG_FOLDER_PATH, "stores", storeId), { recursive: true }); + fs.rmdirSync(path.join(DIG_FOLDER_PATH, "stores", storeId), { + recursive: true, + }); fs.unlinkSync(path.join(DIG_FOLDER_PATH, "stores", storeId + ".json")); } // Downloads files from the network based on the manifest public async downloadFiles( forceDownload: boolean = false, - renderProgressBar: boolean = true + renderProgressBar: boolean = true, + skipData: boolean = false ): Promise<void> { try { const 
rootHistory = await this.dataStore.getRootHistory(); @@ -164,19 +219,21 @@ export class DigNetwork { if (datFileContent.root !== rootHash) throw new Error("Root hash mismatch"); - for (const file of Object.keys(datFileContent.files)) { - const filePath = getFilePathFromSha256( - datFileContent.files[file].sha256, - path.join(this.storeDir, "data") - ); - const isInDataDir = filePath.startsWith( - path.join(this.storeDir, "data") - ); - await this.downloadFileFromPeers( - getFilePathFromSha256(datFileContent.files[file].sha256, "data"), - filePath, - forceDownload || !isInDataDir - ); + if (!skipData) { + for (const file of Object.keys(datFileContent.files)) { + const filePath = getFilePathFromSha256( + datFileContent.files[file].sha256, + path.join(this.storeDir, "data") + ); + const isInDataDir = filePath.startsWith( + path.join(this.storeDir, "data") + ); + await this.downloadFileFromPeers( + getFilePathFromSha256(datFileContent.files[file].sha256, "data"), + filePath, + forceDownload || !isInDataDir + ); + } } if (localManifestHashes[i] !== rootHash) newRootHashes.push(rootHash); @@ -305,6 +362,8 @@ export class DigNetwork { task: (progress: any) => Promise<void> ): Promise<void> { // Using 'any' to work around TypeScript issues + const oldConsoleLog = console.log; + console.log = () => {}; // Suppress console.log output const multiBar = new MultiBar( { clearOnComplete: false, @@ -315,6 +374,9 @@ Presets.shades_classic ); const progress = multiBar.create(total, 0, { name }); - await task(progress).finally(() => multiBar.stop()); + await task(progress).finally(() => { + multiBar.stop(); + console.log = oldConsoleLog; // Restore console.log + }); } } diff --git a/src/blockchain/DataStore.ts b/src/blockchain/DataStore.ts index 8b43244..985cd20 100644 --- a/src/blockchain/DataStore.ts +++ b/src/blockchain/DataStore.ts @@ -1,4 +1,4 @@ -import fs from "fs"; +import fs from "fs"; import path from "path"; import { writerDelegatedPuzzleFromKey, @@ 
-27,7 +27,7 @@ import { selectUnspentCoins, calculateFeeForCoinSpends } from "./coins"; import { RootHistoryItem, DatFile } from "../types"; import { validateFileSha256 } from "../utils"; - +import { getFilePathFromSha256 } from "../utils/hashUtils"; import { DataIntegrityTree, DataIntegrityTreeOptions, @@ -654,4 +654,48 @@ export class DataStore { return updateStoreResponse.newStore; } + + public async getFileSetForRootHash(rootHash: string): Promise<string[]> { + const datFilePath = path.join(STORE_PATH, this.storeId, `${rootHash}.dat`); + const datFileContent = JSON.parse(fs.readFileSync(datFilePath, "utf-8")); + const heightDatFilePath = path.join( + STORE_PATH, + this.storeId, + "height.json" + ); + const manifestFilePath = path.join( + STORE_PATH, + this.storeId, + "manifest.dat" + ); + + const filesInvolved: string[] = []; + filesInvolved.push(manifestFilePath); + filesInvolved.push(datFilePath); + filesInvolved.push(heightDatFilePath); + + for (const fileKey of Object.keys(datFileContent.files)) { + // Resolve the content-addressed path for this file's sha256 under the data dir. + + const filePath = getFilePathFromSha256( + datFileContent.files[fileKey].sha256, + path.join(STORE_PATH, this.storeId, "data") + ); + + filesInvolved.push(filePath); + } + + return filesInvolved; + } + + public getManifestHashes(): string[] { + const manifestFilePath = path.join( + STORE_PATH, + this.storeId, + "manifest.dat" + ); + return fs.existsSync(manifestFilePath) + ? 
fs.readFileSync(manifestFilePath, "utf-8").split("\n").filter(Boolean) + : []; + } } diff --git a/tests/integration/test.js b/tests/integration/test.js index ec22dba..101e9f9 100644 --- a/tests/integration/test.js +++ b/tests/integration/test.js @@ -27,7 +27,7 @@ const fs = __importStar(require("fs")); const path = __importStar(require("path")); const crypto = __importStar(require("crypto")); const os = __importStar(require("os")); -const DataIntegrityTree_1 = require("../../src/Data@dignetwork/data-integrity-tree"); +const DataIntegrityTree_1 = require("@dignetwork/data-integrity-tree"); /** * Calculate the SHA-256 hash of a buffer using the crypto module. * @param buffer - The buffer.