Merge pull request #166 from DIG-Network/release/v0.0.1-alpha.178
Release/v0.0.1 alpha.178
MichaelTaylor3D authored Oct 31, 2024
2 parents cb6db26 + 6ae7608 commit 7fe3a87
Showing 5 changed files with 156 additions and 158 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,14 @@

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

### [0.0.1-alpha.178](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.177...v0.0.1-alpha.178) (2024-10-31)


### Features

* allow partial download sync of stores and picking back up ([05efba5](https://github.com/DIG-Network/dig-chia-sdk/commit/05efba5a0582871e39fca9de06865b4033f9961c))
* udi hex getters ([3a6d1f1](https://github.com/DIG-Network/dig-chia-sdk/commit/3a6d1f1f328880e4245b56a17b298902aa017331))

### [0.0.1-alpha.177](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.176...v0.0.1-alpha.177) (2024-10-30)

### [0.0.1-alpha.176](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.175...v0.0.1-alpha.176) (2024-10-30)
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
 {
   "name": "@dignetwork/dig-sdk",
-  "version": "0.0.1-alpha.177",
+  "version": "0.0.1-alpha.178",
   "description": "",
   "type": "commonjs",
   "main": "./dist/index.js",
81 changes: 31 additions & 50 deletions src/DigNetwork/PropagationServer.ts
@@ -832,7 +832,8 @@ export class PropagationServer {
   }

   /**
-   * Static function to handle downloading multiple files from a DataStore based on file paths.
+   * Downloads a store from the specified IP address, saving directly to the main directory,
+   * skipping files that already exist.
    */
   static async downloadStore(
     storeId: string,
@@ -844,7 +845,7 @@
     // Initialize wallet
     await propagationServer.initializeWallet();

-    // Check if the store exists
+    // Verify the store and rootHash existence
     const { storeExists, rootHashExists } =
       await propagationServer.checkStoreExists(rootHash);
     if (!storeExists || !rootHashExists) {
@@ -855,77 +856,57 @@
     const datFileContent = await propagationServer.fetchFile(`${rootHash}.dat`);
     const root = JSON.parse(datFileContent.toString());

-    // Prepare download tasks
-    const downloadTasks = [];
-
-    for (const [fileKey, fileData] of Object.entries(root.files)) {
-      const dataPath = getFilePathFromSha256(
-        root.files[fileKey].sha256,
-        "data"
-      );
-      const label = Buffer.from(fileKey, "hex").toString("utf-8");
-      downloadTasks.push({ label, dataPath });
-    }
-
-    // Limit the number of concurrent downloads
-    const concurrencyLimit = 10; // Adjust this number as needed
-
-    // Create a temporary directory
-    const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "downloadStore-"));
+    // Prepare download tasks, skipping existing files
+    const downloadTasks = Object.entries(root.files)
+      .map(([fileKey, fileData]: [any, any]) => {
+        const dataPath = getFilePathFromSha256(fileData.sha256, "data");
+        const label = Buffer.from(fileKey, "hex").toString("utf-8");
+        const destinationPath = path.join(STORE_PATH, storeId, dataPath);
+        return fs.existsSync(destinationPath)
+          ? null
+          : { label, dataPath: destinationPath };
+      })
+      .filter((task) => task !== null);
+
+    // Limit concurrent downloads
+    const concurrencyLimit = 10;

     try {
-      // Download files to the temporary directory
+      // Download missing files directly to the main directory
       await asyncPool(concurrencyLimit, downloadTasks, async (task) => {
         await propagationServer.downloadFile(
           task.label,
           task.dataPath,
           rootHash,
-          tempDir
+          path.dirname(task.dataPath)
         );
       });

-      // Save the rootHash.dat file to the temporary directory
-      if (!fs.existsSync(tempDir)) {
-        fs.mkdirSync(tempDir, { recursive: true });
+      // Save the rootHash.dat file if it doesn’t exist
+      const datFilePath = path.join(STORE_PATH, storeId, `${rootHash}.dat`);
+      if (!fs.existsSync(datFilePath)) {
+        fs.writeFileSync(datFilePath, datFileContent);
       }

-      fs.writeFileSync(path.join(tempDir, `${rootHash}.dat`), datFileContent);
-
-      // Integrity check for the downloaded files was done during the download
-      // Here we want to make sure we got all the files or we reject the download session
+      // Verify all files are present
       for (const [fileKey, fileData] of Object.entries(root.files)) {
-        const dataPath = getFilePathFromSha256(
-          root.files[fileKey].sha256,
-          "data"
-        );
-
-        if (!fs.existsSync(path.join(tempDir, dataPath))) {
+        // @ts-ignore
+        const dataPath = getFilePathFromSha256(fileData.sha256, "data");
+        if (!fs.existsSync(path.join(STORE_PATH, storeId, dataPath))) {
           throw new Error(
             `Missing file: ${Buffer.from(fileKey, "hex")}, aborting session.`
           );
         }
       }

-      // After all downloads are complete, copy from temp directory to the main directory
-      const destinationDir = path.join(STORE_PATH, storeId);
-      fsExtra.copySync(tempDir, destinationDir, {
-        overwrite: false, // Prevents overwriting existing files
-        errorOnExist: false, // No error if file already exists
-      });
-
       // Generate the manifest file in the main directory
       const dataStore = DataStore.from(storeId);
       await dataStore.cacheStoreCreationHeight();
       await dataStore.generateManifestFile();

       console.log(green(`✔ All files have been downloaded to ${storeId}.`));
-    } catch (error) {
+    } catch (error: any) {
       console.log(red("✖ Error downloading files:"), error);
-    } finally {
-      // Clean up the temporary directory
-      fsExtra.removeSync(tempDir);
     }
   }
 }
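The core pattern this commit introduces (build the task list by skipping files that already exist on disk, drain it with a bounded concurrency pool, then verify completeness) can be sketched in isolation as follows. This is a minimal illustration rather than code from the repository: DownloadTask and fetchToFile are hypothetical stand-ins for the SDK's task objects and propagationServer.downloadFile(), and the promise-returning asyncPool(limit, items, iterator) signature is assumed to match the one the diff itself calls.

import * as fs from "fs";
import * as path from "path";
import asyncPool from "tiny-async-pool";

interface DownloadTask {
  label: string;            // human-readable file name
  destinationPath: string;  // absolute path the file should end up at
}

// Hypothetical downloader standing in for propagationServer.downloadFile().
async function fetchToFile(task: DownloadTask): Promise<void> {
  fs.mkdirSync(path.dirname(task.destinationPath), { recursive: true });
  // ...perform the network download and write the payload to task.destinationPath...
}

async function downloadMissing(tasks: DownloadTask[], concurrencyLimit = 10): Promise<void> {
  // Skip anything already on disk so an interrupted sync can pick back up later.
  const pending = tasks.filter((t) => !fs.existsSync(t.destinationPath));

  // Keep at most `concurrencyLimit` downloads in flight at any time.
  await asyncPool(concurrencyLimit, pending, fetchToFile);

  // Final integrity pass: every expected file must now exist, or the session is rejected.
  for (const t of tasks) {
    if (!fs.existsSync(t.destinationPath)) {
      throw new Error(`Missing file: ${t.label}, aborting session.`);
    }
  }
}

Writing straight into the store directory, rather than into the temporary directory this commit removes, is what makes the existence check meaningful: files that survived an earlier, interrupted run are left untouched, and the final verification loop still rejects any session that ends up incomplete.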