diff --git a/.gitignore b/.gitignore
index 6fb7c71..761607e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -200,4 +200,5 @@
 Cargo.lock
 @reflink
 sandbox
+__reflink-tests-*
 *.json.bak
\ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
index 50fc7ac..e098c85 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,11 +7,11 @@ version = "0.0.0"
 crate-type = ["cdylib"]

 [dependencies]
+copy_on_write = "0.1.1"
 futures = "0.3.28"
 # Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
 napi = { version = "2.12.2", default-features = false, features = ["napi4"] }
 napi-derive = "2.12.2"
-reflink-copy = { version = "0.1.10" }

 [build-dependencies]
 napi-build = "2.0.1"
diff --git a/__test__/main.spec.ts b/__test__/main.spec.ts
index eaaa31f..4c87c2a 100644
--- a/__test__/main.spec.ts
+++ b/__test__/main.spec.ts
@@ -1,79 +1,91 @@
-import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest';
+import { afterAll, describe, expect, it } from 'vitest';
 import { join, resolve } from 'path';
-import { reflinkFileSync, reflinkFile } from '../index.js';
 import { mkdir, rm, writeFile } from 'fs/promises';
 import { readFileSync } from 'fs';
 import { randomUUID, createHash } from 'crypto';
+import { rimraf } from 'rimraf';
+import { reflinkFileSync, reflinkFile } from '../index.js';

-const sandboxDir = join(process.cwd(), `__reflink-tests-${randomUUID()}`);
+const sandboxDir = () => join(process.cwd(), `__reflink-tests-${randomUUID()}`);

 const sandboxFiles = [
   {
-    path: join(sandboxDir, 'file1.txt'),
+    path: 'file1.txt',
     content: 'Hello World!',
     sha: createHash('sha256').update('Hello World!').digest('hex'),
   },
   {
-    path: join(sandboxDir, 'file2.txt'),
+    path: 'file2.txt',
     content: 'Hello World!',
     sha: createHash('sha256').update('Hello World!').digest('hex'),
   },
   {
-    path: join(sandboxDir, 'file3.txt'),
+    path: 'file3.txt',
     content: 'Hello World!',
     sha: createHash('sha256').update('Hello World!').digest('hex'),
   },
 ];

-describe('reflink', () => {
-  beforeAll(async () => {
-    await mkdir(sandboxDir, { recursive: true });
-  });
+const sandboxDirectories: string[] = [];

-  afterAll(async () => {
-    await rm(sandboxDir, { recursive: true, force: true });
-  });
+async function prepare(dir: string) {
+  await mkdir(dir, { recursive: true });
+
+  sandboxDirectories.push(dir);

-  beforeEach(async () => {
-    // remove the sandbox directory and recreate it
-    await rm(sandboxDir, { recursive: true, force: true });
-    await mkdir(sandboxDir, { recursive: true });
+  return Promise.all(
+    sandboxFiles.map(async (file) => {
+      await writeFile(join(dir, file.path), file.content);
+      return {
+        ...file,
+        path: join(dir, file.path),
+      };
+    })
+  );
+}

-    // create the files again
+describe('reflink', () => {
+  afterAll(async () => {
     await Promise.all(
-      sandboxFiles.map(async (file) => {
-        await writeFile(file.path, file.content);
+      sandboxDirectories.map(async (dir) => {
+        await rimraf(dir).catch(() => {});
       })
     );
   });

-  it('should correctly clone a file (sync)', () => {
-    const file = sandboxFiles[0];
+  it('should correctly clone a file (sync)', async () => {
+    const dir = sandboxDir();
+    const files = await prepare(dir);
+    const file = files[0];

-    reflinkFileSync(file.path, join(sandboxDir, 'file1-copy.txt'));
+    reflinkFileSync(file.path, join(dir, 'file1-copy.txt'));

-    const content = readFileSync(join(sandboxDir, 'file1-copy.txt'), 'utf-8');
+    const content = readFileSync(join(dir, 'file1-copy.txt'), 'utf-8');

     expect(content).toBe(file.content);
   });

   it('should correctly clone a file (async)', async () => {
-    const file = sandboxFiles[0];
+    const dir = sandboxDir();
+    const files = await prepare(dir);
+    const file = files[0];

-    await reflinkFile(file.path, join(sandboxDir, 'file1-copy.txt'));
+    await reflinkFile(file.path, join(dir, 'file1-copy.txt'));

-    const content = readFileSync(join(sandboxDir, 'file1-copy.txt'), 'utf-8');
+    const content = readFileSync(join(dir, 'file1-copy.txt'), 'utf-8');

     expect(content).toBe(file.content);
   });

   it('should keep the same content in source file after editing the cloned file', async () => {
-    const file = sandboxFiles[0];
+    const dir = sandboxDir();
+    const files = await prepare(dir);
+    const file = files[0];

-    await reflinkFile(file.path, join(sandboxDir, 'file1-copy.txt'));
+    await reflinkFile(file.path, join(dir, 'file1-copy.txt'));

     await writeFile(
-      join(sandboxDir, 'file1-copy.txt'),
+      join(dir, 'file1-copy.txt'),
       file.content + '\nAdded content!'
     );
@@ -82,65 +94,84 @@ describe('reflink', () => {
     expect(originalContent).toBe(file.content);
   });

-  it('should fail if the source file does not exist (sync)', () => {
+  it('should fail if the source file does not exist (sync)', async () => {
+    const dir = sandboxDir();
+    await prepare(dir);
+
     expect(() => {
       reflinkFileSync(
-        join(sandboxDir, 'file-does-not-exist.txt'),
-        join(sandboxDir, 'file1-copy.txt')
+        join(dir, 'file-does-not-exist.txt'),
+        join(dir, 'file1-copy.txt')
       );
     }).toThrow();
   });

   it('should fail if the source file does not exist (async)', async () => {
+    const dir = sandboxDir();
+    await prepare(dir);
+
     await expect(
       reflinkFile(
-        join(sandboxDir, 'file-does-not-exist.txt'),
-        join(sandboxDir, 'file1-copy.txt')
+        join(dir, 'file-does-not-exist.txt'),
+        join(dir, 'file1-copy.txt')
       )
     ).rejects.toThrow();
   });

-  it('should fail if the destination file already exists (sync)', () => {
+  it('should fail if the destination file already exists (sync)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
+
     expect(() => {
       reflinkFileSync(sandboxFiles[0].path, sandboxFiles[1].path);
     }).toThrow();
   });

   it('should fail if the destination file already exists (async)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     await expect(
       reflinkFile(sandboxFiles[0].path, sandboxFiles[1].path)
     ).rejects.toThrow();
   });

-  it('should fail if the source file is a directory (sync)', () => {
+  it('should fail if the source file is a directory (sync)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     expect(() => {
-      reflinkFileSync(sandboxDir, sandboxFiles[1].path);
+      reflinkFileSync(dir, sandboxFiles[1].path);
     }).toThrow();
   });

   it('should fail if the source file is a directory (async)', async () => {
-    await expect(
-      reflinkFile(sandboxDir, sandboxFiles[1].path)
-    ).rejects.toThrow();
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
+    await expect(reflinkFile(dir, sandboxFiles[1].path)).rejects.toThrow();
   });

-  it('should fail if the source and destination files are the same (sync)', () => {
+  it('should fail if the source and destination files are the same (sync)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     expect(() => {
       reflinkFileSync(sandboxFiles[0].path, sandboxFiles[0].path);
     }).toThrow();
   });

   it('should fail if the source and destination files are the same (async)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     await expect(
       reflinkFile(sandboxFiles[0].path, sandboxFiles[0].path)
     ).rejects.toThrow();
   });

-  it('should fail if the destination parent directory does not exist (sync)', () => {
+  it('should fail if the destination parent directory does not exist (sync)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     expect(() => {
       reflinkFileSync(
         sandboxFiles[0].path,
-        join(sandboxDir, 'does-not-exist', 'file1-copy.txt')
+        join(dir, 'does-not-exist', 'file1-copy.txt')
       );
     }).toThrow();
   });
@@ -190,8 +221,11 @@
   });

   it('should correctly clone 1000 files (sync)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
+
     const files = Array.from({ length: 1000 }, (_, i) => ({
-      path: join(sandboxDir, `file${i}.txt`),
+      path: join(dir, `file${i}.txt`),
       content: 'Hello World!',
     }));
@@ -201,22 +235,21 @@
     await Promise.all(
       files.map(async (file, i) =>
-        reflinkFileSync(file.path, join(sandboxDir, `file${i}-copy.txt`))
+        reflinkFileSync(file.path, join(dir, `file${i}-copy.txt`))
       )
     );

     files.forEach((file, i) => {
-      const content = readFileSync(
-        join(sandboxDir, `file${i}-copy.txt`),
-        'utf-8'
-      );
+      const content = readFileSync(join(dir, `file${i}-copy.txt`), 'utf-8');
       expect(content).toBe(file.content);
     });
   });

   it('should correctly clone 1000 files (async)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     const files = Array.from({ length: 1000 }, (_, i) => ({
-      path: join(sandboxDir, `file${i}.txt`),
+      path: join(dir, `file${i}.txt`),
       content: 'Hello World!',
       hash: createHash('sha256').update('Hello World!').digest('hex'),
     }));
@@ -227,15 +260,12 @@
     await Promise.all(
       files.map(async (file, i) =>
-        reflinkFile(file.path, join(sandboxDir, `file${i}-copy.txt`))
+        reflinkFile(file.path, join(dir, `file${i}-copy.txt`))
       )
     );

     files.forEach((file, i) => {
-      const content = readFileSync(
-        join(sandboxDir, `file${i}-copy.txt`),
-        'utf-8'
-      );
+      const content = readFileSync(join(dir, `file${i}-copy.txt`), 'utf-8');
       const hash = createHash('sha256').update(content).digest('hex');
       expect(content).toBe(file.content);
       expect(hash).toBe(file.hash);
@@ -243,13 +273,15 @@
   });

   it('should keep the same hash when cloning a file more than 3,000 times', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     const srcFile = {
       path: resolve('./package.json'),
       content: readFileSync(join('./package.json'), 'utf-8'),
     };

     const destFiles = Array.from({ length: 3_000 }, (_, i) => ({
-      path: join(sandboxDir, `file1-copy-${i}.txt`),
+      path: join(dir, `file1-copy-${i}.txt`),
       hash: createHash('sha256').update(srcFile.content).digest('hex'),
     }));
@@ -276,13 +308,15 @@
   });

   it('should clone "sample.pyc" file correctly (sync)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     const srcFile = {
       path: resolve(join('fixtures', 'sample.pyc')),
       content: readFileSync(join('fixtures', 'sample.pyc')),
     };

     const destFile = {
-      path: join(sandboxDir, 'sample.pyc'),
+      path: join(dir, 'sample.pyc'),
       hash: createHash('sha256').update(srcFile.content).digest('hex'),
     };
@@ -299,13 +333,15 @@
    * The issue with empty cloned files doesnt seem related to ASCII characters
    */
   it.skip('should clone "ascii-file.js" file correctly (sync)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     const srcFile = {
       path: resolve(join('fixtures', 'ascii-file.js')),
       content: readFileSync(join('fixtures', 'ascii-file.js')),
     };

     const destFile = {
-      path: join(sandboxDir, 'ascii-file.js'),
+      path: join(dir, 'ascii-file.js'),
       hash: createHash('sha256').update(srcFile.content).digest('hex'),
     };
@@ -325,13 +361,15 @@
   });

   it('should clone "sample.pyc" file correctly (async)', async () => {
+    const dir = sandboxDir();
+    const sandboxFiles = await prepare(dir);
     const srcFile = {
       path: resolve(join('fixtures', 'sample.pyc')),
       content: readFileSync(join('fixtures', 'sample.pyc')),
     };

     const destFile = {
-      path: join(sandboxDir, 'sample.pyc'),
+      path: join(dir, 'sample.pyc'),
       hash: createHash('sha256').update(srcFile.content).digest('hex'),
     };
diff --git a/__test__/threads.spec.ts b/__test__/threads.spec.ts
index 9c9e848..0d1f690 100644
--- a/__test__/threads.spec.ts
+++ b/__test__/threads.spec.ts
@@ -65,7 +65,7 @@ describe('reflink worker', () => {
     }
   });

-  it('clone the same file to different location simultaneously (sync)', async () => {
+  it('clone the same file to different location simultaneously (async)', async () => {
     const src = {
       path: join(process.cwd(), 'fixtures', 'ascii-file.js'),
       content: readFileSync(join(process.cwd(), 'fixtures', 'ascii-file.js')),
diff --git a/infinite_clone_test.mjs b/infinite_clone_test.mjs
index 4a861b2..040762e 100644
--- a/infinite_clone_test.mjs
+++ b/infinite_clone_test.mjs
@@ -31,7 +31,6 @@ async function main() {
   for (let i = 0; i < 1000; i++) {
     const destPath = path.join('./sandbox', `file1-copy-${i}.txt`);

-    // Assume reflinkFile is your function that performs the file cloning operation
     await reflinkFile(srcFile.path, destPath);

     const destContent = await fs.readFile(destPath, 'utf-8');
diff --git a/package.json b/package.json
index 4c474d1..1d01d2b 100644
--- a/package.json
+++ b/package.json
@@ -40,7 +40,7 @@
     "build": "napi build --platform --release",
     "build:debug": "napi build --platform",
     "prepublishOnly": "napi prepublish -t npm",
-    "pretest": "yarn build",
+    "pretest": "pnpm build",
     "test": "cargo t && vitest",
     "bench": "node benchmark.mjs",
     "universal": "napi universal",
diff --git a/src/lib.rs b/src/lib.rs
index de22532..053f774 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,14 +2,12 @@
 #[macro_use]
 extern crate napi_derive;
-
+use copy_on_write::reflink_file_sync;
 use napi::{bindgen_prelude::AsyncTask, Env, Error, JsNumber, Result, Task};
-use std::path::PathBuf;
-use reflink_copy;

 pub struct AsyncReflink {
-  src: PathBuf,
-  dst: PathBuf,
+  src: String,
+  dst: String,
 }

 #[napi]
@@ -18,15 +16,15 @@ impl Task for AsyncReflink {
   type JsValue = JsNumber;

   fn compute(&mut self) -> Result<Self::Output> {
-    match reflink_copy::reflink(&self.src, &self.dst) {
+    match reflink_file_sync(&self.src, &self.dst) {
       Ok(_) => {
         Ok(())
       },
       Err(err) => return Err(Error::from_reason(format!(
         "{}, reflink '{}' -> '{}'",
         err.to_string(),
-        self.src.display(),
-        self.dst.display()
+        self.src,
+        self.dst
       ))),
     }
   }
@@ -39,42 +37,40 @@ impl Task for AsyncReflink {
 // Async version
 #[napi(js_name = "reflinkFile")]
 pub fn reflink_task(src: String, dst: String) -> AsyncTask<AsyncReflink> {
-  let src_path = PathBuf::from(src);
-  let dst_path = PathBuf::from(dst);
-  AsyncTask::new(AsyncReflink { src: src_path, dst: dst_path })
+  AsyncTask::new(AsyncReflink { src, dst })
 }

 // Sync version
 #[napi(js_name = "reflinkFileSync")]
 pub fn reflink_sync(env: Env, src: String, dst: String) -> Result<JsNumber> {
-  let src_path = PathBuf::from(src);
-  let dst_path = PathBuf::from(dst);
-  match reflink_copy::reflink(&src_path, &dst_path) {
+  match reflink_file_sync(&src, &dst) {
     Ok(_) => Ok(env.create_int32(0)?),
     Err(err) => Err(Error::from_reason(format!(
       "{}, reflink '{}' -> '{}'",
       err.to_string(),
-      src_path.display(),
-      dst_path.display()
+      src,
+      dst
     ))),
   }
 }

 #[test]
 pub fn test_pyc_file() {
-  let src = std::path::Path::new("fixtures/sample.pyc");
-  let dst = std::path::Path::new("fixtures/sample.pyc.reflink");
+  let src = "fixtures/sample.pyc";
+  let dst = "fixtures/sample.pyc.reflink";
+
+  let dst_path = std::path::Path::new(dst);

   // Remove the destination file if it already exists
-  if dst.exists() {
+  if dst_path.try_exists().unwrap() {
     std::fs::remove_file(&dst).unwrap();
   }

   // Run the reflink operation
-  let result = reflink_copy::reflink(&src, &dst);
+  let result = reflink_file_sync(src, dst);

   assert!(result.is_ok());
-  println!("Reflinked '{}' -> '{}'", src.display(), dst.display());
+  println!("Reflinked {src:?} -> {dst:?}");

   // Further validation: compare the contents of both files to make sure they are identical
   let src_contents = std::fs::read(&src).expect("Failed to read source file");