diff --git a/.github/workflows/build_cli_checks.yml b/.github/workflows/build_cli_checks.yml new file mode 100644 index 0000000000..3804f7dd20 --- /dev/null +++ b/.github/workflows/build_cli_checks.yml @@ -0,0 +1,59 @@ +# This action runs the checks for pull requests using the build CLI Tool. +name: Build CLI Tool Checks + +# Using the tool in GitHub Actions is still a work in progress, and it will be invoked manually +# until it's ready to replace the current rake workflows. +on: + workflow_dispatch: + +jobs: + all_lint: + name: Run all lints using CLI tool + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Install Latest Rust + run: | + rustup update --no-self-update ${{ env.RUST_CHANNEL }} + rustup default ${{ env.RUST_CHANNEL }} + - name: Rust Cache + uses: Swatinem/rust-cache@v2.7.3 + - name: Install wasm-pack + run: cargo install wasm-pack + - name: Install nj-cli + run: cargo install nj-cli + - name: Install Build CLI tool + run: cargo install --path=cli + - name: libudev-dev + run: sudo apt-get install -y libudev-dev + - name: enable corepack for yarnpkg upgrade + run: corepack enable + - name: Run Lints + run: cargo chipmunk lint -r + all_test: + name: Run all tests using CLI tool + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Install Latest Rust + run: | + rustup update --no-self-update ${{ env.RUST_CHANNEL }} + rustup default ${{ env.RUST_CHANNEL }} + - name: Rust Cache + uses: Swatinem/rust-cache@v2.7.3 + - name: Install wasm-pack + run: cargo install wasm-pack + - name: Install nj-cli + run: cargo install nj-cli + - name: Install Build CLI tool + run: cargo install --path=cli + - name: libudev-dev + run: sudo apt-get install -y libudev-dev + - name: enable corepack for yarnpkg upgrade + run: | + npm install tslib + corepack enable + - name: Run Tests + run: cargo chipmunk test -r diff --git a/.gitignore b/.gitignore index 407a6234eb..b6b34d256e 100644 --- a/.gitignore +++ b/.gitignore @@ -71,4 +71,8 @@ application/sandbox/serial/render/lib/views/dialog/port.available sandbox sandbox.backup **/holder/release -scripts/tools/file_checklists/*.* \ No newline at end of file +scripts/tools/file_checklists/*.* + +# CLI build tool files # +######################## +.build_chksum_* diff --git a/application/apps/indexer/sources/src/command/process.rs b/application/apps/indexer/sources/src/command/process.rs index 9ba6a102f6..5edf14246e 100644 --- a/application/apps/indexer/sources/src/command/process.rs +++ b/application/apps/indexer/sources/src/command/process.rs @@ -14,9 +14,6 @@ use tokio::{ use tokio_stream::StreamExt; use tokio_util::codec::{self, FramedRead, LinesCodec}; -#[cfg(windows)] -use std::os::windows::process::CommandExt; - lazy_static!
{ static ref GROUP_RE: Regex = Regex::new(r#"".*?""#).expect("Regex must compile (fail with GROUP_RE)"); diff --git a/application/apps/indexer/sources/src/serial/serialport.rs b/application/apps/indexer/sources/src/serial/serialport.rs index 38a51d4811..8c82830fba 100644 --- a/application/apps/indexer/sources/src/serial/serialport.rs +++ b/application/apps/indexer/sources/src/serial/serialport.rs @@ -103,6 +103,8 @@ impl SerialSource { .stop_bits(stop_bits(&config.stop_bits)) .open_native_async() { + // We get a warning on Windows because `port` doesn't need to be mutated there + #[cfg_attr(windows, allow(unused_mut))] Ok(mut port) => { #[cfg(unix)] if let Err(err) = port.set_exclusive(config.exclusive) { diff --git a/application/apps/rustcore/ts-bindings/package.json b/application/apps/rustcore/ts-bindings/package.json index b11d5e50ad..89aefa3a52 100644 --- a/application/apps/rustcore/ts-bindings/package.json +++ b/application/apps/rustcore/ts-bindings/package.json @@ -6,6 +6,7 @@ "types": "./dist/index.d.ts", "scripts": { "build": "node_modules/.bin/tsc -p tsconfig.json", + "prod": "node_modules/.bin/tsc -p tsconfig.json", "test_cancel": "node_modules/.bin/electron node_modules/jasmine-ts/lib/index.js ./spec/session.cancel.spec.ts", "lint": "node_modules/.bin/eslint . --ext .ts --max-warnings=0", "check": "node_modules/.bin/tsc -p tsconfig.json --noemit" diff --git a/application/apps/rustcore/wasm-bindings/package.json b/application/apps/rustcore/wasm-bindings/package.json index fcfb879dc7..2c6eb90ae2 100644 --- a/application/apps/rustcore/wasm-bindings/package.json +++ b/application/apps/rustcore/wasm-bindings/package.json @@ -5,7 +5,8 @@ "scripts": { "build": "node_modules/.bin/webpack", "start": "node_modules/.bin/webpack-dev-server", - "test": "node_modules/.bin/karma start" + "test": "node_modules/.bin/karma start", + "test_win": "node_modules\\.bin\\karma start" }, "author": "", "license": "MIT", diff --git a/application/holder/package.json b/application/holder/package.json index ea8dc5597a..a4d2b29e83 100644 --- a/application/holder/package.json +++ b/application/holder/package.json @@ -60,6 +60,7 @@ "electron": "npm run build && ./node_modules/.bin/electron --inspect ./dist/app.js", "electron-win": "node_modules/.bin/electron --inspect ./dist/app.js", "build": "node_modules/.bin/tsc -p tsconfig.json", + "prod": "node_modules/.bin/tsc -p tsconfig.json", "start": "npm run build-ts && npm run electron", "postinstall": "electron-builder install-app-deps", "build-darwin-arm64": "node_modules/.bin/electron-builder --mac --dir --config=./electron.config.darwin.arm64.json", diff --git a/application/platform/package.json b/application/platform/package.json index a84643169f..19bec3edb6 100644 --- a/application/platform/package.json +++ b/application/platform/package.json @@ -37,6 +37,7 @@ }, "scripts": { "build": "node_modules/.bin/tsc -p tsconfig.json", + "prod": "node_modules/.bin/tsc -p tsconfig.json", "lint": "node_modules/.bin/eslint . --ext .ts --max-warnings=0 ", "check": "node_modules/.bin/tsc -p tsconfig.json --noemit" }, diff --git a/cli/.gitignore b/cli/.gitignore new file mode 100644 index 0000000000..e9a83dd15c --- /dev/null +++ b/cli/.gitignore @@ -0,0 +1,6 @@ +/target +**/*.rs.bk +*.out +lineMetadata.json +vim-markdown-preview.html +.DS_Store diff --git a/cli/Cargo.lock b/cli/Cargo.lock new file mode 100644 index 0000000000..699374ab12 --- /dev/null +++ b/cli/Cargo.lock @@ -0,0 +1,1257 @@ +# This file is automatically @generated by Cargo.
+# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" + +[[package]] +name = "anstyle-parse" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +dependencies = [ + "anstyle", + "windows-sys", +] + +[[package]] +name = "anyhow" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" + +[[package]] +name = "arrayref" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "backtrace" +version = "0.3.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "blake3" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9ec96fe9a81b5e365f9db71fe00edc4fe4ca2cc7dcb7861f0603012a7caa210" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + 
"cfg-if", + "constant_time_eq", + "rayon-core", +] + +[[package]] +name = "bstr" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "bytes" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952" + +[[package]] +name = "cargo-chipmunk" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "clap_complete", + "console", + "dir_checksum", + "fs_extra", + "futures", + "futures-lite", + "git2", + "indicatif", + "tempdir", + "tokio", + "which", +] + +[[package]] +name = "cc" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26a5c3fd7bfa1ce3897a3a3501d362b2d87b7f2583ebcb4a949ec25911025cbc" +dependencies = [ + "jobserver", + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "4.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35723e6a11662c2afb578bcf0b88bf6ea8e21282a953428f240574fcc3a2b5b3" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49eb96cbfa7cfa35017b7cd548c75b14c3118c98b423041d70562665e07fb0fa" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_complete" +version = "4.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6ae69fbb0833c6fcd5a8d4b8609f108c7ad95fc11e248d853ff2c42a90df26a" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_derive" +version = "4.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d029b67f89d30bbb547c89fd5161293c0aec155fc691d7924b64550662db93e" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "colorchoice" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" + +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys", +] + +[[package]] +name = "constant_time_eq" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" + +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" + +[[package]] +name = "dir_checksum" +version = "0.1.0" +dependencies = [ + "anyhow", + "blake3", + "ignore", + "memmap2", + "rayon", + "tempdir", + "thiserror", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand", + 
"futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "gimli" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" + +[[package]] +name = "git2" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + +[[package]] +name = "globset" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "ignore" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +dependencies = [ + "crossbeam-deque", + "globset", + "log", + "memchr", + "regex-automata", + "same-file", + "walkdir", + "winapi-util", +] + +[[package]] +name = "indicatif" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" +dependencies = [ + "console", + "instant", + "number_prefix", + "portable-atomic", + "unicode-width", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "libgit2-sys" +version = "0.16.2+1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] +name = "libssh2-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dc8a030b787e2119a731f1951d6a773e2280c660f8ec4b0f5e1505a386e71ee" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memmap2" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" +dependencies = [ + "libc", +] + +[[package]] +name = "miniz_oxide" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" +dependencies = [ + "hermit-abi", + "libc", + "wasi", + "windows-sys", +] + +[[package]] +name = "number_prefix" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + +[[package]] +name = "object" +version = "0.36.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f203fa8daa7bb185f760ae12bd8e097f63d17041dcdcaf675ac54cdf863170e" +dependencies = [ + "memchr", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "portable-atomic" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "serde" +version = "1.0.204" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.204" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "2.0.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempdir" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +dependencies = [ + "rand", + "remove_dir_all", +] + +[[package]] +name = "thiserror" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinyvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.39.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daa4fb1bc778bd6f04cbfc4bb2d06a7396a8f299dc33ea1900cedaa316f467b1" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name 
= "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "waker-fn" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "which" +version = "6.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d9c5ed668ee1f17edb3b627225343d210006a90bb1e3745ce1f30b1fb115075" +dependencies = [ + "either", + "home", + "rustix", + "winsafe", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + 
"windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" diff --git a/cli/Cargo.toml b/cli/Cargo.toml new file mode 100644 index 0000000000..c45618efe3 --- /dev/null +++ b/cli/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "cargo-chipmunk" +version = "0.1.0" +edition = "2021" +description = "CLI Tool for chipmunk application development" + +[workspace] +members = ["dir_checksum"] + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[workspace.dependencies] +tempdir = "0.3" +anyhow = "1.0.80" + +[dependencies] +anyhow.workspace = true +clap = { version = "4.4.4", features = ["derive"] } +console = "0.15.7" +fs_extra = "1.3.0" +futures = "0.3.28" +futures-lite = "1.13.0" +git2 = "0.18.2" +indicatif = "0.17.7" +tokio = { version = "1.36.0", features = ["full"] } +dir_checksum = { path = "./dir_checksum" } +which = "6.0" +clap_complete = "4.5" + +[dev-dependencies] +tempdir.workspace = true + +[[test]] +name = "dir_checksum" +path = "dir_checksum/tests/integration_tests.rs" diff --git a/cli/README.md b/cli/README.md new file mode 100644 index 0000000000..64233821a4 --- /dev/null +++ b/cli/README.md @@ -0,0 +1,127 @@ +[![LICENSE](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](LICENSE.txt) + +# Chipmunk Development CLI Tool + +This CLI Tool provides an easier way to manage various development tasks for Chipmunk. +Chipmunk consists of multiple modules with complex dependencies on each other, and this tool helps streamline the development process by coordinating these tasks seamlessly. +This tool acts as a cargo extension. 
Once installed, you can access it by running `cargo chipmunk <COMMAND>` from anywhere within the repository. + +## Build/Installation + +### Prerequisites + +Before installing the Chipmunk CLI tool, ensure that Rust is installed on your system. If Rust is not yet installed, follow the official installation instructions for your platform: + +- **Install Rust:** Visit [rustup.rs](https://rustup.rs/) and follow the instructions to install Rust. + +### Install Chipmunk CLI + +Navigate to the root directory of the Chipmunk repository in your terminal and run the following command to install the Chipmunk CLI tool: + +```bash +cargo install --path cli +``` + +This command installs this tool as a cargo extension, allowing you to use `cargo chipmunk <COMMAND>` to execute various development tasks for Chipmunk. + + +## Usage + +This CLI tool provides multiple sub-commands for different tasks, with each sub-command offering various arguments. + +### General Commands Overview + +```bash +CLI Tool for chipmunk application development + +Usage: cargo chipmunk <COMMAND> + +Commands: + environment Provides commands for the tools needed for development [aliases: env] + print-dot Prints an overview of targets' dependencies in dot format for `Graphviz` [aliases: dot] + lint Runs linting & clippy for all or the specified targets + build Build all or the specified targets + clean Clean all or the specified targets + test Run tests for all or the specified targets + run Build and run the application + reset-checksum Resets the checksum records which are used to check if there were any code changes for each target [aliases: reset] + shell-completion Generate shell completion for the commands of this tool in the given shell, printing them to stdout [aliases: compl] + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help + -V, --version Print version +``` + +### Build Command + +```bash +Usage: cargo chipmunk build [OPTIONS] [TARGET]... + +Arguments: + [TARGET]... + Target to build; by default the whole application will be built + + Possible values: + - core: Represents the path `application/apps/indexer` + - binding: Represents the path `application/apps/rustcore/rs-bindings` + - wrapper: Represents the path `application/apps/rustcore/ts-bindings` + - client: Represents the path `application/client` + - shared: Represents the path `application/platform` + - app: Represents the path `application/holder` + - cli: Represents the path `cli` + - wasm: Represents the path `application/apps/rustcore/wasm-bindings` + - updater: Represents the path `application/apps/precompiled/updater` + +Options: + -p, --production + Build release version + + -r, --report [<FILE>] + Write report from command logs to the given file or to stdout if no file is defined + + -h, --help + Print help (see a summary with '-h') +``` + +## Shell Completion + +The Chipmunk CLI tool supports shell completion for various shells. You can generate shell completions and print them to `stdout` using the following command: + +```bash +cargo chipmunk shell-completion <SHELL> +``` +Replace `<SHELL>` with the name of your shell (e.g., bash, zsh, fish, powershell). + +To use shell completion, you can redirect the output of the completion command to a file and save the file to the appropriate shell completion directory. + +After installing the completion script, restart your shell session or source the completion file to enable shell completion for the Chipmunk CLI tool.
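+ +As a quick illustration (the output path below is arbitrary), you can generate the completion script and load it into your current bash session in one go: + +```bash +# Generate bash completions into a temporary file (illustrative path) +cargo chipmunk shell-completion bash > /tmp/chipmunk-completion.bash +# Source it to enable completions in the running session without restarting the shell +source /tmp/chipmunk-completion.bash +```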
+ + +### Example: Bash Shell +To enable bash shell completion, run the following command to generate the completion script and save it to a file: + +```bash +cargo chipmunk shell-completion bash > chipmunk-completion.bash +``` +Next, copy the `chipmunk-completion.bash` file to your bash completion directory (typically `~/.bash_completion.d/` or `/etc/bash_completion.d/`). + + +## Contributing + +Contributions in any part of Chipmunk are very welcome! + +After making any changes to this build CLI tool, please run the integration tests to ensure that all the provided commands in this tool are still working as expected. Additionally, consider adding new tests when introducing new features and commands. + +To run all the tests, execute the Python file `chipmunk/cli/integration_tests/run_all.py` from within the `chipmunk/cli` directory. For example: + +```bash +# Move to cli directory +cd cli +# Run python file +python ./integration_tests/run_all.py +``` +Please be aware that these tests will run on your local copy of Chipmunk. This process will rebuild the project and run all linting and tests on the entire solution. + +For more details, please see our [contribution guide](../contribution.md) + diff --git a/cli/dir_checksum/Cargo.toml b/cli/dir_checksum/Cargo.toml new file mode 100644 index 0000000000..fe82b94126 --- /dev/null +++ b/cli/dir_checksum/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "dir_checksum" +version = "0.1.0" +authors = ["Ammar Abou Zor "] +edition = "2021" +description = "A library which provides methods to calculate the checksum of the files in a given path recursively, considering `gitignore` rules" + +[dependencies] +blake3 = { version = "1", features = ["rayon"] } +ignore = "0.4" +memmap2 = "0.9" +rayon = "1" +thiserror = "1" + +[dev-dependencies] +tempdir.workspace = true +anyhow.workspace = true diff --git a/cli/dir_checksum/src/file_hash_digest.rs b/cli/dir_checksum/src/file_hash_digest.rs new file mode 100644 index 0000000000..dc72aee008 --- /dev/null +++ b/cli/dir_checksum/src/file_hash_digest.rs @@ -0,0 +1,17 @@ +use std::path::PathBuf; + +use crate::hash_digest::HashDigest; + +/// Holds the checksum for a file with its path +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FileHashDigest { + pub path: PathBuf, + /// The calculated checksum value + pub hash_digest: HashDigest, +} + +impl FileHashDigest { + pub fn new(path: PathBuf, hash_digest: HashDigest) -> Self { + Self { path, hash_digest } + } +} diff --git a/cli/dir_checksum/src/hash_digest.rs b/cli/dir_checksum/src/hash_digest.rs new file mode 100644 index 0000000000..8e375e0325 --- /dev/null +++ b/cli/dir_checksum/src/hash_digest.rs @@ -0,0 +1,63 @@ +use std::{fmt::Display, str::FromStr}; + +/// The number of bytes in a Hash, 32. +pub const OUT_LEN: usize = blake3::OUT_LEN; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +/// Represents the calculated checksum value +/// Provides different methods to represent the hash value +pub struct HashDigest { + hash: blake3::Hash, +} + +impl Display for HashDigest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.hash) + } +} + +impl FromStr for HashDigest { + type Err = String; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + let hash = blake3::Hash::from_hex(s).map_err(|e| format!("Invalid Input.
Error: {}", e))?; + + Ok(Self { hash }) + } +} + +impl From<blake3::Hash> for HashDigest { + fn from(hash: blake3::Hash) -> Self { + Self { hash } + } +} + +impl From<[u8; OUT_LEN]> for HashDigest { + fn from(bytes: [u8; OUT_LEN]) -> Self { + blake3::Hash::from_bytes(bytes).into() + } +} + +impl From<HashDigest> for [u8; OUT_LEN] { + fn from(hash: HashDigest) -> Self { + hash.to_bytes() + } +} + +impl<'a> From<&'a HashDigest> for &'a [u8; OUT_LEN] { + fn from(hash: &'a HashDigest) -> Self { + hash.as_bytes() + } +} + +impl HashDigest { + /// Returns a reference to the hash as bytes. + pub fn as_bytes(&self) -> &[u8; OUT_LEN] { + self.hash.as_bytes() + } + + /// Convert the hash to bytes consuming itself. + pub fn to_bytes(self) -> [u8; OUT_LEN] { + self.hash.into() + } +} diff --git a/cli/dir_checksum/src/hash_error.rs b/cli/dir_checksum/src/hash_error.rs new file mode 100644 index 0000000000..e90cec2492 --- /dev/null +++ b/cli/dir_checksum/src/hash_error.rs @@ -0,0 +1,11 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum HashError { + #[error("Environment error, cannot continue: {0}")] + Environment(String), + #[error("Entry could not be processed: {0}")] + Entry(String), + #[error("IO error: {0:?}")] + Io(#[from] std::io::Error), +} diff --git a/cli/dir_checksum/src/input.rs b/cli/dir_checksum/src/input.rs new file mode 100644 index 0000000000..292cdf60f1 --- /dev/null +++ b/cli/dir_checksum/src/input.rs @@ -0,0 +1,108 @@ +use std::{ + fs::File, + io::{self, Read}, + path::Path, +}; + +use crate::HashError; + +pub(crate) enum Input { + Mmap(io::Cursor<memmap2::Mmap>), + File(File), +} + +impl Input { + pub(crate) fn open(path: &Path) -> Result<Self, HashError> { + let file = File::open(path)?; + if let Some(mmap) = maybe_memmap_file(&file)? { + return Ok(Self::Mmap(io::Cursor::new(mmap))); + } + Ok(Self::File(file)) + } + + pub(crate) fn hash(&mut self, base_hasher: &blake3::Hasher) -> Result<blake3::Hash, HashError> { + let mut hasher = base_hasher.clone(); + match self { + // The fast path: If we mmapped the file successfully, hash using + // multiple threads. + Self::Mmap(cursor) => { + hasher.update_rayon(cursor.get_ref()); + } + // The slower paths, for files we didn't/couldn't mmap. + // This is currently all single-threaded. Doing multi-threaded + // hashing without memory mapping is tricky, since all your worker + // threads have to stop every time you refill the buffer, and that + // ends up being a lot of overhead. To solve that, we need a more + // complicated double-buffering strategy where a background thread + // fills one buffer while the worker threads are hashing the other + // one. We might implement that in the future, but since this is + // the slow path anyway, it's not high priority. + Self::File(file) => { + copy_wide(file, &mut hasher)?; + } + } + Ok(hasher.finalize()) + } +} + +// Mmap a file, if it looks like a good idea. Return None in cases where we +// know mmap will fail, or if the file is short enough that mmapping isn't +// worth it. However, if we do try to mmap and it fails, return the error. +fn maybe_memmap_file(file: &File) -> Result<Option<memmap2::Mmap>, HashError> { + let metadata = file.metadata()?; + let file_size = metadata.len(); + let map = if !metadata.is_file() { + // Not a real file. + None + } else if file_size > isize::MAX as u64 { + // Too long to safely map. + // https://github.com/danburkert/memmap-rs/issues/69 + None + } else if file_size < 16 * 1024 { + // Mapping small files is not worth it.
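+ // Hashing them through the regular read path (`copy_wide` below) is cheaper + // than paying the setup cost of a memory mapping.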
+ None + } else { + // Explicitly set the length of the memory map, so that file system + // changes can't race to violate the invariants we just checked. + // + // + // ## Safety + // + // All file-backed memory map constructors are marked `unsafe` because of the potential for + // *Undefined Behavior* using the map if the underlying file is subsequently modified, in or + // out of process. + // The memory map will be used here only for a very short time while calculating the hash. It's + // unlikely that the files will be changed at the same time, and in that case it's the + // responsibility of the user of this library to ensure that the files aren't changed from a + // different process while calling the methods of this library. + let map = unsafe { + memmap2::MmapOptions::new() + .len(file_size as usize) + .map(file)? + }; + + Some(map) + }; + + Ok(map) +} + +// A 16 KiB buffer is enough to take advantage of all the SIMD instruction sets +// that we support, but `std::io::copy` currently uses 8 KiB. Most platforms +// can support at least 64 KiB, and there's some performance benefit to using +// bigger reads, so that's what we use here. +fn copy_wide(mut reader: impl Read, hasher: &mut blake3::Hasher) -> io::Result<u64> { + let mut buffer = [0; 65536]; + let mut total = 0; + loop { + match reader.read(&mut buffer) { + Ok(0) => return Ok(total), + Ok(n) => { + hasher.update(&buffer[..n]); + total += n as u64; + } + Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue, + Err(e) => return Err(e), + } + } +} diff --git a/cli/dir_checksum/src/lib.rs b/cli/dir_checksum/src/lib.rs new file mode 100644 index 0000000000..e995ff367b --- /dev/null +++ b/cli/dir_checksum/src/lib.rs @@ -0,0 +1,101 @@ +use blake3::Hasher; +use ignore::Walk; +use input::Input; +use rayon::iter::{IntoParallelIterator, ParallelIterator}; +use std::{ + io::{self, ErrorKind}, + path::{Path, PathBuf}, +}; + +mod file_hash_digest; +mod hash_digest; +mod hash_error; +mod input; + +pub use file_hash_digest::FileHashDigest; +pub use hash_digest::HashDigest; +pub use hash_digest::OUT_LEN; +pub use hash_error::HashError; + +/// Calculates the hash of each file in the given folder considering `gitignore` rules, returning +/// the combination of their checksums +/// +/// * `dir_path`: Root directory whose files are iterated recursively +pub fn calc_combined_checksum<P>
(dir_path: P) -> Result<HashDigest, HashError> +where + P: AsRef<Path> + Send + Sync, +{ + run_intern(dir_path, |path, hasher| { + calc_files_hashes(path, hasher)?.iter().for_each(|entry| { + hasher.update(entry.hash_digest.as_bytes()); + }); + + Ok(hasher.finalize().into()) + }) +} + +/// Calculates the hash of each file in the given folder considering `gitignore` rules and returns +/// a list of the files with their checksums +/// +/// * `dir_path`: Root directory whose files are iterated recursively +pub fn calc_individual_checksum<P>(dir_path: P) -> Result<Vec<FileHashDigest>, HashError> +where + P: AsRef<Path> + Send + Sync, +{ + run_intern(dir_path, |path, hasher| calc_files_hashes(path, hasher)) +} + +/// Validates the given path and prepares the run environment, then calls the given function, +/// returning its result +/// +/// * `calc_fn`: Function that will be called inside the function with the directory path and +/// the created hasher +fn run_intern<P, F, T>(dir_path: P, calc_fn: F) -> Result<T, HashError> +where + F: Fn(&Path, &mut blake3::Hasher) -> Result<T, HashError> + Sync + Send, + P: AsRef<Path> + Send + Sync, + T: Send + Sync, +{ + let dir_path = dir_path.as_ref(); + if !dir_path.is_dir() { + return Err(io::Error::new( + ErrorKind::InvalidInput, + format!( + "Given path must be a directory. path: {}", + dir_path.display() + ), + ) + .into()); + } + + let mut hasher = blake3::Hasher::new(); + + calc_fn(dir_path, &mut hasher) +} + +/// Walks through the file tree, calculating the checksum for each of its files +/// +/// * `dir_path`: Path of the directory to walk the files from +fn calc_files_hashes<P: AsRef<Path>>( + dir_path: P, + hasher: &blake3::Hasher, +) -> Result<Vec<FileHashDigest>, HashError> { + let entries: Vec<PathBuf> = Walk::new(dir_path) + .filter_map(|dir_entry| dir_entry.map(|entry| entry.into_path()).ok()) + .filter(|path| path.is_file()) + .collect(); + + entries + .into_par_iter() + .map(|path| calc_hash(&path, hasher).map(|hash| FileHashDigest::new(path, hash.into()))) + .collect() +} + +/// Calculates the hash for the given file path +fn calc_hash(file_path: &Path, base_hasher: &Hasher) -> Result<blake3::Hash, HashError> { + let mut input = Input::open(file_path) + .map_err(|e| HashError::Entry(format!("Could not open file: {file_path:?} ({e})")))?; + input + .hash(base_hasher) + .map_err(|e| HashError::Entry(format!("Could not hash file: {file_path:?} ({e})"))) +} diff --git a/cli/dir_checksum/tests/integration_tests.rs b/cli/dir_checksum/tests/integration_tests.rs new file mode 100644 index 0000000000..8d78192a31 --- /dev/null +++ b/cli/dir_checksum/tests/integration_tests.rs @@ -0,0 +1,162 @@ +extern crate tempdir; + +use std::{ + fs::{self, File}, + path::PathBuf, +}; + +use dir_checksum::*; +use tempdir::TempDir; + +fn create_tmp_dir_with_file(dir_name: &'static str) -> anyhow::Result<(TempDir, PathBuf)> { + let tmp_dir = TempDir::new(dir_name)?; + let file_path = tmp_dir.path().join("file1.txt"); + fs::write(&file_path, "Initial text")?; + + Ok((tmp_dir, file_path)) +} + +#[test] +fn hash_combinations_add_then_remove_file() -> anyhow::Result<()> { + let (tmp_dir, _) = create_tmp_dir_with_file("comb_add_remove_file")?; + + let original_hash = calc_combined_checksum(tmp_dir.path())?; + + let file_path_2 = tmp_dir.path().join("file2.txt"); + fs::write(&file_path_2, "Initial text 2")?; + + assert_ne!( + original_hash, + calc_combined_checksum(tmp_dir.path())?, + "Hashes after adding one file can't be the same" + ); + + fs::remove_file(file_path_2)?; + + assert_eq!( + original_hash, + calc_combined_checksum(tmp_dir.path())?, + "Hashes after deleting the second file must be identical again" + ); + + Ok(()) +} + +#[test] +fn hash_combinations_change_file_content() -> anyhow::Result<()> { + let (tmp_dir, file_path_1) = create_tmp_dir_with_file("comb_change_content")?; + + let original_hash = calc_combined_checksum(tmp_dir.path())?; + + fs::write(&file_path_1, "changed text")?; + assert_ne!( + original_hash, + calc_combined_checksum(tmp_dir.path())?, + "Hashes after changing file content can't be the same" + ); + + Ok(()) +} + +#[test] +fn hash_combinations_empty_file() -> anyhow::Result<()> { + let (tmp_dir, _) =
create_tmp_dir_with_file("comb_empty_file")?; + + let original_hash = calc_combined_checksum(tmp_dir.path())?; + + // Create an empty file + let empty_file_path = tmp_dir.path().join("empty.txt"); + let empty_file = File::create(&empty_file_path)?; + drop(empty_file); + + assert_ne!( + original_hash, + calc_combined_checksum(tmp_dir.path())?, + "Hashes after creating an empty file can't be the same" + ); + + Ok(()) +} + +#[test] +fn hash_combinations_add_then_remove_sub_dir() -> anyhow::Result<()> { + let (tmp_dir, _) = create_tmp_dir_with_file("comb_sub_dir")?; + + let original_hash = calc_combined_checksum(tmp_dir.path())?; + + let sub_dir = tmp_dir.path().join("sub_dir"); + fs::create_dir(&sub_dir)?; + + let file_path_2 = sub_dir.join("file2.txt"); + fs::write(&file_path_2, "Initial text 2")?; + + assert_ne!( + original_hash, + calc_combined_checksum(tmp_dir.path())?, + "Hashes after adding one file in sub directory can't be the same" + ); + + fs::remove_file(file_path_2)?; + + assert_eq!( + original_hash, + calc_combined_checksum(tmp_dir.path())?, + "Hashes after deleting the file in sub directory must be identical again" + ); + + Ok(()) +} + +#[test] +fn hash_individual_many_files() -> anyhow::Result<()> { + let (tmp_dir, _) = create_tmp_dir_with_file("indiv_files")?; + + // Create non-empty file + let file2_path = tmp_dir.path().join("file2.txt"); + fs::write(file2_path, "file 2 content")?; + + // Create empty file + let empty_file_path = tmp_dir.path().join("empty.txt"); + let _ = File::create(&empty_file_path)?; + + let items = calc_individual_checksum(tmp_dir.path())?; + + assert_eq!(items.len(), 3, "Hashes count must be 3"); + + assert_eq!( + &items, + &calc_individual_checksum(tmp_dir.path())?, + "Hash items must be identical" + ); + + Ok(()) +} + +#[test] +fn hash_individual_sub_directory() -> anyhow::Result<()> { + let (tmp_dir, _) = create_tmp_dir_with_file("indiv_sub_dir")?; + + let sub_dir = tmp_dir.path().join("sub_dir"); + fs::create_dir(&sub_dir)?; + + // Create non-empty file + let file2_path = &sub_dir.join("file2.txt"); + fs::write(file2_path, "file 2 content")?; + + // Create empty file + let empty_file_path = &sub_dir.join("empty.txt"); + let empty_file = File::create(&empty_file_path)?; + drop(empty_file); + + let items = calc_individual_checksum(tmp_dir.path())?; + + assert_eq!(items.len(), 3, "Hashes count must be 3"); + + assert_eq!( + &items, + &calc_individual_checksum(tmp_dir.path())?, + "Hash items must be identical" + ); + + Ok(()) +} diff --git a/cli/integration_tests/.gitignore b/cli/integration_tests/.gitignore new file mode 100644 index 0000000000..bee8a64b79 --- /dev/null +++ b/cli/integration_tests/.gitignore @@ -0,0 +1 @@ +__pycache__ diff --git a/cli/integration_tests/build.py b/cli/integration_tests/build.py new file mode 100644 index 0000000000..1623222d6f --- /dev/null +++ b/cli/integration_tests/build.py @@ -0,0 +1,321 @@ +""" +Provides methods to test the Build command and checksum implementation to watch source code changes +in Chipmunk Build CLI Tool. 
+"""
+
+from pathlib import Path
+from utls import get_root, run_command, print_green_bold, print_blue_bold
+from datetime import datetime
+from typing import Dict
+import platform
+
+#######################################################################
+######################## RUN ALL TESTS ############################
+#######################################################################
+
+
+def run_build_tests():
+    """Runs the tests for the build command:
+    - General Test: General test for the build command and the created directories and checksum files.
+    - Checksum Test: Test how changing files in some target will affect the build on it and other targets depending on it.
+    - Reset Checksum Test: Make sure the checksum file will be deleted when the command is called.
+    """
+    print_blue_bold("Running General Checks for Build Command...")
+    _build_general_check()
+    print_green_bold("*** General Check for Build Command Succeeded ***")
+
+    print("---------------------------------------------------------------")
+
+    print_blue_bold("Running Checksum Checks for Build Command...")
+    _build_checksum_check()
+    print_green_bold("*** Checksum Check for Build Command Succeeded ***")
+
+    print("---------------------------------------------------------------")
+
+    print_blue_bold("Running Reset Checksum Command...")
+    _run_reset_checksum_command()
+    print_green_bold("*** Test Running Reset Checksum Command Succeeded ***")
+
+
+#######################################################################
+################ GENERAL TEST FOR BUILD COMMAND ###################
+#######################################################################
+
+# Build command to be used in the general build test.
+# This command should build all the targets except the Build CLI Tool
+BUILD_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "build",
+    # Provide the app target only and it should pull all other targets except for the build CLI, which isn't
+    # possible to build on Windows because it's not allowed to replace a binary while it's running.
+    "app",
+]
+
+# These paths must exist after the build command has finished.
+# The paths are relative starting from `chipmunk_root/application`
+APP_PATHS_FOR_BUILD_CHECK = [
+    # Core
+    "apps/indexer/target",
+    # Shared
+    "platform/dist",
+    "platform/node_modules",
+    # Binding
+    "apps/rustcore/rs-bindings/dist",
+    "apps/rustcore/rs-bindings/target",
+    # Wrapper
+    "apps/rustcore/ts-bindings/dist",
+    "apps/rustcore/ts-bindings/node_modules",
+    "apps/rustcore/ts-bindings/src/native/index.node",
+    # Wasm
+    "apps/rustcore/wasm-bindings/pkg",
+    "apps/rustcore/wasm-bindings/node_modules",
+    # Client
+    "client/dist",
+    "client/node_modules",
+    # Updater
+    "apps/precompiled/updater/target",
+    # App
+    "holder/dist",
+    "holder/node_modules",
+]
+
+# The name of the file where the checksums are saved for the development build
+CHECKSUM_FILE_NAME = ".build_chksum_dev"
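For orientation, a hedged sketch of what this records file holds: `persist_hashes` in `cli/src/checksum_records.rs` (later in this diff) writes one `<target>:<hash>` pair per line, so the file should look roughly like the following (digests invented and truncated; target spelling follows `Target`'s Display impl):

    Shared:9f86d081884c7d65...
    Binding:60303ae22b998861...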
+
+
+def get_build_paths(root_dir: Path) -> list[Path]:
+    """Provides the paths for the directories that must be created after running the build command"""
+    application_dir = root_dir.joinpath("application")
+    build_paths = [
+        application_dir.joinpath(sub_dir) for sub_dir in APP_PATHS_FOR_BUILD_CHECK
+    ]
+    return build_paths
+
+
+def _build_general_check():
+    """Runs the Build command for app targets and checks that all build directories + checksum file are created"""
+    print("Running build command...")
+    root_dir = get_root()
+    # The path for the file where build checksums are saved.
+    # This file must be written on every call of build.
+    checksum_path = root_dir.joinpath(CHECKSUM_FILE_NAME)
+
+    # Get the last modification date for the checksum file if it exists, otherwise use the minimal date
+    checksum_modified_before = (
+        get_last_modification_date(checksum_path)
+        if checksum_path.exists()
+        else datetime.min
+    )
+    run_command(BUILD_COMMAND)
+
+    # Check that all the target directories exist after running the build command.
+    print("Checking created directories...")
+    for path in get_build_paths(root_dir):
+        if not path.exists():
+            raise AssertionError(f"Path doesn't exist after build. Path: {path}")
+
+    print("Checking created directories Succeeded")
+
+    # Checksum Records file Checks: File must exist after build,
+    # and it must have a more recent modification date compared to before the build
+    print("Checking Checksum Records file changed...")
+
+    assert (
+        checksum_path.exists()
+    ), f"Checksum record file doesn't exist after build. File Path: {checksum_path}"
+
+    checksum_modified_after = get_last_modification_date(checksum_path)
+
+    assert (
+        checksum_modified_after > checksum_modified_before
+    ), f"Checksum file modification date after build isn't more recent than before build.\
+        Before: {checksum_modified_before} , After: {checksum_modified_after}"
+    print("Checking if Checksum Records file changed Succeeded")
+
+
+#######################################################################
+#################### CHECKSUM RECORDS TESTS #######################
+#######################################################################
+
+# Name of the temporary file which will be used for testing whether file changes lead to rebuilding
+# the direct target and all other targets that depend on it.
+# This file should be created then deleted after the test is done.
+TEMP_FILE_NAME = "tmp_for_cli_test.txt"
+
+# Content of the temporary file for testing checksum checks.
+# It should provide a clear message for the users to delete the file if the file somehow didn't
+# get deleted after the test has finished.
+TEMP_FILE_CONTENT = """This file is created to test the build CLI tool only and it should be deleted after each test.
+Please delete this file manually if it still exists after build CLI tests are done,
+and please consider opening an issue if you can reproduce this behavior"""
+
+# Paths of files and directories in the platform target and all other targets depending on it.
+# The modification date for these files will be read before the checksum test starts, then it will be compared after
+# the build command has finished to ensure that those targets have been rebuilt.
+INVOLVED_PATHS_CHECKSUM_CHECK = [
+    # Shared
+    "platform/dist/lib.js",
+    # Binding
+    "apps/rustcore/rs-bindings/dist/index.node",
+    # Wrapper
+    "apps/rustcore/ts-bindings/dist/index.js",
+    # Client
+    "client/dist",
+    # App
+    "holder/dist/app.js",
+]
+
+# Paths of files and directories that must not change after running the build with the changes in platform.
+# The modification date for these files will be read before the checksum test starts, then it will be compared after
+# the build command has finished to ensure that those targets have not been rebuilt.
+PATHS_NON_INVOLVED_CHECKSUM_CHECK = [
+    # Core
+    "apps/indexer/target",
+    # Wasm
+    "apps/rustcore/wasm-bindings/pkg",
+    # Updater
+    "apps/precompiled/updater/target",
+]
+
+
+def get_last_modification_date(path: Path) -> datetime:
+    """Gets the last modification date for the given path on different platforms.
+    On Unix it will return the last time the metadata of the file has been changed.
+    On Windows it will return the more recent of the creation and last modification times.
+    """
+    # Get file stats
+    stats = path.stat()
+
+    # On Unix, return the last time any of the file metadata has changed
+    if platform.system() != "Windows":
+        return datetime.fromtimestamp(stats.st_ctime)
+
+    # On Windows, return the greater of st_mtime and st_birthtime because time information on Windows can be misleading,
+    # e.g. it's possible to get a creation time that is more recent than the modification time.
+    else:
+        most_recent_time = max(
+            stats.st_mtime,
+            stats.st_birthtime,
+        )
+        return datetime.fromtimestamp(most_recent_time)
+
+
+def _build_checksum_check():
+    """!!!This function must run directly after a full build!!!
+    It creates a dummy file in the platform directory and checks that all dependencies (Binding, Wrapper, Client, App)
+    have been newly built
+    """
+    root_dir = get_root()
+    application_dir = root_dir.joinpath("application")
+
+    # Get and validate checksum file
+    chksum_file = root_dir.joinpath(CHECKSUM_FILE_NAME)
+    assert (
+        chksum_file.exists()
+    ), f"Checksum File must exist before running checksum tests. File Path: {chksum_file}"
+
+    # Save the modification dates of the build paths that must change, to compare them later.
+    modifi_involved_before_start: Dict[Path, datetime] = {}
+    for sub_path in INVOLVED_PATHS_CHECKSUM_CHECK:
+        sub_path = application_dir.joinpath(sub_path)
+        assert (
+            sub_path.exists()
+        ), f"Build Path must exist before checksum tests start. Path {sub_path}"
+        modifi_date = get_last_modification_date(sub_path)
+        modifi_involved_before_start[sub_path] = modifi_date
+
+    # Save the modification dates of the build paths that must stay unchanged, to compare them later.
+    modifi_non_involved_before_start: Dict[Path, datetime] = {}
+    for sub_path in PATHS_NON_INVOLVED_CHECKSUM_CHECK:
+        sub_path = application_dir.joinpath(sub_path)
+        assert (
+            sub_path.exists()
+        ), f"Build Path must exist before checksum tests start. Path {sub_path}"
+        modifi_date = get_last_modification_date(sub_path)
+        modifi_non_involved_before_start[sub_path] = modifi_date
+
+    # Define the temporary file path in the platform directory to ensure it will be rebuilt
+    # with all other targets depending on it.
+    temp_file_path = application_dir.joinpath(f"platform/{TEMP_FILE_NAME}")
+    assert (
+        not temp_file_path.exists()
+    ), f"Temporary file can't exist before checksum test start.
File Path: {temp_file_path}"
+
+    try:
+        # Create temporary file in platform directory
+        with open(temp_file_path, "w") as f:
+            f.write(TEMP_FILE_CONTENT)
+
+        # Run build command
+        run_command(BUILD_COMMAND)
+
+        # Compare modification dates for involved targets on different platforms
+        if platform.system() != "Windows":
+            # On Unix systems we compare all the involved targets to ensure the checksum solution is
+            # working and to test the build dependencies logic
+            for path, modifi_before in modifi_involved_before_start.items():
+                modifi_after = get_last_modification_date(path)
+                assert (
+                    modifi_after > modifi_before
+                ), f"Involved target modification date after must be more recent than before.\n\
+                    Target Path: {path}.\n\
+                    Before: {modifi_before}, After: {modifi_after}"
+        else:
+            # On Windows it's enough that only one of the involved targets has a more recent date
+            # because the file system here isn't reliable in delivering the current time of the
+            # latest change on a file or directory
+            date_changed = False
+            for path, modifi_before in modifi_involved_before_start.items():
+                modifi_after = get_last_modification_date(path)
+                date_changed = modifi_after > modifi_before
+                if date_changed:
+                    break
+            assert (
+                date_changed
+            ), "None of the involved targets' modification date is more recent than before build"
+
+        # Compare modification dates for non-involved targets
+        for path, modifi_before in modifi_non_involved_before_start.items():
+            modifi_after = get_last_modification_date(path)
+            assert (
+                modifi_after == modifi_before
+            ), f"Not involved target modification date must not be changed.\n\
+                Target Path: {path}.\n\
+                Before: {modifi_before}, After: {modifi_after}"
+    finally:
+        # Ensure the temporary file is deleted
+        if temp_file_path.exists():
+            temp_file_path.unlink()
+
+
+#######################################################################
+#################### RESET CHECKSUM COMMAND #######################
+#######################################################################
+
+RESET_CHECKSUM_COMMAND = ["cargo", "run", "-r", "--", "chipmunk", "reset-checksum"]
+
+
+def _run_reset_checksum_command():
+    """This test will run the reset-checksum command in development mode, then it ensures that
+    the checksum file has been deleted"""
+
+    checksum_path = get_root().joinpath(CHECKSUM_FILE_NAME)
+    assert (
+        checksum_path.exists()
+    ), f"Checksum file must exist before running 'reset-checksum' command. File Path: {checksum_path}"
+
+    run_command(RESET_CHECKSUM_COMMAND)
+
+    assert (
+        not checksum_path.exists()
+    ), f"Checksum file must not exist after running 'reset-checksum' command. File Path: {checksum_path}"
+
+
+if __name__ == "__main__":
+    run_build_tests()
diff --git a/cli/integration_tests/clean.py b/cli/integration_tests/clean.py
new file mode 100644
index 0000000000..80b8d1b6af
--- /dev/null
+++ b/cli/integration_tests/clean.py
@@ -0,0 +1,79 @@
+"""
+Provides methods to test the Clean command in Chipmunk Build CLI Tool.
+The Clean command will be invoked here for all targets but the CLI tool itself,
+then it checks that all the paths that must be removed don't exist anymore.
+"""
+
+from pathlib import Path
+from utls import run_command, print_blue_bold, print_green_bold, get_root
+
+CLEAN_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "clean",
+    # We need to set targets explicitly because we need to clean everything except the build CLI tool's
+    # binaries to avoid failing on Windows when the CLI tool tries to remove its own binary.
+    "core",
+    "shared",
+    "binding",
+    "wrapper",
+    "wasm",
+    "client",
+    "updater",
+    "app",
+]
+
+# These paths must not exist after the clean run is done.
+# The paths are relative starting from `chipmunk_root/application`
+PATHS_TO_CHECK = [
+    # Core
+    "apps/indexer/target",
+    # Shared
+    "platform/dist",
+    "platform/node_modules",
+    # Binding
+    "apps/rustcore/rs-bindings/dist",
+    "apps/rustcore/rs-bindings/target",
+    # Wrapper
+    "apps/rustcore/ts-bindings/dist",
+    "apps/rustcore/ts-bindings/node_modules",
+    "apps/rustcore/ts-bindings/spec/build",
+    "apps/rustcore/ts-bindings/src/native/index.node",
+    # Wasm
+    "apps/rustcore/wasm-bindings/pkg",
+    "apps/rustcore/wasm-bindings/node_modules",
+    "apps/rustcore/wasm-bindings/test_output",
+    # Client
+    "client/dist",
+    "client/node_modules",
+    # Updater
+    "apps/precompiled/updater/target",
+    # App
+    "holder/dist",
+    "holder/node_modules",
+]
+
+
+def run_clean_command():
+    """Runs the Clean command on all targets and ensures that all build directories are deleted."""
+    print_blue_bold("Running clean command...")
+    run_command(CLEAN_COMMAND)
+    for path in get_removed_paths():
+        if path.exists():
+            raise AssertionError(f"Path exists after clean. Path: {path}")
+
+    print_green_bold("*** Check for Clean Command Succeeded ***")
+
+
+def get_removed_paths() -> list[Path]:
+    """Provides the paths for the directories that must be removed after running the clean command"""
+    root_dir = get_root()
+    application_dir = root_dir.joinpath("application")
+    return [application_dir.joinpath(sub_dir) for sub_dir in PATHS_TO_CHECK]
+
+
+if __name__ == "__main__":
+    run_clean_command()
diff --git a/cli/integration_tests/environment.py b/cli/integration_tests/environment.py
new file mode 100644
index 0000000000..82a1b7d981
--- /dev/null
+++ b/cli/integration_tests/environment.py
@@ -0,0 +1,41 @@
+"""
+Provides methods to test the Environment commands in Chipmunk Build CLI Tool
+"""
+
+from utls import run_command, print_blue_bold, print_green_bold
+
+ENVIRONMENT_CHECK_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "environment",
+    "check",
+]
+
+ENVIRONMENT_PRINT_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "environment",
+    "print",
+]
+
+
+def run_environment_commands():
+    """Runs the environment command that checks the installed development tools for chipmunk,
+    and the command that prints information about those tools"""
+    print_blue_bold("Running Environment Check command...")
+    run_command(ENVIRONMENT_CHECK_COMMAND)
+    print_green_bold("*** Environment Check Command Succeeded ***")
+
+    print_blue_bold("Running Environment Print command...")
+    run_command(ENVIRONMENT_PRINT_COMMAND)
+    print_green_bold("*** Environment Print Command Succeeded ***")
+
+
+if __name__ == "__main__":
+    run_environment_commands()
diff --git a/cli/integration_tests/lint.py b/cli/integration_tests/lint.py
new file mode 100644
index 0000000000..5e476a6204
--- /dev/null
+++ b/cli/integration_tests/lint.py
@@ -0,0 +1,25 @@
+"""
+Provides methods to test the Lint command in Chipmunk Build CLI Tool
+"""
+
+from utls import run_command, print_green_bold, print_blue_bold
+
+LINT_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "lint",
+]
+
+
+def run_lint_command():
+    """Runs the lint command for all targets.
This test will fail on linting errors as well."""
+    print_blue_bold("Running Lint command...")
+    run_command(LINT_COMMAND)
+    print_green_bold("*** Check for Lint Command Succeeded ***")
+
+
+if __name__ == "__main__":
+    run_lint_command()
diff --git a/cli/integration_tests/print_dot.py b/cli/integration_tests/print_dot.py
new file mode 100644
index 0000000000..01c3224edc
--- /dev/null
+++ b/cli/integration_tests/print_dot.py
@@ -0,0 +1,41 @@
+"""
+Provides methods to test the Print-dot commands in Chipmunk Build CLI Tool
+"""
+
+from utls import run_command, print_green_bold, print_blue_bold
+
+# Command and Args for the general print dot command
+PRINT_DOT_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "print-dot",
+]
+
+# Command and Args for the print dot command with `--all` flag
+PRINT_DOT_ALL_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "print-dot",
+    "-a",
+]
+
+
+def run_print_dot_commands():
+    """Runs the print dot commands for both targets and tasks"""
+    print_blue_bold("Running General Print Dot command...")
+    run_command(PRINT_DOT_COMMAND)
+    print_green_bold("*** General Print Dot command Succeeded ***")
+
+    print_blue_bold("Running Print Dot command with flag `--all`...")
+    run_command(PRINT_DOT_ALL_COMMAND)
+    print_green_bold("*** Print Dot command with flag `--all` Succeeded ***")
+
+
+if __name__ == "__main__":
+    run_print_dot_commands()
diff --git a/cli/integration_tests/run_all.py b/cli/integration_tests/run_all.py
new file mode 100644
index 0000000000..4bec7fad0d
--- /dev/null
+++ b/cli/integration_tests/run_all.py
@@ -0,0 +1,92 @@
+"""
+Provides a method to run the tests for all the commands provided by Chipmunk Build CLI Tool.
+All the tests build and run the current build CLI implementation in release mode, therefore
+it must be invoked from within the `Chipmunk/cli` directory
+"""
+
+from utls import print_red_bold, print_blue_bold, print_green_bold, print_cyan
+from build import run_build_tests
+from clean import run_clean_command
+from environment import run_environment_commands
+from lint import run_lint_command
+from print_dot import run_print_dot_commands
+from shell_compl import run_shell_completion_commands
+from test_cmd import run_test_command
+
+
+def run_all():
+    """Run the tests for all commands provided by Chipmunk Build CLI Tool"""
+    print_blue_bold("Running tests for all commands of Chipmunk Build CLI Tool")
+
+    ### Environment ###
+    try:
+        run_environment_commands()
+    except Exception:
+        print_err("Environment")
+        raise
+    print_separator()
+
+    ### Lint ###
+    try:
+        run_lint_command()
+    except Exception:
+        print_err("Lint")
+        raise
+    print_separator()
+
+    ### Clean ###
+    try:
+        run_clean_command()
+    except Exception:
+        print_err("Clean")
+        raise
+    print_separator()
+
+    ### Build ###
+    try:
+        run_build_tests()
+    except Exception:
+        print_err("Build")
+        raise
+    print_separator()
+
+    ### Test ###
+    try:
+        run_test_command()
+    except Exception:
+        print_err("Test")
+        raise
+    print_separator()
+
+    ### Print Dots ###
+    try:
+        run_print_dot_commands()
+    except Exception:
+        print_err("Print Dots")
+        raise
+    print_separator()
+
+    ### Shell Completion ###
+    try:
+        run_shell_completion_commands()
+    except Exception:
+        print_err("Shell Completion")
+        raise
+
+    print_green_bold(
+        "******** Tests for all commands of Chipmunk Build CLI Tool succeeded ********"
+    )
+
+
+def print_err(cmd_name: str):
+    """Prints a formatted error with the main command name"""
+    print_red_bold(f"Error while running tests for {cmd_name} commands")
+
+
+def
print_separator():
+    """Prints a colored separator between main commands"""
+    print_cyan("------------------------------------------------------------------")
+
+
+if __name__ == "__main__":
+    run_all()
diff --git a/cli/integration_tests/shell_compl.py b/cli/integration_tests/shell_compl.py
new file mode 100644
index 0000000000..f2f214239a
--- /dev/null
+++ b/cli/integration_tests/shell_compl.py
@@ -0,0 +1,31 @@
+"""
+Provides methods to test the Shell Completion command for a variety of shells in Chipmunk Build CLI Tool
+"""
+
+from utls import run_command, print_blue_bold, print_green_bold
+
+PRINT_COMPLETION_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "shell-completion",
+]
+
+# These are all the supported shells for completion
+SHELLS = ["bash", "elvish", "fish", "powershell", "zsh"]
+
+
+def run_shell_completion_commands():
+    """Runs commands to generate shell completion on all the available shells"""
+    for shell in SHELLS:
+        print_blue_bold(f"Running Shell Completion command for {shell}")
+        shell_command = PRINT_COMPLETION_COMMAND.copy()
+        shell_command.append(shell)
+        run_command(shell_command)
+        print_green_bold(f"*** Shell Completion Command for {shell} Succeeded ***")
+
+
+if __name__ == "__main__":
+    run_shell_completion_commands()
diff --git a/cli/integration_tests/test_cmd.py b/cli/integration_tests/test_cmd.py
new file mode 100644
index 0000000000..3e3a6565bc
--- /dev/null
+++ b/cli/integration_tests/test_cmd.py
@@ -0,0 +1,25 @@
+"""
+Provides methods to test the Test Command in Chipmunk Build CLI Tool
+"""
+
+from utls import run_command, print_blue_bold, print_green_bold
+
+TEST_COMMAND = [
+    "cargo",
+    "run",
+    "-r",
+    "--",
+    "chipmunk",
+    "test",
+]
+
+
+def run_test_command():
+    """Runs the test command on all targets. This test will fail on test errors of chipmunk targets as well."""
+    print_blue_bold("Running test command...")
+    run_command(TEST_COMMAND)
+    print_green_bold("*** Check for Test Command Succeeded ***")
+
+
+if __name__ == "__main__":
+    run_test_command()
diff --git a/cli/integration_tests/utls.py b/cli/integration_tests/utls.py
new file mode 100644
index 0000000000..ed71e2f5bc
--- /dev/null
+++ b/cli/integration_tests/utls.py
@@ -0,0 +1,81 @@
+"""
+Utility functions shared among the modules that test the Chipmunk Build CLI Tool
+"""
+
+from pathlib import Path
+import subprocess
+
+
+def get_root() -> Path:
+    """Get and validate the root directory of chipmunk repository
+
+    Raises:
+        SystemError: If validation of root directory fails
+
+    Returns:
+        Root directory of chipmunk
+    """
+
+    # We are using the utls file with the assumption that its path is `root/cli/integration_tests`
+    root_dir = Path(__file__).parent.parent.parent
+
+    # Root Dir checks. These checks depend on the current chipmunk directories' structure
+    sub_dirs = (
+        root_dir.joinpath(dir_name)
+        for dir_name in ["application", "developing", "cli", "scripts"]
+    )
+
+    if any(not path.exists() for path in sub_dirs):
+        raise SystemError(f"Root directory verification failed.
Root Dir: {root_dir}")
+
+    return root_dir
+
+
+def run_command(command_args: list[str]):
+    """Runs the command with its arguments after printing it to stdout
+
+    Args:
+        command_args: The command and its arguments in one string list
+    """
+    command_txt = " ".join(command_args)
+    print_bold(f"Command: {command_txt}")
+    subprocess.run(command_args, check=True)
+
+
+class bcolors:
+    """Color codes representation in ANSI"""
+
+    HEADER = "\033[95m"
+    OKBLUE = "\033[94m"
+    OKCYAN = "\033[96m"
+    OKGREEN = "\033[92m"
+    WARNING = "\033[93m"
+    FAIL = "\033[91m"
+    ENDC = "\033[0m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
+
+
+def print_bold(text: str):
+    """Prints the given text to stdout with bold attribute"""
+    print(f"{bcolors.BOLD}{text}{bcolors.ENDC}")
+
+
+def print_blue_bold(text: str):
+    """Prints the given text to stdout with bold and blue attribute"""
+    print(f"{bcolors.BOLD}{bcolors.OKBLUE}{text}{bcolors.ENDC}{bcolors.ENDC}")
+
+
+def print_green_bold(text: str):
+    """Prints the given text to stdout with bold and green attribute"""
+    print(f"{bcolors.BOLD}{bcolors.OKGREEN}{text}{bcolors.ENDC}{bcolors.ENDC}")
+
+
+def print_red_bold(text: str):
+    """Prints the given text to stdout with bold and red attribute"""
+    print(f"{bcolors.BOLD}{bcolors.FAIL}{text}{bcolors.ENDC}{bcolors.ENDC}")
+
+
+def print_cyan(text: str):
+    """Prints the given text to stdout with cyan color attribute"""
+    print(f"{bcolors.OKCYAN}{text}{bcolors.ENDC}")
diff --git a/cli/src/app_runner.rs b/cli/src/app_runner.rs
new file mode 100644
index 0000000000..8e766a752c
--- /dev/null
+++ b/cli/src/app_runner.rs
@@ -0,0 +1,24 @@
+use std::{io, process::ExitStatus};
+
+use tokio::process::Command;
+
+use crate::{dev_tools::DevTool, target::Target};
+
+pub async fn run_app() -> io::Result<ExitStatus> {
+    let electron_path = Target::App.cwd();
+
+    let yarn_path = DevTool::Yarn.path();
+
+    let electron_arg = if cfg!(windows) {
+        "electron-win"
+    } else {
+        "electron"
+    };
+
+    Command::new(yarn_path)
+        .current_dir(electron_path)
+        .args(["run", electron_arg])
+        .kill_on_drop(true)
+        .status()
+        .await
+}
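A hedged usage sketch for `run_app` (assumes a tokio runtime and an already-built application; error handling elided):

    let status = app_runner::run_app().await?;
    if !status.success() {
        eprintln!("Electron exited with status: {status}");
    }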
diff --git a/cli/src/checksum_records.rs b/cli/src/checksum_records.rs
new file mode 100644
index 0000000000..c1e343decd
--- /dev/null
+++ b/cli/src/checksum_records.rs
@@ -0,0 +1,271 @@
+use std::{
+    collections::{btree_map, BTreeMap, BTreeSet},
+    fs::{self, File},
+    io::Write,
+    path::PathBuf,
+    sync::{Mutex, OnceLock},
+};
+
+use anyhow::{anyhow, Context};
+use dir_checksum::{calc_combined_checksum, HashDigest};
+
+use crate::{job_type::JobType, location::get_root, target::Target};
+
+const FILE_NAME_DEV: &str = ".build_chksum_dev";
+const FILE_NAME_PROD: &str = ".build_chksum_prod";
+
+#[derive(Debug)]
+/// Manages and compares the file states for the targets between the current and previous builds.
+/// It calculates the checksums of the files for each target and saves them to a file after
+/// each build; for the next build it'll calculate the checksums again and compare them with
+/// the saved ones.
+/// It also manages loading and clearing the saved checksum records.
+pub struct ChecksumRecords {
+    items: Mutex<ChecksumItems>,
+}
+
+#[derive(Debug, Default)]
+struct ChecksumItems {
+    map: BTreeMap<Target, HashDigest>,
+    involved_targets: BTreeSet<Target>,
+}
+
+/// Represents the result of comparing the saved checksum and the calculated one for the
+/// build target
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum ChecksumCompareResult {
+    Same,
+    Changed,
+}
+
+impl ChecksumRecords {
+    /// Updates the checksum records for involved jobs depending on the job type.
+    /// It will calculate new checksums if build tasks were involved.
+    pub fn update_and_save(job_type: JobType) -> anyhow::Result<()> {
+        // calculate should be involved when build is called at some point of the job
+        let (calculate_involved, prod) = match &job_type {
+            // Linting involves building the TS targets and their dependencies
+            JobType::Lint => (true, false),
+            JobType::Build { production }
+            | JobType::Run { production }
+            | JobType::Test { production } => (true, *production),
+            // With clean we need to remove the items from both development and production
+            JobType::Clean => (false, false),
+            JobType::Install { production } | JobType::AfterBuild { production } => {
+                (false, *production)
+            }
+        };
+
+        let records = Self::get(prod)?;
+
+        if calculate_involved {
+            records.calculate_involved_hashes()?;
+
+            // Production and development use the same artifacts, which will lead to false
+            // positives when the artifacts are modified via another build but the checksum of
+            // the source files is still the same.
+            // To solve this problem we reset the checksums of the opposite build production
+            // type when build is involved in the current process
+            let outdated_record_prod = !prod;
+            Self::remove_records_file(outdated_record_prod).with_context(|| {
+                format!(
+                    "Error while removing the outdated {} build checksum records",
+                    if outdated_record_prod {
+                        "production"
+                    } else {
+                        "development"
+                    }
+                )
+            })?;
+        }
+
+        records
+            .persist_hashes(prod)
+            .context("Error while saving the updated hashes")?;
+
+        // Hashes must be removed from production if clean is called because it doesn't
+        // differentiate between development and production.
+        if matches!(job_type, JobType::Clean) {
+            let prod_records =
+                Self::load(true).context("Error while loading production records")?;
+
+            let dev_items = records
+                .items
+                .lock()
+                .map_err(|err| anyhow!("Error while acquiring items jobs mutex: Error {err}"))?;
+
+            // With the clean job, the involved targets are the ones that have been deleted.
+            for target in &dev_items.involved_targets {
+                prod_records.remove_hash_if_exist(*target)?;
+            }
+
+            prod_records
+                .persist_hashes(true)
+                .context("Error while saving the updated hashes")?;
+        }
+
+        Ok(())
+    }
+
+    /// Returns a reference to the checksum records manager singleton
+    pub fn get(production: bool) -> anyhow::Result<&'static ChecksumRecords> {
+        static CHECKSUM_RECORDS: OnceLock<anyhow::Result<ChecksumRecords>> = OnceLock::new();
+
+        CHECKSUM_RECORDS
+            .get_or_init(|| ChecksumRecords::load(production))
+            .as_ref()
+            .map_err(|err| anyhow!("{err}"))
+    }
+
+    /// Loads the persisted records from the checksums file if it exists
+    fn load(production: bool) -> anyhow::Result<Self> {
+        let file_path = Self::get_file_path(production);
+
+        let items = if file_path.exists() {
+            let file_content = fs::read_to_string(file_path)?;
+            let map = Self::parse_hashes(&file_content)?;
+            ChecksumItems {
+                map,
+                involved_targets: BTreeSet::new(),
+            }
+        } else {
+            ChecksumItems::default()
+        };
+
+        Ok(Self {
+            items: Mutex::new(items),
+        })
+    }
+
+    /// Gets the path of the file where the checksums are saved
+    fn get_file_path(production: bool) -> PathBuf {
+        let root = get_root();
+        if production {
+            root.join(FILE_NAME_PROD)
+        } else {
+            root.join(FILE_NAME_DEV)
+        }
+    }
+
+    /// Removes the records file for the given environment
+    pub fn remove_records_file(production: bool) -> anyhow::Result<()> {
+        let file_path = Self::get_file_path(production);
+        if file_path.exists() {
+            std::fs::remove_file(&file_path).with_context(|| {
+                format!(
+                    "Error while removing the file {} to reset checksum records",
+                    file_path.display()
+                )
+            })?;
+        }
+
+        Ok(())
+    }
+
+    fn parse_hashes(text: &str) -> anyhow::Result<BTreeMap<Target, HashDigest>> {
+        let mut hashes = BTreeMap::new();
+
+        for (target, hash) in text.lines().filter_map(|line| line.split_once(':')) {
+            let target: Target = target.parse()?;
+            let hash: HashDigest = hash.parse().map_err(|e| anyhow!("{e}"))?;
+
+            hashes.insert(target, hash);
+        }
+
+        Ok(hashes)
+    }
+
+    /// Marks the job as involved in the record tracker
+    pub fn register_job(&self, target: Target) -> anyhow::Result<()> {
+        let mut items = self
+            .items
+            .lock()
+            .map_err(|err| anyhow!("Error while acquiring items jobs mutex: Error {err}"))?;
+        items.involved_targets.insert(target);
+        Ok(())
+    }
+
+    /// Calculates the current checksum for the given target and compares it to the saved one.
+    ///
+    /// # Panics
+    ///
+    /// This method panics if the provided target isn't registered
+    pub fn compare_checksum(&self, target: Target) -> anyhow::Result<ChecksumCompareResult> {
+        let items = self
+            .items
+            .lock()
+            .map_err(|err| anyhow!("Error while acquiring items jobs mutex: Error {err}"))?;
+
+        assert!(items.involved_targets.contains(&target));
+        let saved_hash = match items.map.get(&target) {
+            Some(hash) => hash,
+            // If there is no existing checksum to compare with, then the checksums state has
+            // changed.
+            None => return Ok(ChecksumCompareResult::Changed),
+        };
+
+        let current_hash = Self::calc_hash_for_target(target)?;
+
+        let comparison = if current_hash == *saved_hash {
+            ChecksumCompareResult::Same
+        } else {
+            ChecksumCompareResult::Changed
+        };
+
+        Ok(comparison)
+    }
+
+    fn calc_hash_for_target(target: Target) -> anyhow::Result<HashDigest> {
+        let path = target.cwd();
+        calc_combined_checksum(path).with_context(|| {
+            format!("Error while calculating the current hash for target: {target}",)
+        })
+    }
+
+    /// Removes the target from the checksum records
+    pub fn remove_hash_if_exist(&self, target: Target) -> anyhow::Result<()> {
+        let mut items = self
+            .items
+            .lock()
+            .map_err(|err| anyhow!("Error while acquiring items jobs mutex: Error {err}"))?;
+
+        items.involved_targets.insert(target);
+
+        items.map.remove(&target);
+
+        Ok(())
+    }
+
+    fn calculate_involved_hashes(&self) -> anyhow::Result<()> {
+        let mut items = self
+            .items
+            .lock()
+            .map_err(|err| anyhow!("Error while acquiring items jobs mutex: Error {err}"))?;
+
+        for target in items.involved_targets.clone() {
+            let hash = Self::calc_hash_for_target(target)?;
+            match items.map.entry(target) {
+                btree_map::Entry::Occupied(mut o) => *o.get_mut() = hash,
+                btree_map::Entry::Vacant(e) => _ = e.insert(hash),
+            };
+        }
+
+        Ok(())
+    }
+
+    fn persist_hashes(&self, production: bool) -> anyhow::Result<()> {
+        let file_path = Self::get_file_path(production);
+
+        let mut file = File::create(file_path)?;
+        let items = self
+            .items
+            .lock()
+            .map_err(|err| anyhow!("Error while acquiring items jobs mutex: Error {err}"))?;
+
+        for (target, hash) in items.map.iter() {
+            writeln!(file, "{}:{}", target, hash)?;
+        }
+
+        Ok(())
+    }
+}
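A hedged sketch of how a consumer drives this type (the real call sites live in the jobs runner; target and job type are illustrative, error handling elided):

    let records = ChecksumRecords::get(false)?;      // development records
    records.register_job(Target::Shared)?;           // mark the target as involved
    if records.compare_checksum(Target::Shared)? == ChecksumCompareResult::Changed {
        // rebuild Shared and everything that depends on it
    }
    // After the jobs ran, recalculate and persist the involved hashes.
    ChecksumRecords::update_and_save(JobType::Build { production: false })?;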
diff --git a/cli/src/cli_args.rs b/cli/src/cli_args.rs
new file mode 100644
index 0000000000..a640c41bad
--- /dev/null
+++ b/cli/src/cli_args.rs
@@ -0,0 +1,112 @@
+use std::path::PathBuf;
+
+use clap::{Parser, Subcommand};
+
+use crate::target::Target;
+
+static REPORT_HELP_TEXT: &str =
+    "Write report from command logs to the given file or to stdout if no file is defined";
+static REPORT_VALUE_NAME: &str = "FILE-PATH";
+
+#[derive(Parser)]
+#[command(name = "cargo", bin_name = "cargo")]
+pub enum CargoCli {
+    Chipmunk(Cli),
+}
+
+#[derive(clap::Args, Debug)]
+#[command(author, version, about, long_about = None)]
+pub struct Cli {
+    #[command(subcommand)]
+    pub command: Command,
+}
+
+#[derive(Subcommand, Debug, Clone)]
+pub enum Command {
+    /// Provides commands for the tools needed for development
+    #[clap(visible_alias = "env")]
+    #[command(subcommand)]
+    Environment(EnvironmentCommand),
+    /// Prints an overview of targets dependencies in print-dot format for `Graphviz`
+    #[clap(visible_alias = "dot")]
+    PrintDot {
+        /// Show all jobs and their relations
+        #[arg(short, long, default_value_t = false)]
+        all_jobs: bool,
+    },
+    /// Runs linting & clippy for all or the specified targets
+    Lint {
+        /// Target to lint, by default the whole application will be linted
+        #[arg(index = 1)]
+        target: Option<Vec<Target>>,
+
+        #[arg(short, long, value_name = REPORT_VALUE_NAME, help = REPORT_HELP_TEXT)]
+        report: Option<Option<PathBuf>>,
+    },
+    /// Build all or the specified targets
+    Build {
+        /// Target to build, by default the whole application will be built
+        #[arg(index = 1)]
+        target: Option<Vec<Target>>,
+
+        /// Build release version
+        #[arg(short, long, default_value_t = false)]
+        production: bool,
+
+        #[arg(short, long, value_name = REPORT_VALUE_NAME, help = REPORT_HELP_TEXT)]
+        report: Option<Option<PathBuf>>,
+    },
+    /// Clean all or the specified targets
+    Clean {
+        /// Target to clean, by default the whole application will be cleaned
+        #[arg(index = 1)]
+        target: Option<Vec<Target>>,
+
+        #[arg(short, long, value_name = REPORT_VALUE_NAME, help = REPORT_HELP_TEXT)]
+        report: Option<Option<PathBuf>>,
+    },
+    /// Run tests for all or the specified targets
+    Test {
+        /// Target to test, by default the whole application will be tested
+        #[arg(index = 1)]
+        target: Option<Vec<Target>>,
+
+        /// Test release version
+        #[arg(short, long, default_value_t = false)]
+        production: bool,
+
+        #[arg(short, long, value_name = REPORT_VALUE_NAME, help = REPORT_HELP_TEXT)]
+        report: Option<Option<PathBuf>>,
+    },
+    /// Build and Run the application
+    Run {
+        /// Run release version
+        #[arg(short, long, default_value_t = false)]
+        production: bool,
+    },
+    /// Resets the checksum records that are used to check whether there were any code changes for
+    /// each target.
+    #[clap(visible_alias = "reset")]
+    ResetChecksum {
+        /// Reset release records
+        #[arg(short, long, default_value_t = false)]
+        production: bool,
+    },
+    /// Generate shell completion for the commands of this tool in the given shell,
+    /// printing them to stdout.
+    #[clap(visible_alias = "compl")]
+    ShellCompletion {
+        /// Shell to generate the completion for
+        #[arg(value_enum)]
+        shell: clap_complete::Shell,
+    },
+}
+
+#[derive(Subcommand, Debug, Clone)]
+pub enum EnvironmentCommand {
+    /// Checks that all needed tools for the development are installed
+    Check,
+    /// Prints the information of the needed tools for the development
+    #[clap(visible_alias = "list")]
+    Print,
+}
diff --git a/cli/src/dev_environment.rs b/cli/src/dev_environment.rs
new file mode 100644
index 0000000000..244ae38070
--- /dev/null
+++ b/cli/src/dev_environment.rs
@@ -0,0 +1,60 @@
+use std::{fmt::Write, process::Command};
+
+use anyhow::bail;
+
+use crate::dev_tools::DevTool;
+
+/// Resolves the paths for all development tools, returning an Error if any of them can't be resolved
+pub fn resolve_dev_tools() -> anyhow::Result<()> {
+    let mut errors = None;
+    for tool in DevTool::all() {
+        let Err(err) = tool.resolve_path() else {
+            continue;
+        };
+
+        let error_lines =
+            errors.get_or_insert(String::from("Following dependencies are missing:\n"));
+
+        // Writing to string never fails
+        writeln!(
+            error_lines,
+            "Required dependency '{tool}' is not installed.",
+        )?;
+
+        writeln!(error_lines, "Resolve Error Info:{err}",)?;
+
+        if let Some(install_hint) = tool.install_hint() {
+            writeln!(
+                error_lines,
+                "Consider installing it using the command '{install_hint}'"
+            )?;
+        }
+
+        writeln!(
+            error_lines,
+            "------------------------------------------------------------------"
+        )?;
+    }
+
+    match errors {
+        Some(err_text) => bail!("{}", err_text.trim()),
+        None => Ok(()),
+    }
+}
+
+/// Prints the information of the needed tools for the development if available, otherwise prints
+/// error information to `stderr`
+pub fn print_env_info() {
+    for tool in DevTool::all() {
+        println!("{tool} Info:");
+        match tool.resolve_path() {
+            Ok(cmd) => {
+                if let Err(err) = Command::new(cmd).arg(tool.version_args()).status() {
+                    eprintln!("Error while retrieving dependency's information: {err}");
+                }
+            }
+            Err(err) => eprintln!("Error while resolving tool '{tool}': {err}"),
+        }
+        println!("------------------------------------------------------------------");
+    }
+}
diff --git a/cli/src/dev_tools.rs b/cli/src/dev_tools.rs
new file mode 100644
index 0000000000..5bc8e3f83b
--- /dev/null
+++ b/cli/src/dev_tools.rs
@@ -0,0 +1,154 @@
+use anyhow::{anyhow, Result};
+use std::{fmt::Display, path::PathBuf, sync::OnceLock};
+
+use which::{which_all_global, which_global};
+
+#[derive(Debug, Clone, Copy)]
+/// Represents the development tools which are used to build & test the app
+pub enum DevTool {
+    Node,
+    Npm,
+    Yarn,
+    RustUp,
+    Cargo,
+    WasmPack,
+    NjCli,
+}
+
+impl Display for DevTool {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            DevTool::Node => write!(f, "NodeJS"),
+            DevTool::Npm => write!(f, "npm"),
+            DevTool::Yarn => write!(f, "yarn"),
+            DevTool::RustUp => write!(f, "Rust"),
+            DevTool::Cargo => write!(f, "cargo"),
+            DevTool::WasmPack => write!(f, "wasm-pack"),
+            DevTool::NjCli => write!(f, "nj-cli"),
+        }
+    }
+}
+
+impl DevTool {
+    /// Returns all the tools needed for chipmunk development
+    pub fn all() -> &'static [DevTool] {
+        if cfg!(debug_assertions) {
+            // This check is a reminder to add newly added variants to this function
+            match DevTool::Node {
+                DevTool::Node => (),
+                DevTool::Npm => (),
+                DevTool::Yarn => (),
+                DevTool::RustUp => (),
+                DevTool::Cargo => (),
+                DevTool::WasmPack => (),
+                DevTool::NjCli => (),
+            };
+        }
+
+        [
+            DevTool::Node,
+            DevTool::Npm,
+            DevTool::Yarn,
+            DevTool::RustUp,
+            DevTool::Cargo,
+            DevTool::WasmPack,
+            DevTool::NjCli,
+        ]
+        .as_slice()
+    }
+
+    /// Provides the suggested way to install the tool
+    pub fn install_hint(self) -> Option<&'static str> {
+        match self {
+            DevTool::Node | DevTool::Npm | DevTool::RustUp | DevTool::Cargo => None,
+            DevTool::Yarn => Some("npm install --global yarn"),
+            DevTool::WasmPack => Some("cargo install wasm-pack"),
+            DevTool::NjCli => Some("cargo install nj-cli"),
+        }
+    }
+
+    /// Provides the command line argument to get the version of the installed tool
+    pub fn version_args(self) -> &'static str {
+        match self {
+            DevTool::Node | DevTool::Npm | DevTool::Yarn => "-v",
+            DevTool::RustUp | DevTool::Cargo | DevTool::WasmPack | DevTool::NjCli => "-V",
+        }
+    }
+
+    /// Resolves the path of the tool if it exists, returning an Error when not possible
+    pub fn resolve_path(self) -> &'static Result<PathBuf> {
+        match self {
+            DevTool::Node => resolve_node_path(),
+            DevTool::Npm => resolve_npm_path(),
+            DevTool::Yarn => resolve_yarn_path(),
+            DevTool::RustUp => resolve_rustup_path(),
+            DevTool::Cargo => resolve_cargo_path(),
+            DevTool::WasmPack => resolve_wasm_pack_path(),
+            DevTool::NjCli => resolve_nj_cli_path(),
+        }
+    }
+
+    /// Gets the path of the resolved tool. Panics if the tool can't be resolved
+    pub fn path(self) -> &'static PathBuf {
+        self.resolve_path()
+            .as_ref()
+            .expect("Developer Error: Cmd has already been resolved")
+    }
+}
+
+fn resolve_node_path() -> &'static Result<PathBuf> {
+    static NODE: OnceLock<Result<PathBuf>> = OnceLock::new();
+
+    NODE.get_or_init(|| find_cmd("node"))
+}
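The per-tool resolver functions here all share one memoization pattern: a function-local `OnceLock` caches the first lookup so every later call returns the same cached `Result`. A condensed, hedged sketch of the idea (the function name and the hard-coded tool are illustrative):

    use std::path::PathBuf;
    use std::sync::OnceLock;

    fn resolve_once() -> &'static anyhow::Result<PathBuf> {
        static CELL: OnceLock<anyhow::Result<PathBuf>> = OnceLock::new();
        // The closure runs at most once; later calls return the cached Result.
        CELL.get_or_init(|| which::which_global("node").map_err(anyhow::Error::from))
    }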
+
+fn find_cmd(cmd: &str) -> Result<PathBuf> {
+    which_global(cmd).map_err(|err| anyhow!("Command `{cmd}` couldn't be resolved. Err: {err}"))
+}
+
+fn resolve_npm_path() -> &'static Result<PathBuf> {
+    static NPM: OnceLock<Result<PathBuf>> = OnceLock::new();
+
+    NPM.get_or_init(|| find_cmd("npm"))
+}
+
+fn resolve_yarn_path() -> &'static Result<PathBuf> {
+    static YARN: OnceLock<Result<PathBuf>> = OnceLock::new();
+
+    YARN.get_or_init(|| find_cmd("yarn"))
+}
+
+fn resolve_rustup_path() -> &'static Result<PathBuf> {
+    static RUSTUP: OnceLock<Result<PathBuf>> = OnceLock::new();
+
+    RUSTUP.get_or_init(|| find_cmd("rustup"))
+}
+
+fn resolve_cargo_path() -> &'static Result<PathBuf> {
+    static CARGO: OnceLock<Result<PathBuf>> = OnceLock::new();
+
+    if cfg!(windows) {
+        // Rust adds its toolchain to PATH on Windows, which must be filtered out
+        CARGO.get_or_init(|| {
+            let mut paths = which_all_global("cargo")?;
+
+            paths
+                .find(|p| p.components().any(|c| c.as_os_str() == ".cargo"))
+                .ok_or_else(|| anyhow!("The command 'cargo' can't be found"))
+        })
+    } else {
+        CARGO.get_or_init(|| find_cmd("cargo"))
+    }
+}
+
+fn resolve_wasm_pack_path() -> &'static Result<PathBuf> {
+    static WASM_PACK: OnceLock<Result<PathBuf>> = OnceLock::new();
+
+    WASM_PACK.get_or_init(|| find_cmd("wasm-pack"))
+}
+
+fn resolve_nj_cli_path() -> &'static Result<PathBuf> {
+    static NJ_CLI: OnceLock<Result<PathBuf>> = OnceLock::new();
+
+    NJ_CLI.get_or_init(|| find_cmd("nj-cli"))
+}
diff --git a/cli/src/fstools.rs b/cli/src/fstools.rs
new file mode 100644
index 0000000000..0e43d4f33d
--- /dev/null
+++ b/cli/src/fstools.rs
@@ -0,0 +1,93 @@
+extern crate fs_extra;
+use anyhow::{Context, Error};
+use fs_extra::dir::{copy_with_progress, CopyOptions, TransitProcess, TransitProcessResult};
+use std::sync::mpsc;
+use std::{fs, path::PathBuf};
+
+use crate::jobs_runner::JobDefinition;
+use crate::tracker::get_tracker;
+
+/// Spawns a job to copy a file, adding the info to the report logs
+pub fn cp_file(
+    job_def: JobDefinition,
+    src: PathBuf,
+    dest: PathBuf,
+    report_logs: &mut Vec<String>,
+) -> Result<(), Error> {
+    let msg = format!("copying file: '{}' to '{}'", src.display(), dest.display());
+    report_logs.push(msg);
+
+    let tracker = get_tracker();
+    tracker.msg(job_def, "copying files".into());
+
+    fs::copy(&src, &dest).with_context(|| {
+        format!(
+            "Error while copying file '{}' to '{}'",
+            src.display(),
+            dest.display()
+        )
+    })?;
+    tracker.msg(
+        job_def,
+        format!("copied: {} to {}", src.display(), dest.display()),
+    );
+    Ok(())
+}
+
+/// Spawns a job to copy a directory, adding the info to the report logs
+pub async fn cp_folder(
+    job_def: JobDefinition,
+    src: PathBuf,
+    dest: PathBuf,
+    report_logs: &mut Vec<String>,
+) -> Result<(), Error> {
+    let options = CopyOptions::new();
+    let (tx, rx): (mpsc::Sender<TransitProcess>, mpsc::Receiver<TransitProcess>) = mpsc::channel();
+
+    let path_display = format!("'{}' to '{}'", src.display(), dest.display());
+
+    let report_msg = format!("copying directory: {path_display}");
+    report_logs.push(report_msg.clone());
+
+    let tracker = get_tracker();
+    tracker.msg(job_def, report_msg);
+
+    let _ = tokio::spawn(async move {
+        copy_with_progress(src, dest, &options, |info| {
+            if tx.send(info).is_err() {
+                eprintln!("Failed to send copying progress");
+            }
+            TransitProcessResult::ContinueOrAbort
+        })
+    })
+    .await
+    .with_context(|| format!("Error while copying directory: {path_display}"))?;
+    while let Ok(info) = rx.recv() {
+        tracker.msg(
+            job_def,
+            format!(
+                "copied: {} bytes; current: {}",
+                info.copied_bytes, info.file_name
+            ),
+        );
+        tracker.progress(job_def, None);
+    }
+
+    let msg = format!("copied: {path_display}");
+    tracker.msg(job_def, msg);
+    Ok(())
+}
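`cp_folder` pairs `fs_extra`'s progress callback with a channel so the caller can drain updates while the copy runs. A hedged, self-contained sketch of the same pattern using a plain OS thread instead of tokio (function name and paths are illustrative):

    use std::sync::mpsc;
    use std::thread;

    use fs_extra::dir::{copy_with_progress, CopyOptions, TransitProcessResult};

    fn copy_dir_logging(src: &str, dest: &str) -> fs_extra::error::Result<u64> {
        let (tx, rx) = mpsc::channel();
        let (src, dest) = (src.to_owned(), dest.to_owned());
        // Run the blocking copy on a worker thread, sending progress over the channel.
        let worker = thread::spawn(move || {
            copy_with_progress(src, dest, &CopyOptions::new(), |info| {
                let _ = tx.send(info.copied_bytes); // receiver may already be gone
                TransitProcessResult::ContinueOrAbort
            })
        });
        // The loop ends once the worker finishes and drops the sender.
        for copied in rx {
            println!("copied {copied} bytes so far");
        }
        worker.join().expect("copy thread panicked")
    }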
+
+/// Spawns a job to remove a directory recursively, adding the info to the report logs
+pub fn rm_folder(job_def: JobDefinition, path: &PathBuf) -> Result<(), Error> {
+    if !path.exists() {
+        return Ok(());
+    }
+    let tracker = get_tracker();
+    tracker.msg(job_def, format!("removing directory: {}", path.display()));
+
+    fs::remove_dir_all(path)?;
+
+    tracker.msg(job_def, format!("removed: {}", path.display(),));
+    Ok(())
+}
diff --git a/cli/src/job_type.rs b/cli/src/job_type.rs
new file mode 100644
index 0000000000..34bc586d37
--- /dev/null
+++ b/cli/src/job_type.rs
@@ -0,0 +1,104 @@
+use std::fmt::Display;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+// * NOTE: The order of job types must match their running order, because the
+//   dependencies-graph is solved using a BTreeMap keyed by them
+//
+// * NOTE: We provide all job types in match arms without using wild-card matching nor
+//   the `matches!()` macro to keep the compiler assistance when adding new job types.
+pub enum JobType {
+    Clean,
+    Install { production: bool },
+    Build { production: bool },
+    AfterBuild { production: bool },
+    Lint,
+    Test { production: bool },
+    Run { production: bool },
+}
+
+impl Display for JobType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            JobType::Lint => write!(f, "Lint"),
+            JobType::Clean => write!(f, "Clean"),
+            JobType::Build { .. } => write!(f, "Build"),
+            JobType::Install { .. } => write!(f, "Install"),
+            JobType::AfterBuild { .. } => write!(f, "After Build"),
+            JobType::Test { .. } => write!(f, "Test"),
+            JobType::Run { .. } => write!(f, "Run"),
+        }
+    }
+}
+
+impl JobType {
+    pub fn is_production(&self) -> Option<bool> {
+        match self {
+            JobType::Lint | JobType::Clean => None,
+            JobType::Build { production }
+            | JobType::Install { production }
+            | JobType::AfterBuild { production }
+            | JobType::Test { production }
+            | JobType::Run { production } => Some(*production),
+        }
+    }
+
+    /// Returns the job types that are involved with this job and should run with it.
+    pub fn get_involved_jobs(&self) -> Vec<JobType> {
+        match self {
+            // Linting TS requires building too, to check for type errors
+            JobType::Lint => vec![JobType::Build { production: false }],
+            JobType::Build { production } => vec![
+                JobType::Install {
+                    production: *production,
+                },
+                JobType::AfterBuild {
+                    production: *production,
+                },
+            ],
+            // Only TS and WASM tests need to build before running the tests
+            JobType::Run { production } | JobType::Test { production } => vec![JobType::Build {
+                production: *production,
+            }],
+            JobType::Clean | JobType::Install { .. } | JobType::AfterBuild { .. } => Vec::new(),
+        }
+    }
+
+    /// Returns whether the job type is part of the build process (install, build, or after build)
+    pub fn is_part_of_build(&self) -> bool {
+        match self {
+            JobType::Install { .. } | JobType::Build { .. } | JobType::AfterBuild { .. } => true,
+            JobType::Clean | JobType::Lint | JobType::Test { .. } | JobType::Run { .. } => false,
+        }
+    }
+}
+
+#[cfg(test)]
+impl JobType {
+    /// Returns all existing job types, with production set to false for the variants
+    /// carrying production info
+    pub fn all() -> &'static [JobType] {
+        if cfg!(debug_assertions) {
+            // This check is a reminder to add newly added variants to this function
+            match JobType::Lint {
+                JobType::Lint => (),
+                JobType::Clean => (),
+                JobType::Build { .. } => (),
+                JobType::Install { .. } => (),
+                JobType::AfterBuild { .. } => (),
+                JobType::Test { .. } => (),
+                JobType::Run { .. } => (),
+            };
+        }
+
+        [
+            JobType::Lint,
+            JobType::Clean,
+            JobType::Build { production: false },
+            JobType::Install { production: false },
+            JobType::AfterBuild { production: false },
+            JobType::Test { production: false },
+            JobType::Run { production: false },
+        ]
+        .as_slice()
+    }
+}
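For example, matching the mapping in `get_involved_jobs` above, a development build pulls in its install and after-build steps:

    let involved = JobType::Build { production: false }.get_involved_jobs();
    assert_eq!(
        involved,
        vec![
            JobType::Install { production: false },
            JobType::AfterBuild { production: false },
        ]
    );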
diff --git a/cli/src/jobs_runner/job_definition.rs b/cli/src/jobs_runner/job_definition.rs
new file mode 100644
index 0000000000..170c4b05e7
--- /dev/null
+++ b/cli/src/jobs_runner/job_definition.rs
@@ -0,0 +1,100 @@
+use crate::{job_type::JobType, spawner::SpawnResult, target::Target, tracker::get_tracker};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+/// Represents a development job definition
+///
+/// * `target`: Job Target (Shared, Client...)
+/// * `job_type`: Job Type (Build, Test...)
+pub struct JobDefinition {
+    pub target: Target,
+    pub job_type: JobType,
+}
+
+impl JobDefinition {
+    pub fn new(target: Target, job_type: JobType) -> Self {
+        Self { target, job_type }
+    }
+
+    /// Provides a formatted job title with target and job type info
+    pub fn job_title(self) -> String {
+        format!("{} {}", self.target, self.job_type)
+    }
+
+    /// Runs the job definition if it has a job, communicating its status with the UI bars
+    pub async fn run(self, skip: bool) -> Option<anyhow::Result<SpawnResult>> {
+        let tracker = get_tracker();
+        if let Err(err) = tracker.start(self).await {
+            return Some(Err(err));
+        }
+
+        let res = self.run_intern(skip).await;
+
+        match res.as_ref() {
+            Some(Ok(res)) => {
+                if res.status.success() {
+                    if res.skipped.is_some_and(|skipped| skipped) {
+                        tracker.success(self, "skipped".into());
+                    } else {
+                        tracker.success(self, String::default());
+                    }
+                } else {
+                    tracker.fail(self, "finished with errors".into());
+                }
+            }
+            Some(Err(err)) => tracker.fail(self, format!("finished with errors. {err}")),
+            None => (),
+        }
+
+        res
+    }
+
+    #[inline]
+    /// Runs the job definition if it has a job
+    async fn run_intern(self, skip: bool) -> Option<anyhow::Result<SpawnResult>> {
+        let res = match self.job_type {
+            JobType::Lint => self.target.check().await,
+            JobType::Build { production } => self.target.build(production, skip).await,
+            // Install always runs in development mode at first; the target is then reinstalled
+            // for production after the build command has run.
+            // We must still deliver the correct job type for the communication with the tracker.
+            JobType::Install { production } => {
+                return self
+                    .target
+                    .install(false, skip, Some(JobType::Install { production }))
+                    .await;
+            }
+            JobType::AfterBuild { production } => {
+                return self.target.after_build(production, skip).await
+            }
+            JobType::Clean => self.target.reset().await,
+            JobType::Test { production } => return self.target.test(production).await,
+            JobType::Run { .. } => return None,
+        };
+
+        Some(res)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{job_type::JobType, jobs_runner::JobDefinition};
+
+    use super::Target;
+
+    #[tokio::test]
+    async fn target_has_job() {
+        for target in Target::all() {
+            for job_type in JobType::all() {
+                if !target.has_job(*job_type) {
+                    let job_def = JobDefinition::new(*target, job_type.clone());
+                    assert!(
+                        job_def.run_intern(false).await.is_none(),
+                        "'{}' has no job for '{}' but it returns Some when calling run",
+                        target,
+                        job_type
+                    )
+                }
+            }
+        }
+    }
+}
diff --git a/cli/src/jobs_runner/jobs_resolver.rs b/cli/src/jobs_runner/jobs_resolver.rs
new file mode 100644
index 0000000000..8abeb1cc1f
--- /dev/null
+++ b/cli/src/jobs_runner/jobs_resolver.rs
@@ -0,0 +1,424 @@
+use std::collections::{BTreeMap, BTreeSet};
+
+use crate::{job_type::JobType, target::Target};
+
+use super::JobDefinition;
+
+/// Resolves task dependencies for the given targets and job,
+/// returning a dependencies map for the tasks
+pub fn resolve(
+    targets: &[Target],
+    main_job: JobType,
+) -> BTreeMap<JobDefinition, Vec<JobDefinition>> {
+    let involved_jobs = flatten_jobs(main_job);
+
+    let has_build_deps = involved_jobs
+        .iter()
+        .any(|job| matches!(job, JobType::Build { .. }));
+
+    let involved_targets = if has_build_deps {
+        flatten_targets_for_build(targets)
+    } else {
+        BTreeSet::from_iter(targets.to_owned())
+    };
+
+    let mut jobs_tree: BTreeMap<JobDefinition, Vec<JobDefinition>> = BTreeMap::new();
+
+    for target in involved_targets {
+        for job in involved_jobs
+            .iter()
+            .filter(|&&j| is_job_involved(target, j, &main_job))
+        {
+            // Start with dependencies from other targets (applies to Build & Install jobs only)
+            let mut dep_jobs = match job {
+                // Install jobs are involved here too because copying the files in the after-build
+                // process could delete the current files.
+                JobType::Build { .. } | JobType::Install { .. } => {
+                    let deps = flatten_targets_for_build(target.deps().as_slice());
+
+                    // Jobs of the dependencies are already included in the jobs tree because we
+                    // are iterating through targets and jobs in the matching order of their
+                    // dependencies relations.
+                    jobs_tree
+                        .keys()
+                        .filter(|job_def| deps.contains(&job_def.target))
+                        .cloned()
+                        .collect()
+                }
+
+                // Other job types don't have dependencies
+                JobType::Clean
+                | JobType::AfterBuild { .. }
+                | JobType::Lint
+                | JobType::Test { .. }
+                | JobType::Run { .. } => Vec::new(),
+            };
+
+            // Add dependency jobs from the same target
+            // NOTE: This relies on the JobType variants being declared in their running order
+            dep_jobs.extend(
+                jobs_tree
+                    .keys()
+                    .filter(|job_d| job_d.target == target)
+                    .cloned(),
+            );
+
+            let job_def = JobDefinition::new(target, *job);
+
+            assert!(
+                jobs_tree.insert(job_def, dep_jobs).is_none(),
+                "JobDefinition is added to tree more than once. Target: {}, Job: {}",
+                target,
+                job
+            );
+        }
+    }
+
+    jobs_tree
+}
+
+/// Returns all involved job types according to the given job type.
+fn flatten_jobs(main_job: JobType) -> BTreeSet<JobType> {
+    fn flatten_rec(job: JobType, involved_jobs: &mut BTreeSet<JobType>) {
+        if !involved_jobs.insert(job) {
+            return;
+        }
+        for involved_job in job.get_involved_jobs() {
+            flatten_rec(involved_job, involved_jobs);
+        }
+    }
+
+    let mut jobs = BTreeSet::new();
+
+    flatten_rec(main_job, &mut jobs);
+
+    jobs
+}
+
+/// Returns all involved targets for the given targets for build tasks
+fn flatten_targets_for_build(targets: &[Target]) -> BTreeSet<Target> {
+    fn flatten_rec(target: Target, involved_targets: &mut BTreeSet<Target>) {
+        if !involved_targets.insert(target) {
+            return;
+        }
+        for involved_target in target.deps() {
+            flatten_rec(involved_target, involved_targets);
+        }
+    }
+
+    let mut resolved_targets = BTreeSet::new();
+
+    for target in targets {
+        flatten_rec(*target, &mut resolved_targets);
+    }
+
+    resolved_targets
+}
+
+/// Checks whether the job is involved, depending on whether the target has a job for the current
+/// job type, plus an additional filter based on the main job type.
+/// The additional filter is currently needed because linting and running tests on TS and WASM targets
+/// require all build steps to be done on them and their dependencies.
+///
+/// * `target`: Job Target
+/// * `current_job`: Current job type to check if it has a job for the given target
+/// * `main_job`: Main job type, which is used for the additional filter
+fn is_job_involved(target: Target, current_job: JobType, main_job: &JobType) -> bool {
+    // This filter handles the special cases of adding build steps for TS and WASM lints and tests
+    // and removes those jobs from the non-involved targets
+    let additional_filter = || {
+        match main_job {
+            // Linting for TS and WASM targets requires that those targets are built
+            JobType::Lint => match target {
+                // Linting for Rust targets doesn't need any build and must be excluded in the additional filter.
+                Target::Core | Target::Cli | Target::Updater => {
+                    matches!(current_job, JobType::Lint)
+                }
+                // TS and Bindings targets need to be built with all their dependencies to perform the
+                // needed type checks on TypeScript
+                Target::Shared
+                | Target::Binding
+                | Target::Wrapper
+                | Target::Wasm
+                | Target::Client
+                | Target::App => true,
+            },
+
+            // Tests for TS and WASM targets require that those targets are built
+            JobType::Test { .. } => match target {
+                // Running tests for Rust jobs doesn't require building them.
+                Target::Core | Target::Cli | Target::Updater => {
+                    matches!(current_job, JobType::Test { .. })
+                }
+
+                // Only TS and WASM Bindings have tests that require building them and their dependencies
+                // before running the actual tests.
+                Target::Wrapper | Target::Wasm => true,
+
+                // Shared and Bindings don't have tests but they should be built for Wrapper and Wasm
+                // tests
+                Target::Shared | Target::Binding => {
+                    assert!(
+                        !matches!(current_job, JobType::Test { .. }),
+                        "Shared and Bindings targets don't have test jobs currently"
+                    );
+                    true
+                }
+
+                // Client and App don't have tests and they are not dependencies of other TS and WASM
+                // targets.
+                Target::Client | Target::App => {
+                    assert!(
+                        !matches!(current_job, JobType::Test { .. }),
+                        "Client and App targets don't have test jobs currently"
+                    );
+                    false
+                }
+            },
+            JobType::Clean
+            | JobType::Install { .. }
+            | JobType::Build { .. }
+            | JobType::AfterBuild { .. }
+            | JobType::Run { ..
} => true, + } + }; + + target.has_job(current_job) && additional_filter() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn flatten_clean_job() { + let expected_clean = BTreeSet::from([JobType::Clean]); + assert_eq!(flatten_jobs(JobType::Clean), expected_clean); + } + + #[test] + fn flatten_lint_job() { + let production = false; + let expected_lint = BTreeSet::from([ + JobType::Lint, + JobType::Install { production }, + JobType::Build { production }, + JobType::AfterBuild { production }, + ]); + assert_eq!(flatten_jobs(JobType::Lint), expected_lint); + } + + #[test] + fn flatten_install_job() { + let production = false; + let expected_install = BTreeSet::from([JobType::Install { production }]); + assert_eq!( + flatten_jobs(JobType::Install { production }), + expected_install + ); + } + + #[test] + fn flatten_build_job() { + let production = false; + let expected_build = BTreeSet::from([ + JobType::Build { production }, + JobType::Install { production }, + JobType::AfterBuild { production }, + ]); + assert_eq!(flatten_jobs(JobType::Build { production }), expected_build); + } + + #[test] + fn flatten_test_job() { + let production = false; + let expected_test = BTreeSet::from([ + JobType::Build { production }, + JobType::Install { production }, + JobType::AfterBuild { production }, + JobType::Test { production }, + ]); + assert_eq!(flatten_jobs(JobType::Test { production }), expected_test); + } + + #[test] + fn flatten_core_target() { + let expected = BTreeSet::from([Target::Core]); + assert_eq!(flatten_targets_for_build(&[Target::Core]), expected); + } + + #[test] + fn flatten_wrapper_target() { + let expected = BTreeSet::from([Target::Shared, Target::Binding, Target::Wrapper]); + assert_eq!(flatten_targets_for_build(&[Target::Wrapper]), expected); + } + + #[test] + fn flatten_app_target() { + let expected = BTreeSet::from([ + Target::Core, + Target::Shared, + Target::Binding, + Target::Wrapper, + Target::Client, + Target::Wasm, + Target::Updater, + Target::App, + ]); + assert_eq!(flatten_targets_for_build(&[Target::App]), expected); + } + + #[test] + fn flatten_all_target() { + let expected = BTreeSet::from_iter(Target::all().to_owned()); + assert_eq!(flatten_targets_for_build(&Target::all()), expected); + } + + #[test] + fn flatten_core_client_target() { + let expected = + BTreeSet::from_iter([Target::Core, Target::Shared, Target::Wasm, Target::Client]); + assert_eq!( + flatten_targets_for_build(&[Target::Core, Target::Client]), + expected + ); + } + + #[test] + fn resolve_lint_core_cli() { + let expected = BTreeMap::from([ + (JobDefinition::new(Target::Core, JobType::Lint), Vec::new()), + (JobDefinition::new(Target::Cli, JobType::Lint), Vec::new()), + ]); + + assert_eq!( + expected, + resolve(&[Target::Core, Target::Cli], JobType::Lint) + ); + } + + #[test] + fn resolve_test_core() { + let production = false; + let expected = BTreeMap::from([( + JobDefinition::new(Target::Core, JobType::Test { production }), + vec![], + )]); + + assert_eq!( + expected, + resolve(&[Target::Core], JobType::Test { production }) + ); + } + + #[test] + fn resolve_build_binding() { + let production = false; + let expected = BTreeMap::from([ + ( + JobDefinition::new(Target::Shared, JobType::Install { production }), + vec![], + ), + ( + JobDefinition::new(Target::Shared, JobType::Build { production }), + vec![JobDefinition::new( + Target::Shared, + JobType::Install { production }, + )], + ), + ( + JobDefinition::new(Target::Binding, JobType::Install { production }), + vec![ + 
JobDefinition::new(Target::Shared, JobType::Install { production }), + JobDefinition::new(Target::Shared, JobType::Build { production }), + ], + ), + ( + JobDefinition::new(Target::Binding, JobType::Build { production }), + vec![ + JobDefinition::new(Target::Shared, JobType::Install { production }), + JobDefinition::new(Target::Shared, JobType::Build { production }), + JobDefinition::new(Target::Binding, JobType::Install { production }), + ], + ), + ( + JobDefinition::new(Target::Binding, JobType::AfterBuild { production }), + vec![ + JobDefinition::new(Target::Binding, JobType::Install { production }), + JobDefinition::new(Target::Binding, JobType::Build { production }), + ], + ), + ]); + + assert_eq!( + expected, + resolve(&[Target::Binding], JobType::Build { production }) + ); + } + + #[test] + /// Resolves build for all targets and checks some cases in the dependencies-tree since the + /// tree is too huge to be tested one by one. + fn resolve_build_all_fuzzy() { + let production = false; + + let tree = resolve(&Target::all(), JobType::Build { production }); + + assert!( + tree.get(&JobDefinition::new( + Target::Cli, + JobType::Build { production } + )) + .is_some_and(|dep| dep.is_empty()), + "Build CLI should have no dependencies" + ); + + assert!( + tree.get(&JobDefinition::new( + Target::App, + JobType::Build { production } + )) + .is_some_and(|dep| dep.contains(&JobDefinition::new( + Target::Shared, + JobType::Build { production } + ))), + "Build App should have dependency on shared build" + ); + + assert!( + tree.get(&JobDefinition::new( + Target::Wrapper, + JobType::Build { production } + )) + .is_some_and(|dep| dep.contains(&JobDefinition::new( + Target::Binding, + JobType::AfterBuild { production } + ))), + "Build Wrapper should have dependency on Binding AfterBuild" + ); + + assert!( + tree.get(&JobDefinition::new( + Target::Wrapper, + JobType::Build { production } + )) + .is_some_and(|dep| dep.contains(&JobDefinition::new( + Target::Binding, + JobType::AfterBuild { production } + ))), + "Build Wrapper should have dependency on Binding AfterBuild" + ); + + assert!( + tree.get(&JobDefinition::new( + Target::App, + JobType::Install { production } + )) + .is_some_and(|dep| dep.contains(&JobDefinition::new( + Target::Wasm, + JobType::Build { production } + ))), + "Install App should have dependency on Wasm Build" + ); + } +} diff --git a/cli/src/jobs_runner/mod.rs b/cli/src/jobs_runner/mod.rs new file mode 100644 index 0000000000..b1527b1f1b --- /dev/null +++ b/cli/src/jobs_runner/mod.rs @@ -0,0 +1,181 @@ +mod job_definition; +pub mod jobs_resolver; + +use std::collections::BTreeMap; + +pub use job_definition::JobDefinition; +use tokio::sync::mpsc::{unbounded_channel, UnboundedSender}; + +use crate::{ + checksum_records::{ChecksumCompareResult, ChecksumRecords}, + job_type::JobType, + spawner::SpawnResult, + target::Target, + tracker::get_tracker, +}; + +use anyhow::Result; + +type SpawnResultsCollection = Vec>; + +#[derive(Debug, Clone)] +/// Represents the current state of the task. 
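+/// Editor's note (derived from `run()` and `spawn_jobs()` below): a job moves
+/// linearly through `Awaiting(deps)` -> `Running` -> `Done`.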
+enum JobPhase {
+    /// Job is waiting for the jobs in the list to finish
+    Awaiting(Vec<JobDefinition>),
+    /// Job is currently running
+    Running,
+    /// Job is finished
+    Done,
+}
+
+/// Runs all the needed tasks for the given targets and the main job asynchronously,
+/// returning a list of the task results
+pub async fn run(targets: &[Target], main_job: JobType) -> Result<SpawnResultsCollection> {
+    let jobs_tree = jobs_resolver::resolve(targets, main_job);
+
+    let tracker = get_tracker();
+    tracker
+        .register_all(jobs_tree.keys().cloned().collect())
+        .await?;
+
+    let mut jobs_status: BTreeMap<JobDefinition, JobPhase> = jobs_tree
+        .into_iter()
+        .map(|(job, deps)| (job, JobPhase::Awaiting(deps)))
+        .collect();
+
+    let (tx, mut rx) = unbounded_channel::<(JobDefinition, Result<SpawnResult>)>();
+
+    let mut checksum_compare_map = BTreeMap::new();
+    let mut failed_jobs = Vec::new();
+
+    // Spawn the jobs that have no dependencies first
+    spawn_jobs(
+        tx.clone(),
+        &mut jobs_status,
+        &mut checksum_compare_map,
+        &failed_jobs,
+    )?;
+
+    let mut results = Vec::new();
+
+    while let Some((job_def, result)) = rx.recv().await {
+        // Update job state
+        jobs_status
+            .entry(job_def)
+            .and_modify(|phase| *phase = JobPhase::Done);
+
+        if result.is_err() {
+            failed_jobs.push(job_def.target);
+        }
+
+        results.push(result);
+
+        let mut all_done = true;
+
+        // Remove the finished job from the waiting lists of the awaiting jobs,
+        // checking at the same time whether all jobs are done.
+        for (_, mut phase) in jobs_status.iter_mut() {
+            let deps = match &mut phase {
+                JobPhase::Awaiting(deps) => {
+                    all_done = false;
+                    deps
+                }
+                JobPhase::Running => {
+                    all_done = false;
+                    continue;
+                }
+                JobPhase::Done => continue,
+            };
+
+            if let Some(dep_idx) = deps.iter().position(|j| *j == job_def) {
+                let _ = deps.swap_remove(dep_idx);
+            }
+        }
+
+        if all_done {
+            return Ok(results);
+        }
+
+        // Spawn more jobs after updating the jobs_status tree.
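+        // Editor's note: together with the dependency-removal loop above, this forms an
+        // incremental topological scheduler: every finished job is erased from the waiting
+        // lists, and any job whose list becomes empty is spawned on the next pass.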
+ spawn_jobs( + tx.clone(), + &mut jobs_status, + &mut checksum_compare_map, + &failed_jobs, + )?; + } + + Ok(results) +} + +/// Iterate over jobs states maps and spawn the jobs that aren't waiting for any jobs to be done +fn spawn_jobs( + sender: UnboundedSender<(JobDefinition, Result)>, + jobs_status: &mut BTreeMap, + checksum_compare_map: &mut BTreeMap, + failed_jobs: &[Target], +) -> Result<()> { + for (job_def, phase) in jobs_status.iter_mut() { + let JobPhase::Awaiting(deps) = phase else { + continue; + }; + + if !deps.is_empty() { + continue; + } + + let skip = if job_def.job_type.is_part_of_build() { + // Skip if any prequel job of this target has failed + if failed_jobs.contains(&job_def.target) { + true + } + // Check if target is already registered and checked + else if let Some(&chksm_compare) = checksum_compare_map.get(&job_def.target) { + chksm_compare == ChecksumCompareResult::Same + } else { + // Calculate target checksums and compare it the persisted one + let prod = job_def.job_type.is_production().is_some_and(|prod| prod); + let checksum_rec = ChecksumRecords::get(prod)?; + checksum_rec.register_job(job_def.target)?; + + // Check if all dependent jobs are skipped, then do the checksum calculations + if job_def.target.deps().iter().all(|dep| { + checksum_compare_map + .get(dep) + .is_some_and(|&chksm| chksm == ChecksumCompareResult::Same) + }) { + let chksm_compare = checksum_rec.compare_checksum(job_def.target)?; + checksum_compare_map.insert(job_def.target, chksm_compare); + chksm_compare == ChecksumCompareResult::Same + } else { + false + } + } + } else { + false + }; + + // Spawn the job + let sender = sender.clone(); + let job_def = *job_def; + tokio::spawn(async move { + let result = job_def.run(skip).await; + + let result = match result { + Some(res) => res, + None => panic!("Spawned jobs already resolved and must have return value."), + }; + + if sender.send((job_def, result)).is_err() { + let tracker = get_tracker(); + tracker.print(format!( + "Error: Job results can't be sent to receiver. 
Job: {job_def:?}" + )); + }; + }); + + *phase = JobPhase::Running; + } + Ok(()) +} diff --git a/cli/src/location.rs b/cli/src/location.rs new file mode 100644 index 0000000000..a18ba75095 --- /dev/null +++ b/cli/src/location.rs @@ -0,0 +1,50 @@ +use anyhow::{bail, Context, Error}; +use git2::Repository; + +use std::{env::current_dir, path::PathBuf, sync::OnceLock}; + +pub static LOCATION: OnceLock = OnceLock::new(); + +#[derive(Clone, Debug)] +pub struct Location { + pub root: PathBuf, +} + +impl Location { + fn new() -> Result { + let current_dir = current_dir()?; + let repo = + Repository::discover(current_dir).context("Fail to find chipmunk root directory")?; + let Some(root) = repo.workdir() else { + bail!("Fail to find chipmunk root directory") + }; + + // Make sure we are in the chipmunk repository + // Note: This check will fail if the structure of the repo changes + if root.join("application").is_dir() && root.join("developing").is_dir() { + Ok(Self { root: root.into() }) + } else { + bail!("Fail to find project's root location") + } + } +} + +/// Get the path of the root repository +pub fn get_root() -> &'static PathBuf { + &LOCATION + .get() + .expect("Developer Error: Location is initialized in main function") + .root +} + +/// Initial location instance to get the path of the root repository +/// return `Error` If the program isn't invoked inside chipmunk repository +pub fn init_location() -> Result<(), Error> { + assert!(LOCATION.get().is_none()); + + let location = Location::new()?; + LOCATION + .set(location) + .expect("Developer Error: init location can't be called more than once"); + Ok(()) +} diff --git a/cli/src/main.rs b/cli/src/main.rs new file mode 100644 index 0000000000..84b0fcb615 --- /dev/null +++ b/cli/src/main.rs @@ -0,0 +1,239 @@ +mod app_runner; +mod checksum_records; +mod cli_args; +mod dev_environment; +mod dev_tools; +mod fstools; +mod job_type; +mod jobs_runner; +mod location; +mod print_dot; +mod shell_completion; +mod spawner; +mod target; +mod tracker; + +use anyhow::{bail, Error}; +use checksum_records::ChecksumRecords; +use clap::Parser; +use cli_args::{CargoCli, Command}; +use dev_environment::{print_env_info, resolve_dev_tools}; +use job_type::JobType; +use location::init_location; +use spawner::SpawnResult; +use std::{ + fs::File, + io::{self, stdout, Stdout}, + path::PathBuf, +}; +use target::Target; +use tracker::get_tracker; + +use crate::cli_args::EnvironmentCommand; + +#[derive(Debug)] +pub enum ReportOptions { + None, + Stdout(Stdout), + File(PathBuf, File), +} + +#[tokio::main] +async fn main() -> Result<(), Error> { + let CargoCli::Chipmunk(cli) = CargoCli::parse(); + + init_location()?; + + // Run the given command + let command = cli.command; + let report_opt: ReportOptions; + let (job_type, results) = match command { + Command::Environment(sub_command) => match sub_command { + EnvironmentCommand::Check => { + resolve_dev_tools()?; + println!("All needed tools for development are installed"); + return Ok(()); + } + EnvironmentCommand::Print => { + print_env_info(); + return Ok(()); + } + }, + Command::PrintDot { all_jobs } => { + if all_jobs { + print_dot::print_dependencies_jobs(); + } else { + print_dot::print_dependencies_targets(); + } + return Ok(()); + } + Command::Lint { target, report } => { + resolve_dev_tools()?; + report_opt = get_report_option(report)?; + let targets = get_targets_or_default(target); + let results = jobs_runner::run(&targets, JobType::Lint).await?; + (JobType::Lint, results) + } + Command::Build { + target, + 
production, + report, + } => { + resolve_dev_tools()?; + report_opt = get_report_option(report)?; + let targets = get_targets_or_default(target); + let results = jobs_runner::run(&targets, JobType::Build { production }).await?; + (JobType::Build { production }, results) + } + Command::Clean { target, report } => { + resolve_dev_tools()?; + report_opt = get_report_option(report)?; + let targets = get_targets_or_default(target); + let results = jobs_runner::run(&targets, JobType::Clean).await?; + (JobType::Clean, results) + } + Command::Test { + target, + production, + report, + } => { + resolve_dev_tools()?; + report_opt = get_report_option(report)?; + let targets = get_targets_or_default(target); + let results = jobs_runner::run(&targets, JobType::Test { production }).await?; + (JobType::Test { production }, results) + } + Command::Run { production } => { + resolve_dev_tools()?; + report_opt = ReportOptions::None; + let results = jobs_runner::run(&[Target::App], JobType::Build { production }).await?; + (JobType::Run { production }, results) + } + Command::ResetChecksum { production } => { + ChecksumRecords::remove_records_file(production)?; + println!( + "Checksum-Records for {} has been reset", + if production { + "production" + } else { + "development" + } + ); + + return Ok(()); + } + Command::ShellCompletion { shell } => { + shell_completion::generate_completion(shell)?; + + return Ok(()); + } + }; + + // Shutdown and show results & report + let tracker = get_tracker(); + tracker.shutdown().await?; + let mut success: bool = true; + for (idx, res) in results.iter().enumerate() { + match res { + Ok(status) => { + let print_err = match &report_opt { + ReportOptions::None => true, + ReportOptions::Stdout(stdout) => { + write_report(status, stdout)?; + false + } + ReportOptions::File(path, file) => { + write_report(status, file)?; + if idx == results.len() - 1 { + let full_path = path.canonicalize().unwrap_or_else(|_| path.to_owned()); + println!("Report is written to '{}'", full_path.display()); + } + false + } + }; + + if !status.status.success() { + if print_err { + eprintln!("Failed with errors"); + eprintln!("{}:\n{}", status.job, status.report.join("")); + eprintln!( + "---------------------------------------------------------------------" + ); + } + success = false; + } + } + Err(err) => { + eprintln!("Builder error: {:?}", err); + eprintln!("---------------------------------------------------------------------"); + success = false; + } + } + } + if !success { + bail!("Some task were failed") + }; + + ChecksumRecords::update_and_save(job_type)?; + + if matches!(job_type, JobType::Run { .. 
}) { + println!("Starting chipmunk..."); + let status = app_runner::run_app().await?; + if !status.success() { + bail!("Error: Chipmunk Exited with the Code {status}"); + } + } + Ok(()) +} + +fn get_targets_or_default(targets: Option>) -> Vec { + if let Some(mut list) = targets { + list.dedup(); + list + } else { + Target::all().to_vec() + } +} + +fn get_report_option(report_argument: Option>) -> Result { + match report_argument { + None => Ok(ReportOptions::None), + Some(None) => Ok(ReportOptions::Stdout(stdout())), + Some(Some(path)) => { + let file = File::create(&path)?; + Ok(ReportOptions::File(path, file)) + } + } +} + +fn write_report(spawn_result: &SpawnResult, mut writer: impl io::Write) -> Result<(), io::Error> { + writeln!(writer)?; + writeln!( + writer, + "====================================================" + )?; + writeln!(writer)?; + + let status = match (spawn_result.skipped, spawn_result.status.success()) { + (Some(true), _) => "been skipped", + (_, true) => "succeeded", + (_, false) => "failed", + }; + + writeln!(writer, "Job '{}' has {status}", spawn_result.job)?; + writeln!(writer, "Command: {}", spawn_result.cmd)?; + if spawn_result.skipped.is_some_and(|skipped| skipped) { + return Ok(()); + } + + writeln!(writer, "Logs:")?; + for line in spawn_result.report.iter() { + writer.write_all(line.as_bytes())?; + + if !line.ends_with('\n') { + writeln!(writer)?; + } + } + + Ok(()) +} diff --git a/cli/src/print_dot.rs b/cli/src/print_dot.rs new file mode 100644 index 0000000000..ef8c4ab575 --- /dev/null +++ b/cli/src/print_dot.rs @@ -0,0 +1,46 @@ +use crate::{ + job_type::JobType, + jobs_runner::{jobs_resolver, JobDefinition}, + target::Target, +}; + +/// Prints an overview of targets dependencies in print-dot format for `Graphviz` +pub fn print_dependencies_targets() { + println!("digraph dependencies {{"); + + for target in Target::all() { + for dep in target.deps() { + println!(r#" "{target}" -> "{dep}""#); + } + } + + println!("}}"); +} + +/// Prints an overview of jobs dependencies in print-dot format for `Graphviz` +pub fn print_dependencies_jobs() { + let deps_tree = jobs_resolver::resolve(Target::all(), JobType::Build { production: false }); + println!("digraph dependencies {{"); + + for (job, deps) in deps_tree { + let job_txt = job_to_dot_string(&job); + for dep in deps { + println!(r#" "{job_txt}" -> "{}""#, job_to_dot_string(&dep)); + } + } + + println!("}}"); +} + +fn job_to_dot_string(job_def: &JobDefinition) -> String { + let job_type = match job_def.job_type { + JobType::Install { .. } => "Install", + JobType::Build { .. } => "Build", + JobType::AfterBuild { .. } => "After Build (Copy & Reinstall)", + JobType::Clean | JobType::Lint | JobType::Test { .. } | JobType::Run { .. } => { + unreachable!("Only build-related jobs are included in dot print") + } + }; + + format!("{}: {job_type}", job_def.target) +} diff --git a/cli/src/shell_completion.rs b/cli/src/shell_completion.rs new file mode 100644 index 0000000000..d807a06367 --- /dev/null +++ b/cli/src/shell_completion.rs @@ -0,0 +1,19 @@ +use std::io; + +use anyhow::Context; +use clap::CommandFactory; +use clap_complete::{generate, Shell}; + +use crate::cli_args::CargoCli; + +/// Generates shell complition for the given shell printing them to stdout +pub fn generate_completion(shell: Shell) -> anyhow::Result<()> { + let mut cmd = CargoCli::command(); + let bin_name = cmd + .get_bin_name() + .context("Error while getting bin name")? 
+        .to_owned();
+
+    generate(shell, &mut cmd, bin_name, &mut io::stdout());
+    Ok(())
+}
diff --git a/cli/src/spawner.rs b/cli/src/spawner.rs
new file mode 100644
index 0000000000..7a9c3ccf97
--- /dev/null
+++ b/cli/src/spawner.rs
@@ -0,0 +1,222 @@
+use crate::{
+    jobs_runner::JobDefinition, location::get_root, target::ProcessCommand, tracker::get_tracker,
+};
+use anyhow::{bail, Context};
+use core::panic;
+use futures_lite::{future, FutureExt};
+use std::{
+    path::PathBuf,
+    process::{ExitStatus, Stdio},
+};
+use tokio::{
+    io::{AsyncBufReadExt, BufReader},
+    process::Command,
+};
+
+#[derive(Clone, Debug)]
+pub struct SpawnResult {
+    pub report: Vec<String>,
+    pub status: ExitStatus,
+    pub job: String,
+    pub cmd: String,
+    pub skipped: Option<bool>,
+}
+
+impl SpawnResult {
+    /// Create a spawn result for multiple file system commands
+    pub fn create_for_fs(job: String, report: Vec<String>) -> Self {
+        SpawnResult {
+            report,
+            job,
+            status: ExitStatus::default(),
+            cmd: "Multiple file system commands".into(),
+            skipped: None,
+        }
+    }
+
+    /// Create a spawn result for jobs that have been skipped
+    pub fn create_for_skipped(job: String, cmd: String) -> Self {
+        SpawnResult {
+            report: Vec::new(),
+            job,
+            status: ExitStatus::default(),
+            cmd,
+            skipped: Some(true),
+        }
+    }
+
+    /// Appends another result to the current one, producing a combined result from them
+    pub fn append(&mut self, other: SpawnResult) {
+        if !other.report.is_empty() {
+            self.report.extend(
+                [
+                    String::default(),
+                    String::from("-------------------------------------------------------------------------------"),
+                    String::default()
+                ]);
+            self.report.extend(other.report);
+        }
+
+        self.job = format!("{} & {}", self.job, other.job);
+        self.status = match (self.status.success(), other.status.success()) {
+            (_, true) => self.status,
+            (false, false) => self.status,
+            (_, false) => other.status,
+        };
+
+        self.cmd = format!("{} \n {}", self.cmd, other.cmd);
+        self.skipped = match (self.skipped, other.skipped) {
+            (Some(false), _) | (_, Some(false)) => Some(false),
+            (Some(true), _) | (_, Some(true)) => Some(true),
+            _ => None,
+        };
+    }
+}
+
+#[derive(Debug, Clone, Default)]
+pub(crate) struct SpawnOptions {
+    pub suppress_msg: bool,
+    pub has_skip_info: bool,
+}
+
+/// Spawns and runs a job asynchronously, updating the progress bar when job info is available
+pub async fn spawn(
+    job_def: JobDefinition,
+    command: ProcessCommand,
+    cwd: Option<PathBuf>,
+    environment_vars: impl IntoIterator<Item = (String, String)>,
+    opts: Option<SpawnOptions>,
+) -> Result<SpawnResult, anyhow::Error> {
+    let opts = opts.unwrap_or_default();
+    let cwd = cwd.unwrap_or_else(|| get_root().clone());
+    let mut combined_env_vars = vec![(String::from("TERM"), String::from("xterm-256color"))];
+    combined_env_vars.extend(environment_vars);
+
+    let tracker = get_tracker();
+
+    let command_result = Command::new(&command.cmd)
+        .current_dir(&cwd)
+        .args(&command.args)
+        .envs(combined_env_vars)
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
+        .kill_on_drop(true)
+        .spawn()
+        .with_context(|| {
+            format!(
+                "Error while running the command '{}'\nwith arguments: {:?}",
+                command.cmd, command.args
+            )
+        });
+
+    let mut child = command_result?;
+
+    let mut report_lines: Vec<String> = vec![];
+    let drain_stdout_stderr = {
+        let stdout = child.stdout.take().expect(
+            "Developer Error: Stdout is implicitly set in command definition from which the child is spawned",
+        );
+        let stderr = child.stderr.take().expect(
+            "Developer Error: Stderr is implicitly set in command definition from which the child is spawned",
+        );
+        let storage_report = &mut report_lines;
+        async move {
+            let mut
stdout_buf = BufReader::new(stdout); + let mut stderr_buf = BufReader::new(stderr); + loop { + let mut stdout_line = String::new(); + let mut stderr_line = String::new(); + tokio::select! { + stdout_read_result = stdout_buf.read_line(&mut stdout_line) => { + let stdout_read_bytes = stdout_read_result?; + if stdout_read_bytes == 0 { + break; + } else { + if !opts.suppress_msg { + tracker.msg(job_def, stdout_line.clone()); + } + tracker.progress(job_def, None); + storage_report.push(stdout_line); + } + } + stderr_read_result = stderr_buf.read_line(&mut stderr_line) => { + let stderr_read_bytes = stderr_read_result?; + if stderr_read_bytes == 0 { + break; + } else { + tracker.progress(job_def, None); + if !stderr_line.trim().is_empty() { + storage_report.push(stderr_line); + } + } + + } + } + } + + future::pending::<()>().await; + Ok::, anyhow::Error>(None) + } + }; + + let status = drain_stdout_stderr + .or(async move { Ok(Some(child.wait().await?)) }) + .await?; + + if let Some(status) = status { + let skipped = if opts.has_skip_info { + Some(false) + } else { + None + }; + + Ok(SpawnResult { + report: report_lines, + status, + job: job_def.job_title(), + cmd: command.to_string(), + skipped, + }) + } else { + bail!("Fail to get exist status of spawned command") + } +} + +/// Suspend the progress bars and run the giving blocking command using `Stdio::inherit()` +/// This is used with commands that doesn't work with `Stdio::piped()` +pub async fn spawn_blocking( + job_def: JobDefinition, + command: ProcessCommand, + cwd: Option, + environment_vars: impl IntoIterator, +) -> Result { + let cwd = cwd.unwrap_or_else(|| get_root().clone()); + + let mut combined_env_vars = vec![(String::from("TERM"), String::from("xterm-256color"))]; + combined_env_vars.extend(environment_vars); + + let mut child = std::process::Command::new(&command.cmd); + child.current_dir(&cwd); + child.args(&command.args); + child.envs(combined_env_vars); + + let tracker = get_tracker(); + + let status = tracker.suspend_and_run(child).await?; + + Ok(SpawnResult { + report: Vec::new(), + status, + job: job_def.job_title(), + cmd: command.to_string(), + skipped: None, + }) +} + +/// This spawns a new task and return immediately showing that the job has been skipped +pub async fn spawn_skip(job_def: JobDefinition, command: String) -> anyhow::Result { + Ok(SpawnResult::create_for_skipped( + job_def.job_title(), + command, + )) +} diff --git a/cli/src/target/app.rs b/cli/src/target/app.rs new file mode 100644 index 0000000000..94e0871496 --- /dev/null +++ b/cli/src/target/app.rs @@ -0,0 +1,54 @@ +use std::fs; + +use anyhow::{bail, Context}; + +use crate::{fstools, jobs_runner::JobDefinition, spawner::SpawnResult}; + +use super::{client::get_dist_path, Target}; + +pub async fn copy_client_to_app(job_def: JobDefinition) -> Result { + let mut report_logs = Vec::new(); + let prod = job_def.job_type.is_production().unwrap_or(false); + let src = get_dist_path(prod); + let dest = Target::App.cwd().join("dist"); + if !src.exists() { + bail!("Not found: {}", src.display()); + } + if !dest.exists() { + let msg = format!("creating directory: {}", dest.display()); + report_logs.push(msg); + + fs::create_dir(&dest) + .with_context(|| format!("Error while creating directory: {}", dest.display()))?; + } + let prev = dest.join("client"); + if prev.exists() { + let msg = format!("removing directory: {}", prev.display()); + report_logs.push(msg); + + fstools::rm_folder(job_def, &prev)?; + } + + fstools::cp_folder(job_def, src.clone(), dest.clone(), 
&mut report_logs).await?; + + let rename_from = dest.join( + src.file_name() + .context("Fail to parse client artifacts path")?, + ); + let rename_to = dest.join("client"); + + let msg = format!( + "renaming '{}' to '{}'", + rename_from.display(), + rename_to.display() + ); + report_logs.push(msg); + + std::fs::rename(&rename_from, &rename_to) + .with_context(|| format!("Error while renaming {}", rename_from.display()))?; + + Ok(SpawnResult::create_for_fs( + "Copy App Build Artifacts".into(), + report_logs, + )) +} diff --git a/cli/src/target/binding.rs b/cli/src/target/binding.rs new file mode 100644 index 0000000000..d92a11d2a4 --- /dev/null +++ b/cli/src/target/binding.rs @@ -0,0 +1,82 @@ +use std::fs; + +use anyhow::{bail, Context}; + +use crate::{fstools, jobs_runner::JobDefinition, spawner::SpawnResult}; + +use super::{ProcessCommand, Target}; + +pub fn get_build_cmd(prod: bool) -> anyhow::Result { + let mut path = Target::Wrapper.cwd(); + path.push("node_modules"); + path.push(".bin"); + + if cfg!(windows) { + // The script files can get the extension '*.cmd' on Windows + let electron_build_env_path = which::which_in("electron-build-env", Some(&path), &path) + .context("Error while resolving electron bin path on Windows")?; + path = electron_build_env_path; + } else { + path.push("electron-build-env"); + } + + let mut args = vec![String::from("nj-cli"), String::from("build")]; + + if prod { + args.push("--release".into()); + } + Ok(ProcessCommand::new( + path.to_string_lossy().to_string(), + args, + )) +} + +pub fn copy_index_node(job_def: JobDefinition) -> Result { + let mut report_logs = Vec::new(); + + // *** Copy `index.node` from rs to ts bindings dist *** + report_logs.push(String::from("Copying `index.node` to ts-bindings dist...")); + + let src_file = Target::Binding.cwd().join("dist").join("index.node"); + if !src_file.exists() { + bail!( + "Error while copying `rs-bindings`. 
Err: Not found: {}", + src_file.to_string_lossy() + ); + } + + let ts_dist_native_dir = Target::Wrapper.cwd().join("dist").join("native"); + if !ts_dist_native_dir.exists() { + let msg = format!("creating directory: {}", ts_dist_native_dir.display()); + report_logs.push(msg); + + fs::create_dir_all(&ts_dist_native_dir).with_context(|| { + format!( + "Error while creating directory: {}", + ts_dist_native_dir.display() + ) + })?; + } + + fstools::cp_file( + job_def, + src_file.clone(), + ts_dist_native_dir.join("index.node"), + &mut report_logs, + )?; + + // *** Copy `index.node` from rs to ts bindings src native (dir-tests) *** + report_logs.push(String::from( + "Copying `index.node` to ts-bindings src native...", + )); + + let dir_tests = Target::Wrapper.cwd().join("src").join("native"); + let mod_file = dir_tests.join("index.node"); + + fstools::cp_file(job_def, src_file, mod_file, &mut report_logs)?; + + Ok(SpawnResult::create_for_fs( + "Copying `index.node` from rs to ts bindings".into(), + report_logs, + )) +} diff --git a/cli/src/target/cli.rs b/cli/src/target/cli.rs new file mode 100644 index 0000000000..afa730fe85 --- /dev/null +++ b/cli/src/target/cli.rs @@ -0,0 +1,21 @@ +use crate::{ + dev_tools::DevTool, + target::{ProcessCommand, Target}, +}; + +use super::TestSpawnCommand; + +pub fn get_test_cmds(production: bool) -> Vec { + let cargo_path = DevTool::Cargo.path(); + + let mut args = vec![String::from("+stable"), String::from("test")]; + if production { + args.push("-r".into()); + } + args.push("--color".into()); + args.push("always".into()); + + let cmd = ProcessCommand::new(cargo_path.to_string_lossy().to_string(), args); + + vec![TestSpawnCommand::new(cmd, Target::Cli.cwd(), None)] +} diff --git a/cli/src/target/client.rs b/cli/src/target/client.rs new file mode 100644 index 0000000000..f80047ff2c --- /dev/null +++ b/cli/src/target/client.rs @@ -0,0 +1,10 @@ +use std::path::PathBuf; + +use super::Target; + +pub fn get_dist_path(prod: bool) -> PathBuf { + Target::Client + .cwd() + .join("dist") + .join(if prod { "release" } else { "debug" }) +} diff --git a/cli/src/target/core.rs b/cli/src/target/core.rs new file mode 100644 index 0000000000..19794febfc --- /dev/null +++ b/cli/src/target/core.rs @@ -0,0 +1,21 @@ +use crate::{ + dev_tools::DevTool, + target::{ProcessCommand, Target}, +}; + +use super::TestSpawnCommand; + +pub fn get_test_cmds(production: bool) -> Vec { + let cargo_path = DevTool::Cargo.path(); + + let mut args = vec![String::from("+stable"), String::from("test")]; + if production { + args.push("-r".into()); + } + args.push("--color".into()); + args.push("always".into()); + + let cmd = ProcessCommand::new(cargo_path.to_string_lossy().to_string(), args); + + vec![TestSpawnCommand::new(cmd, Target::Core.cwd(), None)] +} diff --git a/cli/src/target/mod.rs b/cli/src/target/mod.rs new file mode 100644 index 0000000000..bb2c7355d9 --- /dev/null +++ b/cli/src/target/mod.rs @@ -0,0 +1,590 @@ +use anyhow::bail; +use clap::ValueEnum; +use futures::future::join_all; +use std::{fmt::Display, iter, path::PathBuf, str::FromStr}; +use tokio::fs; + +use crate::{ + checksum_records::ChecksumRecords, + dev_tools::DevTool, + fstools, + job_type::JobType, + jobs_runner::JobDefinition, + location::get_root, + spawner::{spawn, spawn_skip, SpawnOptions, SpawnResult}, +}; + +use target_kind::TargetKind; + +mod app; +mod binding; +mod cli; +mod client; +mod core; +mod target_kind; +mod updater; +mod wasm; +mod wrapper; + +#[derive(Debug, ValueEnum, Clone, Copy, Hash, PartialEq, Eq, 
PartialOrd, Ord)] +// * NOTE: The order of targets must match the running-order between them because it's used for +// solving their dependencies-graph using BTreeMap +// +// * NOTE: We provide all targets with match arms without using wild-card matching nor +// `matches!()` macro to keep the compiler assistance when adding new targets. +pub enum Target { + /// Represents the path `application/apps/indexer` + Core, + /// Represents the path `application/platform` + Shared, + /// Represents the path `application/apps/rustcore/rs-bindings` + Binding, + /// Represents the path `application/apps/rustcore/ts-bindings` + Wrapper, + /// Represents the path `application/apps/rustcore/wasm-bindings` + Wasm, + /// Represents the path `application/client` + Client, + /// Represents the path `application/apps/precompiled/updater + Updater, + /// Represents the path `application/holder` + App, + /// Represents the path `cli` + Cli, +} + +#[derive(Debug, Clone)] +/// Represents a command to run with `process::Command` and its arguments +pub struct ProcessCommand { + pub cmd: String, + pub args: Vec, +} + +impl ProcessCommand { + pub fn new(cmd: String, args: Vec) -> Self { + Self { cmd, args } + } +} + +impl Display for ProcessCommand { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{} {}", self.cmd, self.args.join(" ")) + } +} + +#[derive(Debug, Clone)] +/// Represents command and spawns infos to run with `Target::Test` +pub struct TestSpawnCommand { + command: ProcessCommand, + cwd: PathBuf, + spawn_opts: Option, +} + +impl TestSpawnCommand { + fn new(command: ProcessCommand, cwd: PathBuf, spawn_opts: Option) -> Self { + Self { + command, + cwd, + spawn_opts, + } + } +} + +impl std::fmt::Display for Target { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!( + f, + "{}", + match self { + Target::Core => "Core", + Target::Wrapper => "Wrapper", + Target::Binding => "Binding", + Target::Cli => "Cli", + Target::Client => "Client", + Target::Shared => "Shared", + Target::App => "App", + Target::Wasm => "Wasm", + Target::Updater => "Updater", + } + ) + } +} + +impl FromStr for Target { + type Err = anyhow::Error; + + fn from_str(input: &str) -> Result { + type T = Target; + + if cfg!(debug_assertions) { + // This check to remember to add the newly added enums to this function + match T::App { + T::Core => (), + T::Binding => (), + T::Wrapper => (), + T::Client => (), + T::Shared => (), + T::App => (), + T::Cli => (), + T::Wasm => (), + T::Updater => (), + }; + } + + match input { + "Core" => Ok(T::Core), + "Wrapper" => Ok(T::Wrapper), + "Binding" => Ok(T::Binding), + "Cli" => Ok(T::Cli), + "Client" => Ok(T::Client), + "Shared" => Ok(T::Shared), + "App" => Ok(T::App), + "Wasm" => Ok(T::Wasm), + "Updater" => Ok(T::Updater), + invalid => bail!("Invalid input for Parsing Target: {invalid}"), + } + } +} + +impl Target { + /// Return all the available targets + pub fn all() -> &'static [Target] { + if cfg!(debug_assertions) { + // This check to remember to add the newly added enums to this function + match Target::App { + Target::Core => (), + Target::Binding => (), + Target::Wrapper => (), + Target::Client => (), + Target::Shared => (), + Target::App => (), + Target::Cli => (), + Target::Wasm => (), + Target::Updater => (), + }; + } + + [ + Target::Binding, + Target::Cli, + Target::App, + Target::Core, + Target::Wrapper, + Target::Shared, + Target::Client, + Target::Wasm, + Target::Updater, + ] + .as_slice() + } + + /// Provides the absolute path to 
the target + pub fn cwd(self) -> PathBuf { + let root = get_root(); + let relative_path = self.relative_cwd(); + + root.join(relative_path) + } + + /// Provides the relative path to the target starting from chipmunk root directory + pub fn relative_cwd(self) -> PathBuf { + let sub_parts = match self { + Target::Core => ["application", "apps", "indexer"].iter(), + Target::Binding => ["application", "apps", "rustcore", "rs-bindings"].iter(), + Target::Wrapper => ["application", "apps", "rustcore", "ts-bindings"].iter(), + Target::Client => ["application", "client"].iter(), + Target::Shared => ["application", "platform"].iter(), + Target::App => ["application", "holder"].iter(), + Target::Cli => ["cli"].iter(), + Target::Wasm => ["application", "apps", "rustcore", "wasm-bindings"].iter(), + Target::Updater => ["application", "apps", "precompiled", "updater"].iter(), + }; + + sub_parts.collect() + } + + /// Provide the kind of the target between Rust or Type-Script + pub fn kind(self) -> TargetKind { + match self { + Target::Binding | Target::Core | Target::Cli | Target::Wasm | Target::Updater => { + TargetKind::Rs + } + Target::Client | Target::Wrapper | Target::Shared | Target::App => TargetKind::Ts, + } + } + + /// Provides the target which this target depend on + pub fn deps(self) -> Vec { + match self { + Target::Core | Target::Cli | Target::Shared | Target::Wasm | Target::Updater => { + Vec::new() + } + Target::Binding => vec![Target::Shared], + Target::Wrapper => vec![Target::Binding, Target::Shared], + Target::Client => vec![Target::Shared, Target::Wasm], + Target::App => vec![ + Target::Wrapper, + Target::Client, + Target::Core, + Target::Updater, + ], + } + } + + /// Returns if the current target has a job to the given job type + pub fn has_job(self, job_type: JobType) -> bool { + match job_type { + JobType::Lint | JobType::Clean | JobType::Build { .. } => true, + + JobType::Install { .. } => match self { + Target::Binding | Target::Client | Target::Shared | Target::App | Target::Wasm => { + true + } + Target::Core | Target::Wrapper | Target::Updater | Target::Cli => false, + }, + + JobType::AfterBuild { .. } => match self { + Target::Binding | Target::App => true, + Target::Core + | Target::Shared + | Target::Wrapper + | Target::Wasm + | Target::Client + | Target::Updater + | Target::Cli => false, + }, + JobType::Test { .. } => match self { + Target::Wrapper | Target::Core | Target::Cli | Target::Wasm => true, + Target::Shared + | Target::Binding + | Target::Client + | Target::Updater + | Target::App => false, + }, + JobType::Run { .. 
} => false,
+        }
+    }
+
+    /// Provides the command that should be used to build the target
+    pub fn build_cmd(self, prod: bool) -> anyhow::Result<ProcessCommand> {
+        let build_cmd = match self {
+            Target::Binding => binding::get_build_cmd(prod)?,
+            Target::Wasm => wasm::get_build_cmd(prod),
+            Target::Updater => updater::get_build_cmd(),
+            rest_targets => rest_targets.kind().build_cmd(prod),
+        };
+
+        Ok(build_cmd)
+    }
+
+    /// Installs the needed modules to perform the development tasks
+    ///
+    /// * `prod`: run install in production
+    /// * `skip`: skip the task
+    /// * `overridden_job_type`: override the job type to communicate with the tracker when
+    ///   install is run from within another task or when the production flag must be ignored
+    pub async fn install(
+        &self,
+        prod: bool,
+        skip: bool,
+        overridden_job_type: Option<JobType>,
+    ) -> Option<Result<SpawnResult, anyhow::Error>> {
+        if skip {
+            return Some(
+                spawn_skip(
+                    JobDefinition::new(*self, JobType::Install { production: prod }),
+                    format!("Install command for {self}"),
+                )
+                .await,
+            );
+        }
+
+        let job_type = overridden_job_type.unwrap_or(JobType::Install { production: prod });
+
+        match self {
+            // We must install ts binding tools before running rs bindings, therefore we call
+            // wrapper (ts-bindings) install in the rs bindings install.
+            // Since rs bindings is a dependency of ts bindings, we don't need to call install
+            // on ts bindings again.
+            Target::Binding => {
+                install_general(Target::Wrapper, prod, job_type, Some(Target::Binding)).await
+            }
+            Target::Wrapper => None,
+            // For app we don't need --production
+            Target::App => install_general(Target::App, false, job_type, None).await,
+            rest_targets => install_general(*rest_targets, prod, job_type, None).await,
+        }
+    }
+
+    /// Runs tests for the given target
+    pub async fn test(&self, production: bool) -> Option<Result<SpawnResult, anyhow::Error>> {
+        match self {
+            Target::Wrapper => Some(wrapper::run_test(production).await),
+            rest_targets => rest_targets.run_test_general(production).await,
+        }
+    }
+
+    /// Provides the test commands for the given target if available
+    fn test_cmds(self, production: bool) -> Option<Vec<TestSpawnCommand>> {
+        match self {
+            Target::Core => Some(core::get_test_cmds(production)),
+            Target::Cli => Some(cli::get_test_cmds(production)),
+            Target::Wasm => Some(wasm::get_test_cmds()),
+            Target::Shared
+            | Target::Binding
+            | Target::Wrapper
+            | Target::Client
+            | Target::Updater
+            | Target::App => None,
+        }
+    }
+
+    /// Runs tests using the general routine with the `test_cmds()` method
+    async fn run_test_general(
+        &self,
+        production: bool,
+    ) -> Option<Result<SpawnResult, anyhow::Error>> {
+        let test_cmds = self.test_cmds(production)?;
+
+        debug_assert!(!test_cmds.is_empty());
+
+        let job_def = JobDefinition::new(*self, JobType::Test { production });
+        let spawn_results = join_all(test_cmds.into_iter().map(|cmd| {
+            spawn(
+                job_def,
+                cmd.command,
+                Some(cmd.cwd),
+                iter::empty(),
+                cmd.spawn_opts,
+            )
+        }))
+        .await;
+
+        let mut spawn_results = spawn_results.into_iter();
+
+        let mut result = match spawn_results.next()?
{ + Ok(result) => result, + Err(err) => return Some(Err(err)), + }; + + for next_result in spawn_results { + match next_result { + Ok(next_res) => result.append(next_res), + Err(err) => return Some(Err(err)), + } + } + + Some(Ok(result)) + } + + /// Perform Linting Checks on the giving target + pub async fn check(&self) -> Result { + match self.kind() { + TargetKind::Ts => self.ts_lint().await, + TargetKind::Rs => self.clippy().await, + } + } + + /// Perform Linting the Building the giving target since linting Type-Script doesn't check for + /// compiling errors + async fn ts_lint(&self) -> Result { + let path = self.cwd(); + let job_def = JobDefinition::new(*self, JobType::Lint); + + let command = yarn_command(vec![String::from("run"), String::from("lint")]); + spawn(job_def, command, Some(path), iter::empty(), None).await + } + + /// Runs Clippy for the given rust target + async fn clippy(&self) -> Result { + let path = get_root().join(self.cwd()); + + let job_def = JobDefinition::new(*self, JobType::Lint); + + let cargo_path = DevTool::Cargo.path(); + let command = ProcessCommand::new( + cargo_path.to_string_lossy().to_string(), + vec![ + String::from("clippy"), + String::from("--color"), + String::from("always"), + String::from("--all"), + String::from("--all-features"), + String::from("--"), + String::from("-D"), + String::from("warnings"), + ], + ); + + spawn(job_def, command, Some(path), iter::empty(), None).await + } + + /// Clean the given target, removing it from the checksum tracker as well. + pub async fn reset(&self) -> anyhow::Result { + let job_def = JobDefinition::new(*self, JobType::Clean); + + // Clean doesn't differentiate between development and production, and both of them will be + // cleaned from the files when the data are persisted. + let checksum = ChecksumRecords::get(false)?; + checksum.remove_hash_if_exist(*self)?; + + let mut logs = Vec::new(); + let mut paths_to_remove = vec![self.cwd().join("dist")]; + let path = match self.kind() { + TargetKind::Ts => self.cwd().join("node_modules"), + TargetKind::Rs => self.cwd().join("target"), + }; + paths_to_remove.push(path); + + match self { + Target::Wasm => { + paths_to_remove.push(self.cwd().join("pkg")); + paths_to_remove.push(self.cwd().join("test_output")); + paths_to_remove.push(self.cwd().join("node_modules")); + } + Target::Wrapper => { + paths_to_remove.push(self.cwd().join("spec").join("build")); + let index_node_path = self.cwd().join("src").join("native").join("index.node"); + if index_node_path.exists() { + logs.push(format!("removing file: {}", index_node_path.display())); + fs::remove_file(index_node_path).await?; + } + } + Target::Core + | Target::Shared + | Target::Binding + | Target::Client + | Target::Updater + | Target::App + | Target::Cli => {} + } + + for path in paths_to_remove.into_iter().filter(|p| p.exists()) { + let remove_log = format!("removing directory {}", path.display()); + logs.push(remove_log); + + fstools::rm_folder(job_def, &path)?; + } + + let job = format!("Clean {}", self); + + Ok(SpawnResult::create_for_fs(job, logs)) + } + + /// Runs build considering the currently running builds and already finished ones as well. 
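+    ///
+    /// A minimal usage sketch (editor's addition, not in the original patch):
+    ///
+    /// ```ignore
+    /// // Build the Core target in development mode without skipping:
+    /// let result = Target::Core.build(false, false).await?;
+    /// assert!(result.status.success());
+    /// ```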
+ pub async fn build(&self, prod: bool, skip: bool) -> Result { + let path = get_root().join(self.cwd()); + let cmd = self.build_cmd(prod)?; + + let spawn_opt = SpawnOptions { + has_skip_info: true, + ..Default::default() + }; + + let job_def = JobDefinition::new(*self, JobType::Build { production: prod }); + + if skip { + spawn_skip(job_def, cmd.to_string()).await + } else { + spawn(job_def, cmd, Some(path), iter::empty(), Some(spawn_opt)).await + } + } + + /// Performs build process without checking the current builds states + + /// Perform any needed copy operation after the build is done + pub async fn after_build( + &self, + prod: bool, + skip: bool, + ) -> Option> { + let job_type = JobType::AfterBuild { production: prod }; + let job_def = JobDefinition::new(*self, job_type); + + if skip { + return Some(spawn_skip(job_def, "Multiple file system commands".into()).await); + } + + // Taken from a discussion on GitHub: + // To build an npm package you would need (in most cases) to be in dev-mode - install dev-dependencies + dependencies, + // therefore we always install in development mode at first. + // But to prepare a package for production, you have to remove dev-dependencies. + // That's not an issue, if npm-package is published in npmjs; but we are coping packages manually in a right destination + // and before copy it, we have to reinstall it to get rid of dev-dependencies. + let reinstall_res = if prod && matches!(self.kind(), TargetKind::Ts) { + let node_path = self.cwd().join("node_modules"); + let remove_log = format!("removing directory {}", node_path.display()); + + if let Err(err) = fstools::rm_folder(job_def, &node_path) { + return Some(Err(err)); + } + + match self.install(true, false, Some(job_type)).await { + Some(Ok(mut spawn_res)) => { + spawn_res.report.insert(0, remove_log); + Some(spawn_res) + } + Some(Err(err)) => return Some(Err(err)), + None => None, + } + } else { + None + }; + + let after_res = match self { + Target::Binding => binding::copy_index_node(job_def), + Target::App => app::copy_client_to_app(job_def).await, + Target::Core + | Target::Shared + | Target::Wrapper + | Target::Wasm + | Target::Client + | Target::Updater + | Target::Cli => return None, + }; + + match (after_res, reinstall_res) { + (res, None) => Some(res), + (Err(err), _) => Some(Err(err)), + (Ok(after_res), Some(mut install_res)) => { + install_res.append(after_res); + Some(Ok(install_res)) + } + } + } +} + +/// run install using the general routine for the given target +/// * `target`: job target to perform its after build jobs +/// * `prod`: build for production +/// * `job_type`: job type to communicate with `tracker` +/// * `overridden_target`: override target to communicate with `tracker` when install is called +/// from within another task. 
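+/// Editor's note: this returns `None` for targets whose kind has no install step,
+/// mirroring `TargetKind::install_cmd()` returning `None` for `TargetKind::Rs`
+/// (with `Target::Wasm` as the exception that uses the TS install command).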
+async fn install_general(
+    target: Target,
+    prod: bool,
+    job_type: JobType,
+    overridden_target: Option<Target>,
+) -> Option<Result<SpawnResult, anyhow::Error>> {
+    let cmd = match target {
+        // Wasm needs the `yarn install` command despite having the kind `TargetKind::Rs`
+        Target::Wasm => TargetKind::Ts.install_cmd(prod),
+        t => t.kind().install_cmd(prod),
+    };
+
+    let job_def = JobDefinition::new(overridden_target.unwrap_or(target), job_type);
+
+    if let Some(cmd) = cmd {
+        let res = spawn(job_def, cmd, Some(target.cwd()), iter::empty(), None).await;
+        Some(res)
+    } else {
+        None
+    }
+}
+
+/// Provides a process command with yarn as [`ProcessCommand::cmd`] and the given arguments
+/// as [`ProcessCommand::args`]
+fn yarn_command(args: Vec<String>) -> ProcessCommand {
+    let yarn_path = DevTool::Yarn.path();
+    ProcessCommand::new(yarn_path.to_string_lossy().to_string(), args)
+}
diff --git a/cli/src/target/target_kind.rs b/cli/src/target/target_kind.rs
new file mode 100644
index 0000000000..fbbfcf9ec4
--- /dev/null
+++ b/cli/src/target/target_kind.rs
@@ -0,0 +1,58 @@
+use crate::dev_tools::DevTool;
+
+use super::{yarn_command, ProcessCommand};
+
+#[derive(Debug, Clone)]
+pub enum TargetKind {
+    /// TypeScript
+    Ts,
+    /// Rust
+    Rs,
+}
+
+impl TargetKind {
+    /// Provides the general build command for each target kind
+    pub fn build_cmd(&self, prod: bool) -> ProcessCommand {
+        match self {
+            TargetKind::Ts => {
+                let mut args = vec![String::from("run")];
+                if prod {
+                    args.push("prod".into());
+                } else {
+                    args.push("build".into());
+                }
+
+                yarn_command(args)
+            }
+            TargetKind::Rs => {
+                let cargo_path = DevTool::Cargo.path();
+                let mut args = vec![
+                    String::from("build"),
+                    String::from("--color"),
+                    String::from("always"),
+                ];
+                if prod {
+                    args.push("--release".into());
+                }
+
+                ProcessCommand::new(cargo_path.to_string_lossy().to_string(), args)
+            }
+        }
+    }
+    /// Provides the general install command for each target kind
+    pub fn install_cmd(&self, prod: bool) -> Option<ProcessCommand> {
+        match self {
+            TargetKind::Ts => {
+                let mut args = vec![String::from("install")];
+                if prod {
+                    args.push("--production".into());
+                }
+
+                let command = yarn_command(args);
+
+                Some(command)
+            }
+            TargetKind::Rs => None,
+        }
+    }
+}
diff --git a/cli/src/target/updater.rs b/cli/src/target/updater.rs
new file mode 100644
index 0000000000..82f86acd93
--- /dev/null
+++ b/cli/src/target/updater.rs
@@ -0,0 +1,18 @@
+use crate::dev_tools::DevTool;
+
+use super::ProcessCommand;
+
+pub fn get_build_cmd() -> ProcessCommand {
+    let cargo_path = DevTool::Cargo.path();
+
+    ProcessCommand::new(
+        cargo_path.to_string_lossy().to_string(),
+        vec![
+            String::from("+stable"),
+            String::from("build"),
+            String::from("--color"),
+            String::from("always"),
+            String::from("--release"),
+        ],
+    )
+}
diff --git a/cli/src/target/wasm.rs b/cli/src/target/wasm.rs
new file mode 100644
index 0000000000..d3e1738f5f
--- /dev/null
+++ b/cli/src/target/wasm.rs
@@ -0,0 +1,56 @@
+use crate::{dev_tools::DevTool, spawner::SpawnOptions, target::Target};
+
+use super::{ProcessCommand, TestSpawnCommand};
+
+pub fn get_build_cmd(prod: bool) -> ProcessCommand {
+    let wasm_pack_path = DevTool::WasmPack.path();
+    let env = if prod { "--release" } else { "--dev" };
+
+    ProcessCommand::new(
+        wasm_pack_path.to_string_lossy().to_string(),
+        vec![
+            String::from("build"),
+            String::from(env),
+            String::from("--target"),
+            String::from("bundler"),
+            String::from("--color"),
+            String::from("always"),
+        ],
+    )
+}
+
+pub fn get_test_cmds() -> Vec<TestSpawnCommand> {
+    let cwd = Target::Wasm.cwd();
+
+    let npm_path = DevTool::Npm.path();
+
let wasm_pack_path = DevTool::WasmPack.path(); + + let npm_test_command = if cfg!(windows) { "test_win" } else { "test" }; + + vec![ + TestSpawnCommand::new( + ProcessCommand::new( + wasm_pack_path.to_string_lossy().to_string(), + vec![ + String::from("test"), + String::from("--node"), + String::from("--color"), + String::from("always"), + ], + ), + cwd.clone(), + None, + ), + TestSpawnCommand::new( + ProcessCommand::new( + npm_path.to_string_lossy().to_string(), + vec![String::from("run"), String::from(npm_test_command)], + ), + cwd.join("spec"), + Some(SpawnOptions { + suppress_msg: true, + ..Default::default() + }), + ), + ] +} diff --git a/cli/src/target/wrapper.rs b/cli/src/target/wrapper.rs new file mode 100644 index 0000000000..6d7b8b3227 --- /dev/null +++ b/cli/src/target/wrapper.rs @@ -0,0 +1,121 @@ +use std::{iter, path::PathBuf}; + +use anyhow::{anyhow, Context}; + +use crate::{ + job_type::JobType, + jobs_runner::JobDefinition, + spawner::{spawn, spawn_blocking, SpawnResult}, +}; + +use super::{ProcessCommand, Target}; + +const TEST_SPECS: [&str; 14] = [ + // TODO: + // Running "jobs" here causes the program to receive SIGTRAP from OS because of an + // out-of-memory error in electron app, even if only this job was running (by + // commenting out the other specs). + // + // This error happens while executing function `jobs.getShellProfiles()` in file `session.jobs.spec.ts` + // which will call rust function `get_valid_profiles()` in `indexer/session/src/unbound/commands/shells.rs` + // using the crate `envvars` which panics with piped shells. + // + // The current work-around is blocking the progress bars temporally and running the tests + // sequentially using `Stdio::inherit` to keep using the main shell, printing the results + // of the test directly to standard out, then the progress bars will be shown again. + "jobs", + "search", + "values", + "extract", + "ranges", + "exporting", + "map", + "observe", + "indexes", + "concat", + "cancel", + "errors", + "stream", + "promises", +]; + +pub async fn run_test(production: bool) -> Result { + let job_def = JobDefinition::new(Target::Binding, JobType::Test { production }); + let mut final_result: Option = None; + + let cwd = Target::Wrapper.cwd(); + + let build_spec_path = cwd.join("spec"); + //TODO: This check exists in rake implementation but it need to be improved. + // The check should cover if the test themselves or the code under the tests has been changed. 
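+    // Editor's sketch of one possible improvement (assumption, not part of this
+    // patch): hash the spec sources and rebuild when the hash changes, e.g. by
+    // reusing the same `ChecksumRecords` machinery that already guards the build jobs.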
+ if !build_spec_path.join("build").exists() { + // Finding tsc path on differnet platforms + let mut test_runner_path = cwd.join("node_modules").join(".bin"); + if cfg!(windows) { + let tsc_path = which::which_in("tsc", Some(&test_runner_path), &test_runner_path) + .context("Error while resolving tsc bin path on Windows")?; + test_runner_path = tsc_path; + } else { + test_runner_path.push("tsc"); + } + + let build_spec_cmd = ProcessCommand::new( + test_runner_path.to_string_lossy().to_string(), + vec![String::from("-p"), String::from("tsconfig.json")], + ); + + let spec_res = spawn( + job_def, + build_spec_cmd, + Some(build_spec_path), + iter::empty(), + None, + ) + .await?; + + final_result = Some(spec_res); + } + + let mut electron_path: PathBuf = cwd.join("node_modules").join(".bin"); + if cfg!(windows) { + electron_path = which::which_in("electron", Some(&electron_path), &electron_path) + .context("Error while resolving electron bin path on Windows")?; + } else { + electron_path.push("electron"); + } + // "electron"].iter().collect(); + let electron_path = electron_path.to_string_lossy(); + + let jasmine_path: PathBuf = [".", "node_modules", "jasmine", "bin", "jasmine.js"] + .iter() + .collect(); + let jasmine_path = jasmine_path.to_string_lossy(); + + let specs_dir_path: PathBuf = ["spec", "build", "spec"].iter().collect(); + + for spec in TEST_SPECS { + let spec_file_name = format!("session.{spec}.spec.js"); + let spec_file_path = specs_dir_path.join(spec_file_name); + let command = ProcessCommand::new( + electron_path.to_string(), + vec![ + jasmine_path.to_string(), + spec_file_path.to_string_lossy().to_string(), + ], + ); + let res = spawn_blocking( + job_def, + command, + Some(cwd.clone()), + vec![(String::from("ELECTRON_RUN_AS_NODE"), String::from("1"))], + ) + .await?; + + match final_result.as_mut() { + Some(acc) => acc.append(res), + None => final_result = Some(res), + }; + } + + final_result.ok_or_else(|| anyhow!("Wrapper doesn't have test specs")) +} diff --git a/cli/src/tracker.rs b/cli/src/tracker.rs new file mode 100644 index 0000000000..f1c6677dcf --- /dev/null +++ b/cli/src/tracker.rs @@ -0,0 +1,367 @@ +use anyhow::{anyhow, Context, Error}; +use console::style; +use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; +use std::{ + collections::BTreeMap, + process::{Command, ExitStatus}, + sync::OnceLock, + time::Instant, +}; +use tokio::sync::{ + mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, + oneshot, +}; + +use crate::jobs_runner::JobDefinition; + +const TIME_BAR_WIDTH: usize = 5; + +#[derive(Clone, Debug)] +pub enum OperationResult { + Success, + Failed, +} + +impl std::fmt::Display for OperationResult { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!( + f, + "{}", + match self { + OperationResult::Success => style("done").bold().green(), + OperationResult::Failed => style("fail").bold().red(), + } + ) + } +} + +#[derive(Debug)] +/// Represents tasks information that can be sent to and from the tracker +pub enum Tick { + /// Change job status from awaiting to started. + Started(JobDefinition, oneshot::Sender<()>), + /// Start a job giving the job name and the sender to return the job number. + StartAll(Vec, oneshot::Sender<()>), + /// Update the job providing an optional progress value. 
+    Progress(JobDefinition, Option<u64>),
+    /// Send a message to the job.
+    Message(JobDefinition, String),
+    /// Set the job as finished, providing the job result and a message.
+    Finished(JobDefinition, OperationResult, String),
+    #[allow(dead_code)]
+    /// Print the given text outside the progress bars.
+    Print(String),
+    /// Close all the jobs and shut down the progress bars.
+    Shutdown(oneshot::Sender<()>),
+    /// Suspend the progress bars and execute the given blocking command.
+    SuspendAndRun(Command, oneshot::Sender<Result<ExitStatus, Error>>),
+}
+
+#[derive(Clone, Debug)]
+pub struct Tracker {
+    tx: UnboundedSender<Tick>,
+}
+
+enum JobBarPhase {
+    Pending,
+    Running(Instant),
+    Finished((OperationResult, u64)),
+}
+
+struct JobBarState {
+    name: String,
+    bar: ProgressBar,
+    phase: JobBarPhase,
+}
+
+impl JobBarState {
+    fn new(name: String, bar: ProgressBar) -> Self {
+        Self {
+            name,
+            bar,
+            phase: JobBarPhase::Pending,
+        }
+    }
+
+    fn start(&mut self) {
+        assert!(
+            matches!(self.phase, JobBarPhase::Pending),
+            "Start can be called on pending jobs only"
+        );
+
+        let start_time = Instant::now();
+        self.phase = JobBarPhase::Running(start_time);
+    }
+}
+
+pub fn get_tracker() -> &'static Tracker {
+    static TRACKER: OnceLock<Tracker> = OnceLock::new();
+
+    TRACKER.get_or_init(Tracker::new)
+}
+
+impl Tracker {
+    fn new() -> Self {
+        let (tx, rx): (UnboundedSender<Tick>, UnboundedReceiver<Tick>) = unbounded_channel();
+        tokio::spawn(Tracker::run(rx));
+        Self { tx }
+    }
+
+    async fn run(mut rx: UnboundedReceiver<Tick>) -> Result<(), Error> {
+        let spinner_style =
+            ProgressStyle::with_template("{spinner} {prefix:.bold.dim} {wide_msg}")?
+                .tick_chars("▂▃▅▆▇▆▅▃▂ ");
+        let mut max_time_len = 0;
+        let max = u64::MAX;
+        let mut bars: BTreeMap<JobDefinition, JobBarState> = BTreeMap::new();
+        let mp = MultiProgress::new();
+        let start_time = Instant::now();
+        while let Some(tick) = rx.recv().await {
+            match tick {
+                Tick::Started(job_def, tx_response) => {
+                    let Some(job) = bars.get_mut(&job_def) else {
+                        unreachable!("Job must exist in progress bars before starting it. Job Info: {job_def:?}")
+                    };
+                    if matches!(job.phase, JobBarPhase::Pending) {
+                        job.start();
+                        Self::refresh_all_bars(&mut bars, max_time_len, None);
+                    }
+
+                    if tx_response.send(()).is_err() {
+                        let _ = mp.println("Failed to send response while starting the jobs");
+                    }
+                }
+                Tick::StartAll(jobs, tx_response) => {
+                    for job in jobs.into_iter() {
+                        let bar = mp.add(ProgressBar::new(max));
+                        bar.set_style(spinner_style.clone());
+                        let bar_text = format!(
+                            "{}: {}",
+                            job.target.relative_cwd().display(),
+                            job.job_title()
+                        );
+                        let job_bar = JobBarState::new(bar_text, bar);
+                        bars.insert(job, job_bar);
+                    }
+
+                    Self::refresh_all_bars(&mut bars, max_time_len, None);
+                    if tx_response.send(()).is_err() {
+                        let _ = mp.println("Failed to send response while registering the jobs");
+                    }
+                }
+                Tick::Message(job_def, log) => match bars.get(&job_def) {
+                    Some(job_bar) => job_bar.bar.set_message(log),
+                    None => unreachable!(
+                        "Job must exist in progress bars before messaging it. Job Info: {job_def:?}"
+                    ),
+                },
+                Tick::Progress(job_def, pos) => {
+                    let Some(job_bar) = bars.get(&job_def) else {
+                        unreachable!("Job must exist in progress bars before changing its progress. Job Info: {job_def:?}")
+                    };
+
+                    if let Some(pos) = pos {
+                        job_bar.bar.set_position(pos);
+                    } else {
+                        job_bar.bar.inc(1);
+                    }
+                }
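+                // A finished job keeps its bar on screen; the stored elapsed time is used
+                // later to render the per-job time bars in the final summary.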
+                Tick::Finished(job_def, result, msg) => {
+                    let Some(job_bar) = bars.get_mut(&job_def) else {
+                        unreachable!("Job must exist in progress bars before finishing it. Job Info: {job_def:?}")
+                    };
+
+                    // It doesn't make sense to show that a job is done in 0 seconds
+                    let time = match job_bar.phase {
+                        JobBarPhase::Running(start_time) => start_time.elapsed().as_secs().max(1),
+                        _ => unreachable!("Job must be running when finish is called"),
+                    };
+
+                    max_time_len = max_time_len.max(Self::count_digits(time));
+
+                    job_bar.bar.finish_with_message(msg);
+                    job_bar.phase = JobBarPhase::Finished((result, time));
+
+                    Self::refresh_all_bars(&mut bars, max_time_len, None);
+                }
+                Tick::Print(msg) => {
+                    let _ = mp.println(msg);
+                }
+                Tick::Shutdown(tx_response) => {
+                    // Mark jobs that are still running as finished (failed)
+                    for (_job_def, job_bar) in bars.iter_mut() {
+                        let time = match job_bar.phase {
+                            JobBarPhase::Pending => 1,
+                            JobBarPhase::Running(start_time) => {
+                                start_time.elapsed().as_secs().max(1)
+                            }
+                            JobBarPhase::Finished(_) => continue,
+                        };
+
+                        job_bar.phase = JobBarPhase::Finished((OperationResult::Failed, time));
+                        max_time_len = max_time_len.max(Self::count_digits(time));
+
+                        job_bar.bar.finish();
+                    }
+
+                    // Render a graphic bar for the running duration of each job
+                    let total_time = start_time.elapsed().as_secs().max(1) as usize;
+                    Self::refresh_all_bars(&mut bars, max_time_len, Some(total_time));
+
+                    // Insert the total-time bar
+                    let total_bar = mp.add(ProgressBar::new((bars.len() + 1) as u64));
+                    total_bar.set_style(spinner_style.clone());
+                    total_bar.set_prefix(format!("[total] done all in {total_time}s."));
+                    total_bar.finish();
+
+                    bars.clear();
+                    if tx_response.send(()).is_err() {
+                        let _ = mp.println("Failed to send response");
+                    }
+                    break;
+                }
+                Tick::SuspendAndRun(mut command, tx_response) => {
+                    let status = mp
+                        .suspend(|| command.status())
+                        .context("Error while executing command");
+                    if tx_response.send(status).is_err() {
+                        let _ = mp.println("Failed to send response");
+                    }
+                }
+            }
+        }
+        Ok(())
+    }
+
+    fn refresh_all_bars(
+        bars: &mut BTreeMap<JobDefinition, JobBarState>,
+        max_time_len: usize,
+        total_time: Option<usize>,
+    ) {
+        let jobs_count_txt = bars.len().to_string();
+
+        bars.iter_mut()
+            .enumerate()
+            .for_each(|(idx, (_job_def, job_bar))| {
+                let job_number = idx + 1;
+                let seq_width = jobs_count_txt.len();
+                let job = job_bar.name.as_str();
+                let line_prefix = match &job_bar.phase {
+                    JobBarPhase::Pending => format!(
+                        "[{job_number:seq_width$}/{jobs_count_txt}][{}][{job}]",
+                        style("wait").bold().blue()
+                    ),
+                    JobBarPhase::Running(_) => {
+                        format!("[{job_number:seq_width$}/{jobs_count_txt}][....][{job}]")
+                    }
+                    JobBarPhase::Finished((res, time)) => {
+                        if let Some(total_time) = total_time {
+                            let finish_limit = (*time as usize * TIME_BAR_WIDTH) / total_time;
+                            let time_bar: String = (0..TIME_BAR_WIDTH)
+                                .map(|idx| if idx <= finish_limit { '█' } else { '░' })
+                                .collect();
+                            format!("[{job_number:seq_width$}/{jobs_count_txt}][{res}][{time_bar} {time:max_time_len$}s][{job}]")
+                        } else {
+                            format!("[{job_number:seq_width$}/{jobs_count_txt}][{res}][{time:max_time_len$}s][{job}]")
+                        }
+                    }
+                };
+
+                job_bar.bar.set_prefix(line_prefix);
+            });
+    }
+
+    /// Counts the digits of a number without allocating a new string.
+    fn count_digits(mut num: u64) -> usize {
+        if num == 0 {
+            return 1; // Special case for zero
+        }
+
+        let mut count = 0;
+        while num > 0 {
+            num /= 10;
+            count += 1;
+        }
+        count
+    }
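+    // The public methods below only forward a `Tick` over the channel; all progress-bar
+    // state is owned and mutated exclusively by the `Tracker::run` loop.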
+    /// Registers all the given jobs, setting their status to awaiting.
+    /// This function should be called once, at the start of running the tasks.
+    pub async fn register_all(&self, jobs: Vec<JobDefinition>) -> Result<(), Error> {
+        let (tx_response, rx_response) = oneshot::channel();
+        self.tx
+            .send(Tick::StartAll(jobs, tx_response))
+            .context("Failed to send tick")?;
+        rx_response.await.context("Failed to receive tick StartAll")
+    }
+
+    /// Changes the job status from awaiting to started.
+    pub async fn start(&self, job_def: JobDefinition) -> Result<(), Error> {
+        let (tx_response, rx_response) = oneshot::channel();
+        self.tx
+            .send(Tick::Started(job_def, tx_response))
+            .context("Failed to send tick")?;
+        rx_response
+            .await
+            .context("Failed to receive tick Started")
+    }
+
+    /// Updates the job, providing an optional progress value.
+    pub fn progress(&self, job_def: JobDefinition, pos: Option<u64>) {
+        if let Err(e) = self.tx.send(Tick::Progress(job_def, pos)) {
+            eprintln!("Failed to communicate with tracker: {e}");
+        }
+    }
+
+    /// Sends a message to the job.
+    pub fn msg(&self, job_def: JobDefinition, log: String) {
+        if let Err(e) = self.tx.send(Tick::Message(job_def, log)) {
+            eprintln!("Failed to communicate with tracker: {e}");
+        }
+    }
+
+    /// Sets the job as finished, providing a successful result and a message.
+    pub fn success(&self, job_def: JobDefinition, msg: String) {
+        if let Err(e) = self
+            .tx
+            .send(Tick::Finished(job_def, OperationResult::Success, msg))
+        {
+            eprintln!("Failed to communicate with tracker: {e}");
+        }
+    }
+
+    /// Sets the job as finished, providing a failed result and a message.
+    pub fn fail(&self, job_def: JobDefinition, msg: String) {
+        if let Err(e) = self
+            .tx
+            .send(Tick::Finished(job_def, OperationResult::Failed, msg))
+        {
+            eprintln!("Failed to communicate with tracker: {e}");
+        }
+    }
+
+    /// Closes all the jobs and shuts down the progress bars.
+    pub async fn shutdown(&self) -> Result<(), Error> {
+        let (tx_response, rx_response) = oneshot::channel();
+        self.tx
+            .send(Tick::Shutdown(tx_response))
+            .context("Failed to send tick")?;
+        rx_response.await.context("Failed to receive tick Shutdown")
+    }
+
+    /// Prints the given text outside the progress bars.
+    pub fn print(&self, msg: String) {
+        if let Err(e) = self.tx.send(Tick::Print(msg)) {
+            eprintln!("Failed to communicate with tracker: {e}");
+        }
+    }
+
+    /// Suspends the progress bars and runs the given blocking command, returning its exit status.
+    pub async fn suspend_and_run(
+        &self,
+        cmd: std::process::Command,
+    ) -> Result<ExitStatus, Error> {
+        let (tx_response, rx_response) = oneshot::channel();
+        self.tx
+            .send(Tick::SuspendAndRun(cmd, tx_response))
+            .context("Failed to send tick")?;
+
+        rx_response.await.context("Failed to receive tick SuspendAndRun")?
+    }
+}
diff --git a/contribution.md b/contribution.md
index ab102de6d9..6642bb71c8 100644
--- a/contribution.md
+++ b/contribution.md
@@ -1,13 +1,16 @@
 # Contributing
 
 Chipmunk uses [Rust](https://www.rust-lang.org/) for processing the log files while frontend application is built using the [ElectronJS](https://www.electronjs.org/)
+
+## Chipmunk Development CLI Tool
+With the new [Chipmunk Development CLI Tool](cli/README.md), you can perform various Chipmunk development tasks without the need to install Ruby.
 
 ## Pre-requisite
 To build and run chipmunk on local you will need following languages installed on your system.
 1. Rust
 2. NodeJS
-3. Ruby
+3. Ruby (Not needed with the new [Chipmunk Development CLI Tool](cli/README.md))
 
 To check if you have all the pre-requisite installed or not, chipmunk provides the shell script for this purpose.
 After cloning the repo run following command in your preferred terminal.
@@ -52,6 +55,10 @@
 node -v
 
 which will print the installed NodeJS version on terminal.
+
+> [!TIP]
+> You can use the new [Chipmunk Development CLI Tool](cli/README.md) instead of Ruby & `rake`
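+
+For example, the full lint and test suites can be run through the tool — a sketch of typical
+invocations; see [cli/README.md](cli/README.md) for the exact set of subcommands and flags:
+
+```sh
+cargo chipmunk lint -r
+cargo chipmunk test -r
+```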
+
 ### Installing Ruby
 You must have latest Ruby installed on your system. Prefer your choice of version manager either [RBENV](https://github.com/rbenv/rbenv) or [RVM](https://rvm.io/).