From af15760171dbf823f7252552e1050c7b76c0f742 Mon Sep 17 00:00:00 2001
From: Jonathan Wang <31040440+jonathanpwang@users.noreply.github.com>
Date: Thu, 12 Dec 2024 19:29:32 -0500
Subject: [PATCH] chore: remove stark-backend from workspace (#1023)

---
 .github/workflows/algebra-extension.yml | 7 +
 .github/workflows/benchmark-call.yml | 8 +
 .github/workflows/benchmarks.yml | 2 -
 .github/workflows/bigint-extension.yml | 7 +
 .github/workflows/build.yml | 9 +-
 .github/workflows/cli.yml | 8 +-
 .github/workflows/ecc.yml | 8 +-
 .github/workflows/keccak256-extension.yml | 7 +
 .github/workflows/lints.yml | 7 +
 .github/workflows/native-extension.yml | 7 +
 .github/workflows/primitives.yml | 8 +-
 .github/workflows/recursion.yml | 8 +-
 .github/workflows/riscv.yml | 7 +
 .github/workflows/rv32im-extension.yml | 7 +
 .github/workflows/sdk.yml | 8 +-
 .github/workflows/stark-backend.yml | 35 --
 .github/workflows/toolchain-edsl.yml | 7 +
 .github/workflows/toolchain.yml | 7 +
 .github/workflows/vm.yml | 8 +-
 Cargo.lock | 25 +-
 Cargo.toml | 6 +-
 crates/stark-backend/Cargo.toml | 63 --
 crates/stark-backend/README.md | 93 ---
 .../air_builders/debug/check_constraints.rs | 166 ------
 .../src/air_builders/debug/mod.rs | 231 --------
 crates/stark-backend/src/air_builders/mod.rs | 19 -
 .../stark-backend/src/air_builders/prover.rs | 232 --------
 crates/stark-backend/src/air_builders/sub.rs | 106 ----
 .../src/air_builders/symbolic/mod.rs | 467 ---------------
 .../symbolic/symbolic_expression.rs | 323 -----------
 .../symbolic/symbolic_variable.rs | 194 -------
 .../src/air_builders/verifier.rs | 127 ----
 crates/stark-backend/src/chip.rs | 142 -----
 crates/stark-backend/src/circuit_api.rs | 23 -
 crates/stark-backend/src/commit.rs | 66 ---
 crates/stark-backend/src/config.rs | 147 -----
 crates/stark-backend/src/engine.rs | 168 ------
 crates/stark-backend/src/gkr/gate.rs | 54 --
 crates/stark-backend/src/gkr/mod.rs | 11 -
 crates/stark-backend/src/gkr/prover.rs | 545 ------------------
 crates/stark-backend/src/gkr/tests.rs | 244 --------
 crates/stark-backend/src/gkr/types.rs | 267 ---------
 crates/stark-backend/src/gkr/verifier.rs | 155 -----
 .../stark-backend/src/interaction/README.md | 102 ----
 crates/stark-backend/src/interaction/debug.rs | 60 --
 crates/stark-backend/src/interaction/mod.rs | 186 ------
 crates/stark-backend/src/interaction/rap.rs | 37 --
 .../src/interaction/stark_log_up.rs | 545 ------------------
 crates/stark-backend/src/interaction/trace.rs | 33 --
 crates/stark-backend/src/interaction/utils.rs | 59 --
 crates/stark-backend/src/keygen/mod.rs | 242 --------
 crates/stark-backend/src/keygen/types.rs | 171 ------
 crates/stark-backend/src/keygen/view.rs | 92 ---
 crates/stark-backend/src/lib.rs | 53 --
 crates/stark-backend/src/poly/mod.rs | 2 -
 crates/stark-backend/src/poly/multi.rs | 266 ---------
 crates/stark-backend/src/poly/uni.rs | 330 -----------
 crates/stark-backend/src/prover/helper.rs | 86 ---
 crates/stark-backend/src/prover/metrics.rs | 154 -----
 crates/stark-backend/src/prover/mod.rs | 503 ----------------
 crates/stark-backend/src/prover/opener.rs | 174 ------
 .../src/prover/quotient/helper.rs | 30 -
 .../stark-backend/src/prover/quotient/mod.rs | 244 --------
 .../src/prover/quotient/single.rs | 195 -------
 crates/stark-backend/src/prover/trace.rs | 203 -------
 crates/stark-backend/src/prover/types.rs | 180 ------
 crates/stark-backend/src/rap.rs | 124 ----
 crates/stark-backend/src/sumcheck.rs | 329 -----------
 crates/stark-backend/src/utils.rs | 58 --
 .../stark-backend/src/verifier/constraints.rs | 140 -----
 crates/stark-backend/src/verifier/error.rs | 16 -
 crates/stark-backend/src/verifier/mod.rs | 302 ----------
 .../tests/cached_lookup/instrumented.rs | 148 -----
 .../stark-backend/tests/cached_lookup/mod.rs | 124 ----
 .../tests/cached_lookup/prove.rs | 206 -------
 .../tests/fib_selector_air/air.rs | 100 ----
 .../tests/fib_selector_air/columns.rs | 7 -
 .../tests/fib_selector_air/mod.rs | 3 -
 .../tests/fib_selector_air/trace.rs | 21 -
 .../tests/fib_triples_air/air.rs | 56 --
 .../tests/fib_triples_air/columns.rs | 21 -
 .../tests/fib_triples_air/mod.rs | 3 -
 .../tests/fib_triples_air/trace.rs | 27 -
 .../stark-backend/tests/integration_test.rs | 262 ---------
 crates/stark-backend/tests/interaction/mod.rs | 276 ---------
 .../tests/partitioned_sum_air/air.rs | 47 --
 .../tests/partitioned_sum_air/mod.rs | 96 ---
 crates/stark-sdk/Cargo.toml | 50 --
 crates/stark-sdk/src/bench/mod.rs | 114 ----
 .../stark-sdk/src/config/baby_bear_blake3.rs | 34 --
 .../src/config/baby_bear_bytehash.rs | 123 ----
 .../stark-sdk/src/config/baby_bear_keccak.rs | 34 --
 .../src/config/baby_bear_poseidon2.rs | 267 ---------
 .../src/config/baby_bear_poseidon2_root.rs | 248 --------
 crates/stark-sdk/src/config/fri_params.rs | 75 ---
 .../src/config/goldilocks_poseidon.rs | 171 ------
 crates/stark-sdk/src/config/instrument.rs | 102 ----
 crates/stark-sdk/src/config/mod.rs | 42 --
 crates/stark-sdk/src/cost_estimate.rs | 222 -------
 .../stark-sdk/src/dummy_airs/fib_air/air.rs | 55 --
 .../stark-sdk/src/dummy_airs/fib_air/chip.rs | 67 ---
 .../src/dummy_airs/fib_air/columns.rs | 16 -
 .../stark-sdk/src/dummy_airs/fib_air/mod.rs | 4 -
 .../stark-sdk/src/dummy_airs/fib_air/trace.rs | 16 -
 .../interaction/dummy_interaction_air.rs | 285 ---------
 .../src/dummy_airs/interaction/mod.rs | 64 --
 crates/stark-sdk/src/dummy_airs/mod.rs | 3 -
 crates/stark-sdk/src/engine.rs | 94 ---
 crates/stark-sdk/src/lib.rs | 14 -
 crates/stark-sdk/src/utils.rs | 56 --
 crates/stark-sdk/tests/serde_type.rs | 6 -
 docs/specs/vm/stark.md | 2 +-
 112 files changed, 128 insertions(+), 12388 deletions(-)
 delete mode 100644 .github/workflows/stark-backend.yml
 delete mode 100644 crates/stark-backend/Cargo.toml
 delete mode 100644 crates/stark-backend/README.md
 delete mode 100644 crates/stark-backend/src/air_builders/debug/check_constraints.rs
 delete mode 100644 crates/stark-backend/src/air_builders/debug/mod.rs
 delete mode 100644 crates/stark-backend/src/air_builders/mod.rs
 delete mode 100644 crates/stark-backend/src/air_builders/prover.rs
 delete mode 100644 crates/stark-backend/src/air_builders/sub.rs
 delete mode 100644 crates/stark-backend/src/air_builders/symbolic/mod.rs
 delete mode 100644 crates/stark-backend/src/air_builders/symbolic/symbolic_expression.rs
 delete mode 100644 crates/stark-backend/src/air_builders/symbolic/symbolic_variable.rs
 delete mode 100644 crates/stark-backend/src/air_builders/verifier.rs
 delete mode 100644 crates/stark-backend/src/chip.rs
 delete mode 100644 crates/stark-backend/src/circuit_api.rs
 delete mode 100644 crates/stark-backend/src/commit.rs
 delete mode 100644 crates/stark-backend/src/config.rs
 delete mode 100644 crates/stark-backend/src/engine.rs
 delete mode 100644 crates/stark-backend/src/gkr/gate.rs
 delete mode 100644 crates/stark-backend/src/gkr/mod.rs
 delete mode 100644 crates/stark-backend/src/gkr/prover.rs
 delete mode 100644 crates/stark-backend/src/gkr/tests.rs
 delete mode 100644 crates/stark-backend/src/gkr/types.rs
 delete mode 100644 crates/stark-backend/src/gkr/verifier.rs
 delete mode 100644 crates/stark-backend/src/interaction/README.md
 delete mode 100644 crates/stark-backend/src/interaction/debug.rs
 delete mode 100644 crates/stark-backend/src/interaction/mod.rs
 delete mode 100644 crates/stark-backend/src/interaction/rap.rs
 delete mode 100644 crates/stark-backend/src/interaction/stark_log_up.rs
 delete mode 100644 crates/stark-backend/src/interaction/trace.rs
 delete mode 100644 crates/stark-backend/src/interaction/utils.rs
 delete mode 100644 crates/stark-backend/src/keygen/mod.rs
 delete mode 100644 crates/stark-backend/src/keygen/types.rs
 delete mode 100644 crates/stark-backend/src/keygen/view.rs
 delete mode 100644 crates/stark-backend/src/lib.rs
 delete mode 100644 crates/stark-backend/src/poly/mod.rs
 delete mode 100644 crates/stark-backend/src/poly/multi.rs
 delete mode 100644 crates/stark-backend/src/poly/uni.rs
 delete mode 100644 crates/stark-backend/src/prover/helper.rs
 delete mode 100644 crates/stark-backend/src/prover/metrics.rs
 delete mode 100644 crates/stark-backend/src/prover/mod.rs
 delete mode 100644 crates/stark-backend/src/prover/opener.rs
 delete mode 100644 crates/stark-backend/src/prover/quotient/helper.rs
 delete mode 100644 crates/stark-backend/src/prover/quotient/mod.rs
 delete mode 100644 crates/stark-backend/src/prover/quotient/single.rs
 delete mode 100644 crates/stark-backend/src/prover/trace.rs
 delete mode 100644 crates/stark-backend/src/prover/types.rs
 delete mode 100644 crates/stark-backend/src/rap.rs
 delete mode 100644 crates/stark-backend/src/sumcheck.rs
 delete mode 100644 crates/stark-backend/src/utils.rs
 delete mode 100644 crates/stark-backend/src/verifier/constraints.rs
 delete mode 100644 crates/stark-backend/src/verifier/error.rs
 delete mode 100644 crates/stark-backend/src/verifier/mod.rs
 delete mode 100644 crates/stark-backend/tests/cached_lookup/instrumented.rs
 delete mode 100644 crates/stark-backend/tests/cached_lookup/mod.rs
 delete mode 100644 crates/stark-backend/tests/cached_lookup/prove.rs
 delete mode 100644 crates/stark-backend/tests/fib_selector_air/air.rs
 delete mode 100644 crates/stark-backend/tests/fib_selector_air/columns.rs
 delete mode 100644 crates/stark-backend/tests/fib_selector_air/mod.rs
 delete mode 100644 crates/stark-backend/tests/fib_selector_air/trace.rs
 delete mode 100644 crates/stark-backend/tests/fib_triples_air/air.rs
 delete mode 100644 crates/stark-backend/tests/fib_triples_air/columns.rs
 delete mode 100644 crates/stark-backend/tests/fib_triples_air/mod.rs
 delete mode 100644 crates/stark-backend/tests/fib_triples_air/trace.rs
 delete mode 100644 crates/stark-backend/tests/integration_test.rs
 delete mode 100644 crates/stark-backend/tests/interaction/mod.rs
 delete mode 100644 crates/stark-backend/tests/partitioned_sum_air/air.rs
 delete mode 100644 crates/stark-backend/tests/partitioned_sum_air/mod.rs
 delete mode 100644 crates/stark-sdk/Cargo.toml
 delete mode 100644 crates/stark-sdk/src/bench/mod.rs
 delete mode 100644 crates/stark-sdk/src/config/baby_bear_blake3.rs
 delete mode 100644 crates/stark-sdk/src/config/baby_bear_bytehash.rs
 delete mode 100644 crates/stark-sdk/src/config/baby_bear_keccak.rs
 delete mode 100644 crates/stark-sdk/src/config/baby_bear_poseidon2.rs
 delete mode 100644 crates/stark-sdk/src/config/baby_bear_poseidon2_root.rs
 delete mode 100644 crates/stark-sdk/src/config/fri_params.rs
 delete mode 100644 crates/stark-sdk/src/config/goldilocks_poseidon.rs
 delete mode 100644 crates/stark-sdk/src/config/instrument.rs
 delete mode 100644 crates/stark-sdk/src/config/mod.rs
 delete mode 100644 crates/stark-sdk/src/cost_estimate.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/fib_air/air.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/fib_air/chip.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/fib_air/columns.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/fib_air/mod.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/fib_air/trace.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/interaction/dummy_interaction_air.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/interaction/mod.rs
 delete mode 100644 crates/stark-sdk/src/dummy_airs/mod.rs
 delete mode 100644 crates/stark-sdk/src/engine.rs
 delete mode 100644 crates/stark-sdk/src/lib.rs
 delete mode 100644 crates/stark-sdk/src/utils.rs
 delete mode 100644 crates/stark-sdk/tests/serde_type.rs

diff --git a/.github/workflows/algebra-extension.yml b/.github/workflows/algebra-extension.yml
index cbc1d60c03..fabe254aaa 100644
--- a/.github/workflows/algebra-extension.yml
+++ b/.github/workflows/algebra-extension.yml
@@ -30,6 +30,13 @@ jobs:
           cache-on-failure: true
       - uses: taiki-e/install-action@nextest
 
+      # TEMPORARY
+      - name: Give GitHub Actions access to private repositories
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: |
+            ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }}
+
       - name: Run algebra extension crate tests
         working-directory: extensions/algebra/circuit
         run: |
diff --git a/.github/workflows/benchmark-call.yml b/.github/workflows/benchmark-call.yml
index faa5dda0ad..a2c99d80af 100644
--- a/.github/workflows/benchmark-call.yml
+++ b/.github/workflows/benchmark-call.yml
@@ -125,6 +125,14 @@ jobs:
       ##########################################################################
       # Environment setup                                                      #
       ##########################################################################
+
+      # TEMPORARY
+      - name: Give GitHub Actions access to private repositories
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: |
+            ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }}
+
       - uses: actions/checkout@v4
         with:
           ref: ${{ github.head_ref || github.ref }}
diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml
index 0fe819bfb5..40bc6395de 100644
--- a/.github/workflows/benchmarks.yml
+++ b/.github/workflows/benchmarks.yml
@@ -7,8 +7,6 @@ on:
     types: [opened, synchronize, reopened, labeled]
     branches: ["**"]
     paths:
-      - "crates/stark-backend/**"
-      - "crates/stark-sdk/**"
       - "crates/circuits/**"
       - "crates/vm/**"
       - "crates/toolchain/**"
diff --git a/.github/workflows/bigint-extension.yml b/.github/workflows/bigint-extension.yml
index 901973a48d..6584decaf4 100644
--- a/.github/workflows/bigint-extension.yml
+++ b/.github/workflows/bigint-extension.yml
@@ -30,6 +30,13 @@ jobs:
           cache-on-failure: true
       - uses: taiki-e/install-action@nextest
 
+      # TEMPORARY
+      - name: Give GitHub Actions access to private repositories
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: |
+            ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }}
+
       - name: Run bigint extension crate tests
         working-directory: extensions/bigint/circuit
         run: |
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index fe931c204e..4b0263e0b8 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -23,6 +23,13 @@ jobs:
         with:
           cache-on-failure: true
 
+      # TEMPORARY
+      - name: Give GitHub Actions access to private repositories
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: |
+            ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }}
+
       - name: Run build
         run: |
-          cargo build --verbose
\ No newline at end of file
+ cargo build --verbose diff --git a/.github/workflows/cli.yml b/.github/workflows/cli.yml index f811c33817..390ed6e06e 100644 --- a/.github/workflows/cli.yml +++ b/.github/workflows/cli.yml @@ -6,7 +6,6 @@ on: pull_request: branches: ["**"] paths: - - "crates/stark-backend/**" - "crates/circuits/primitives/**" - "crates/vm/**" - "crates/sdk/**" @@ -58,6 +57,13 @@ jobs: run: | bash ./extensions/native/recursion/trusted_setup_s3.sh + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + # TODO: CLI build, transpile, run, (keygen), prove, contract, verify - name: Run app-level CLI commands working-directory: crates/cli diff --git a/.github/workflows/ecc.yml b/.github/workflows/ecc.yml index 45668bd0bf..25e5b59867 100644 --- a/.github/workflows/ecc.yml +++ b/.github/workflows/ecc.yml @@ -6,7 +6,6 @@ on: pull_request: branches: ["**"] paths: - - "crates/stark-backend/**" - "crates/circuits/primitives/**" - "crates/vm/**" - "crates/cli/**" @@ -36,6 +35,13 @@ jobs: cache-on-failure: true - uses: taiki-e/install-action@nextest + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run pairing-guest crate tests working-directory: extensions/pairing/guest run: | diff --git a/.github/workflows/keccak256-extension.yml b/.github/workflows/keccak256-extension.yml index 9b99d0f1a9..980368ba8c 100644 --- a/.github/workflows/keccak256-extension.yml +++ b/.github/workflows/keccak256-extension.yml @@ -22,6 +22,13 @@ jobs: - runs-on=${{ github.run_id }} - runner=64cpu-linux-arm64 + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable diff --git a/.github/workflows/lints.yml b/.github/workflows/lints.yml index 6a80a56d7a..3bafac4f11 100644 --- a/.github/workflows/lints.yml +++ b/.github/workflows/lints.yml @@ -23,6 +23,13 @@ jobs: with: cache-on-failure: true + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run fmt run: | cargo fmt --all -- --check diff --git a/.github/workflows/native-extension.yml b/.github/workflows/native-extension.yml index 3c4379d2c2..abcf52e397 100644 --- a/.github/workflows/native-extension.yml +++ b/.github/workflows/native-extension.yml @@ -30,6 +30,13 @@ jobs: cache-on-failure: true - uses: taiki-e/install-action@nextest + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run native extension crate tests working-directory: extensions/native/circuit run: | diff --git a/.github/workflows/primitives.yml b/.github/workflows/primitives.yml index 0dd6739b28..8dc0a3e9f8 100644 --- a/.github/workflows/primitives.yml +++ b/.github/workflows/primitives.yml @@ -6,7 +6,6 @@ on: pull_request: branches: ["**"] paths: - - "crates/stark-backend/**" - "crates/circuits/primitives/**" - "crates/circuits/poseidon2-air/**" @@ -32,6 +31,13 @@ jobs: cache-on-failure: true - uses: taiki-e/install-action@nextest + # TEMPORARY + - name: Give 
GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run tests for primitives working-directory: crates/circuits/primitives run: | diff --git a/.github/workflows/recursion.yml b/.github/workflows/recursion.yml index 41c2b79442..7e504c0ee9 100644 --- a/.github/workflows/recursion.yml +++ b/.github/workflows/recursion.yml @@ -6,7 +6,6 @@ on: pull_request: branches: ["**"] paths: - - "crates/stark-backend/**" - "crates/circuits/primitives/**" - "crates/vm/**" - "extensions/native/compiler/**" @@ -36,6 +35,13 @@ jobs: - name: Install solc # svm should support arm64 linux run: (hash svm 2>/dev/null || cargo install --version 0.2.23 svm-rs) && svm install 0.8.19 && solc --version + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run recursion crate tests working-directory: extensions/native/recursion run: | diff --git a/.github/workflows/riscv.yml b/.github/workflows/riscv.yml index 7b4c27e77d..344d42a68c 100644 --- a/.github/workflows/riscv.yml +++ b/.github/workflows/riscv.yml @@ -40,6 +40,13 @@ jobs: with: submodules: recursive + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run Makefile working-directory: crates/toolchain/tests/rv32im-test-vectors run: | diff --git a/.github/workflows/rv32im-extension.yml b/.github/workflows/rv32im-extension.yml index 9e69f3a7ef..0eda272f05 100644 --- a/.github/workflows/rv32im-extension.yml +++ b/.github/workflows/rv32im-extension.yml @@ -30,6 +30,13 @@ jobs: cache-on-failure: true - uses: taiki-e/install-action@nextest + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run rv32im extension crate tests working-directory: extensions/rv32im/circuit run: | diff --git a/.github/workflows/sdk.yml b/.github/workflows/sdk.yml index e53a08bf07..6ca42473de 100644 --- a/.github/workflows/sdk.yml +++ b/.github/workflows/sdk.yml @@ -6,7 +6,6 @@ on: pull_request: branches: ["**"] paths: - - "crates/stark-backend/**" - "crates/circuits/primitives/**" - "crates/vm/**" - "crates/sdk/**" @@ -36,6 +35,13 @@ jobs: - name: Install solc # svm should support arm64 linux run: (hash svm 2>/dev/null || cargo install --version 0.2.23 svm-rs) && svm install 0.8.19 && solc --version + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Install architecture specific tools run: | arch=$(uname -m) diff --git a/.github/workflows/stark-backend.yml b/.github/workflows/stark-backend.yml deleted file mode 100644 index ee57b3d200..0000000000 --- a/.github/workflows/stark-backend.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Stark Backend Tests - -on: - push: - branches: ["main"] - pull_request: - branches: ["**"] - paths: - - "crates/stark-backend/**" - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} - cancel-in-progress: true - -env: - CARGO_TERM_COLOR: always - -jobs: - build: - runs-on: - - runs-on=${{ github.run_id }} - - runner=16cpu-linux-arm64 - - 
steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - with: - cache-on-failure: true - - uses: taiki-e/install-action@nextest - - - name: Run tests - working-directory: crates/stark-backend - run: | - cargo nextest run --features parallel diff --git a/.github/workflows/toolchain-edsl.yml b/.github/workflows/toolchain-edsl.yml index fa19137c0f..3b2c70835b 100644 --- a/.github/workflows/toolchain-edsl.yml +++ b/.github/workflows/toolchain-edsl.yml @@ -32,6 +32,13 @@ jobs: cache-on-failure: true - uses: taiki-e/install-action@nextest + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run circuit and compiler tests working-directory: extensions/native/compiler run: | diff --git a/.github/workflows/toolchain.yml b/.github/workflows/toolchain.yml index 2064a0e9d8..ef7f1d98f5 100644 --- a/.github/workflows/toolchain.yml +++ b/.github/workflows/toolchain.yml @@ -32,6 +32,13 @@ jobs: cache-on-failure: true - uses: taiki-e/install-action@nextest + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run toolchain tests working-directory: crates/toolchain/tests run: | diff --git a/.github/workflows/vm.yml b/.github/workflows/vm.yml index 3bcfb0c130..e7f93c3889 100644 --- a/.github/workflows/vm.yml +++ b/.github/workflows/vm.yml @@ -6,7 +6,6 @@ on: pull_request: branches: ["**"] paths: - - "crates/stark-backend/**" - "crates/circuits/**" - "crates/vm/**" @@ -32,6 +31,13 @@ jobs: cache-on-failure: true - uses: taiki-e/install-action@nextest + # TEMPORARY + - name: Give GitHub Actions access to private repositories + uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.GH_ACTIONS_DEPLOY_PRIVATE_KEY }} + - name: Run vm crate tests working-directory: crates/vm run: | diff --git a/Cargo.lock b/Cargo.lock index 343163dcd0..bb247a4f3f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3419,49 +3419,34 @@ dependencies = [ [[package]] name = "openvm-stark-backend" -version = "0.1.0" +version = "0.1.0-alpha" +source = "git+ssh://git@github.com/openvm-org/stark-backend#79fcdbe321c21f2b146e1b4d58a88662c2abd6b4" dependencies = [ "async-trait", "cfg-if", - "csv", "derivative", - "eyre", "itertools 0.13.0", "metrics", "mimalloc", - "openvm-circuit-primitives-derive", - "openvm-stark-sdk", "p3-air", - "p3-baby-bear", "p3-challenger", "p3-commit", - "p3-dft", "p3-field", - "p3-fri", - "p3-goldilocks", - "p3-keccak", "p3-matrix", "p3-maybe-rayon", - "p3-mds", - "p3-merkle-tree", - "p3-poseidon2", - "p3-symmetric", "p3-uni-stark", "p3-util", - "rand", "rayon", "serde", - "serde_json", "thiserror 1.0.69", "tikv-jemallocator", "tracing", - "tracing-forest", - "tracing-subscriber", ] [[package]] name = "openvm-stark-sdk" -version = "0.1.0" +version = "0.1.0-alpha" +source = "git+ssh://git@github.com/openvm-org/stark-backend#79fcdbe321c21f2b146e1b4d58a88662c2abd6b4" dependencies = [ "derive_more 0.99.18", "ff 0.13.0", @@ -3469,8 +3454,6 @@ dependencies = [ "metrics", "metrics-tracing-context", "metrics-util", - "openvm-circuit-primitives-derive", - "openvm-instructions", "openvm-stark-backend", "p3-baby-bear", "p3-blake3", diff --git a/Cargo.toml b/Cargo.toml index 22d4b2dcba..109c11cfee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,8 +15,6 @@ members = [ 
"crates/circuits/poseidon2-air", "crates/circuits/primitives", "crates/circuits/primitives/derive", - "crates/stark-backend", - "crates/stark-sdk", "crates/toolchain/transpiler", "crates/toolchain/openvm", "crates/toolchain/build", @@ -107,8 +105,8 @@ openvm-platform = { path = "crates/toolchain/platform", default-features = false openvm-transpiler = { path = "crates/toolchain/transpiler", default-features = false } openvm-circuit = { path = "crates/vm", default-features = false } openvm-circuit-derive = { path = "crates/vm/derive", default-features = false } -openvm-stark-backend = { path = "crates/stark-backend", default-features = false } -openvm-stark-sdk = { path = "crates/stark-sdk", default-features = false } +openvm-stark-backend = { git = "ssh://git@github.com/openvm-org/stark-backend", default-features = false } +openvm-stark-sdk = { git = "ssh://git@github.com/openvm-org/stark-backend", default-features = false } # Extensions openvm-algebra-circuit = { path = "extensions/algebra/circuit", default-features = false } diff --git a/crates/stark-backend/Cargo.toml b/crates/stark-backend/Cargo.toml deleted file mode 100644 index 9d25e77ea1..0000000000 --- a/crates/stark-backend/Cargo.toml +++ /dev/null @@ -1,63 +0,0 @@ -[package] -name = "openvm-stark-backend" -version.workspace = true -authors.workspace = true -edition.workspace = true -description = "Multi-matrix STARK backend with logup built on top of Plonky3." - -[dependencies] -p3-air = { workspace = true } -p3-challenger = { workspace = true } -p3-commit = { workspace = true } -p3-field = { workspace = true } -p3-matrix = { workspace = true } -p3-maybe-rayon = { workspace = true } -p3-uni-stark = { workspace = true } -p3-util = { workspace = true } - -rayon = { workspace = true, optional = true } -itertools.workspace = true -tracing.workspace = true -serde = { workspace = true, default-features = false, features = [ - "derive", - "alloc", - "rc", -] } -derivative = { workspace = true } -metrics = { workspace = true, optional = true } -cfg-if = { workspace = true } -thiserror.workspace = true -async-trait.workspace = true - -[target.'cfg(unix)'.dependencies] -tikv-jemallocator = { version = "0.6", optional = true } - -mimalloc = { version = "0.1.43", optional = true } - -[dev-dependencies] -openvm-circuit-primitives-derive = { workspace = true } -openvm-stark-sdk = { workspace = true } - -p3-dft = { workspace = true } -p3-merkle-tree = { workspace = true } -p3-fri = { workspace = true } -p3-baby-bear = { workspace = true } -p3-poseidon2 = { workspace = true } -p3-keccak = { workspace = true } -p3-symmetric = { workspace = true } -p3-mds = { workspace = true } -p3-goldilocks = { workspace = true } - -rand = "0.8.5" -tracing-subscriber = { version = "0.3.17", features = ["std", "env-filter"] } -tracing-forest = { version = "0.1.6", features = ["ansi", "smallvec"] } -serde_json = "1.0.117" -csv = "1.3.0" -eyre = "0.6.12" - -[features] -default = ["parallel"] -parallel = ["p3-maybe-rayon/parallel", "dep:rayon"] -jemalloc = ["dep:tikv-jemallocator"] -jemalloc-prof = ["jemalloc", "tikv-jemallocator?/profiling"] -bench-metrics = ["dep:metrics"] diff --git a/crates/stark-backend/README.md b/crates/stark-backend/README.md deleted file mode 100644 index c89ba055a0..0000000000 --- a/crates/stark-backend/README.md +++ /dev/null @@ -1,93 +0,0 @@ -# STARK Backend - -The backend is a low-level API built on top of Plonky3. Its goal is to prove multiple STARKs presented in the form of multiple RAPs and their trace matrices. 
-The backend is not intended to own trace generation, with some caveats (see Interactive AIRs below).
-
-## RAPs
-
-A RAP is a Randomized AIR with Preprocessing. An AIR is an Algebraic Intermediate Representation. A RAP can be summarized as a way to specify specific gates on
-a trace matrix, where the trace matrix is segmented as follows:
-
-![RAP diagram](../assets/rap.png)
-
-where
-
-- preprocessed columns are fixed and do not change depending on the inputs
-- main columns are values the prover fills in on a per-proof basis, and the values can depend on inputs
-- there may be additional columns corresponding to trace challenge phases: in each such phase (if any), the prover generates some random challenges via Fiat-Shamir, after observing commitments to all columns in previous phases, including preprocessed and main. The RAP may then define constraints that use these random challenges as constants. Note that all `after_challenge` columns must be in the extension field.
-- constraints may depend on `public_values` which are viewable by prover and verifier
-- there may also be other `exposed_values_after_challenge` which are shared by both the prover and verifier. These values are public values that depend on the random challenges in each phase.
-
-Traditionally in STARKs, the preprocessed trace sub-matrix is committed into a single commitment. The main trace sub-matrix is committed into another **single** commitment.
-The sub-matrix in each phase of the `after_challenge` trace is committed into another
-single commitment. This uses a Matrix Commitment which commits to a matrix in-batch,
-instead of a single vector.
-
-To support _cached trace_, we extend the RAP interface to further allow **partitioning**
-the main trace matrix into sub-matrices, where each sub-matrix can be committed to
-separately.
-
-![RAP with partitioned main](../assets/rap_partitioned.png)
-
-Currently we only see a use case for partitioning the main trace matrix, and none of the other segments.
-
-## Multiple STARKs
-
-The backend supports the simultaneous proving of a system of multiple RAPs with trace matrices of different heights and widths. This brings additional nuance because Plonky3
-supports the notion of a Mixed Matrix Commitment Scheme (MMCS), which allows the
-simultaneous commitment to a set of matrices of different heights.
-
-![Multi RAPs](../assets/multi_trace_raps.png)
-
-The backend currently supports the following:
-
-- The preprocessed trace of each RAP is committed to individually.
-  - The motivation is to allow switching out subsets of RAPs in the system flexibly.
-  - If needed, it is possible to specify sets of RAPs that are always associated, so their preprocessed traces are always committed together
-- There is a set of main trace multi-matrix commitments shared amongst all RAPs, where
-  each part in the partition of the main trace of each RAP can belong to any of these commitments. The expectation is that most parts all share a single commitment, but
-  parts of the trace that can be cached should have their own dedicated commitment.
-- For each trace challenge phase, all trace matrices in that phase across all RAPs are
-  committed together.
-
-Due to the need to support cached trace, the backend does not fully own the
-trace commitment process, although it does provide simple APIs to assist the process - see `TraceCommitmentBuilder`.
-
-Given RAPs with all traces committed, the backend prover handles computations
-and commitment of quotient polynomials and FRI commit and query phases. This is
-done by `MultiTraceStarkProver::prove_raps_with_committed_traces`. This function
-should be able to support general RAPs as described in the previous section, but
-it does assume the `challenger` has already observed all trace commitments and public
-values.
-
-The general verifier is supported in `MultiTraceStarkVerifier::verify_raps`. This does
-handle all `challenger` observations of public values and trace commitments. The
-number of challenges to observe in between trace challenge phases is read from the
-partial verifying key.
-
-## Interactive AIRs
-
-There is currently no frontend to write general RAPs (e.g., a `RapBuilder`), although
-it is not difficult to add one.
-
-Instead, only a special type of RAP is supported: an AIR with Interactions.
-An AIR with preprocessed and main trace can be extended to a RAP
-with one challenge phase via the [Interactions API](./src/interaction/README.md).
-
-The backend currently has special support for Interactive AIRs, and completely owns
-the generation of the trace in the challenge phase for these RAPs -- for reference,
-Plonky3 refers to this phase's trace as the **permutation** trace.
-This is done in `MultiTraceStarkProver::prove`, which internally calls
-`prove_raps_with_committed_traces`.
-
-To fully support the Interaction API, the verifier also does a final cumulative
-sum check. This is done in `MultiTraceStarkVerifier::verify`.
-This can be framed as an additional operation to perform on the per-RAP
-exposed values after the challenge phase.
-
-## TODO
-
-Codify special verifier instructions for operations that should be performed on
-public values and exposed values, in a serializable way.
-These instructions should be extended to equality constraints between public values
-and trace commitments.
diff --git a/crates/stark-backend/src/air_builders/debug/check_constraints.rs b/crates/stark-backend/src/air_builders/debug/check_constraints.rs
deleted file mode 100644
index d82178a9e3..0000000000
--- a/crates/stark-backend/src/air_builders/debug/check_constraints.rs
+++ /dev/null
@@ -1,166 +0,0 @@
-use itertools::izip;
-use p3_air::BaseAir;
-use p3_field::{AbstractField, Field};
-use p3_matrix::{dense::RowMajorMatrixView, stack::VerticalPair, Matrix};
-use p3_maybe_rayon::prelude::*;
-
-use crate::{
-    air_builders::debug::DebugConstraintBuilder,
-    config::{StarkGenericConfig, Val},
-    interaction::{
-        debug::{generate_logical_interactions, LogicalInteractions},
-        InteractionType, RapPhaseSeqKind, SymbolicInteraction,
-    },
-    rap::{PartitionedBaseAir, Rap},
-};
-
-/// Check that all constraints vanish on the subgroup.
-#[allow(clippy::too_many_arguments)]
-pub fn check_constraints<R, SC>(
-    rap: &R,
-    rap_name: &str,
-    preprocessed: &Option<RowMajorMatrixView<Val<SC>>>,
-    partitioned_main: &[RowMajorMatrixView<Val<SC>>],
-    after_challenge: &[RowMajorMatrixView<SC::Challenge>],
-    challenges: &[Vec<SC::Challenge>],
-    public_values: &[Val<SC>],
-    exposed_values_after_challenge: &[Vec<SC::Challenge>],
-    rap_phase_seq_kind: RapPhaseSeqKind,
-) where
-    R: for<'a> Rap<DebugConstraintBuilder<'a, SC>>
-        + BaseAir<Val<SC>>
-        + PartitionedBaseAir<Val<SC>>
-        + ?Sized,
-    SC: StarkGenericConfig,
-{
-    let height = partitioned_main[0].height();
-    assert!(partitioned_main.iter().all(|mat| mat.height() == height));
-    assert!(after_challenge.iter().all(|mat| mat.height() == height));
-
-    // Check that constraints are satisfied.
-    (0..height).into_par_iter().for_each(|i| {
-        let i_next = (i + 1) % height;
-
-        let (preprocessed_local, preprocessed_next) = preprocessed
-            .as_ref()
-            .map(|preprocessed| {
-                (
-                    preprocessed.row_slice(i).to_vec(),
-                    preprocessed.row_slice(i_next).to_vec(),
-                )
-            })
-            .unwrap_or((vec![], vec![]));
-
-        let partitioned_main_row_pair = partitioned_main
-            .iter()
-            .map(|part| (part.row_slice(i), part.row_slice(i_next)))
-            .collect::<Vec<_>>();
-        let partitioned_main = partitioned_main_row_pair
-            .iter()
-            .map(|(local, next)| {
-                VerticalPair::new(
-                    RowMajorMatrixView::new_row(local),
-                    RowMajorMatrixView::new_row(next),
-                )
-            })
-            .collect::<Vec<_>>();
-
-        let after_challenge_row_pair = after_challenge
-            .iter()
-            .map(|mat| (mat.row_slice(i), mat.row_slice(i_next)))
-            .collect::<Vec<_>>();
-        let after_challenge = after_challenge_row_pair
-            .iter()
-            .map(|(local, next)| {
-                VerticalPair::new(
-                    RowMajorMatrixView::new_row(local),
-                    RowMajorMatrixView::new_row(next),
-                )
-            })
-            .collect::<Vec<_>>();
-
-        let mut builder = DebugConstraintBuilder {
-            air_name: rap_name,
-            row_index: i,
-            preprocessed: VerticalPair::new(
-                RowMajorMatrixView::new_row(preprocessed_local.as_slice()),
-                RowMajorMatrixView::new_row(preprocessed_next.as_slice()),
-            ),
-            partitioned_main,
-            after_challenge,
-            challenges,
-            public_values,
-            exposed_values_after_challenge,
-            is_first_row: Val::<SC>::ZERO,
-            is_last_row: Val::<SC>::ZERO,
-            is_transition: Val::<SC>::ONE,
-            rap_phase_seq_kind,
-            has_common_main: rap.common_main_width() > 0,
-        };
-        if i == 0 {
-            builder.is_first_row = Val::<SC>::ONE;
-        }
-        if i == height - 1 {
-            builder.is_last_row = Val::<SC>::ONE;
-            builder.is_transition = Val::<SC>::ZERO;
-        }
-
-        rap.eval(&mut builder);
-    });
-}
-
-pub fn check_logup<F: Field>(
-    air_names: &[String],
-    interactions: &[&[SymbolicInteraction<F>]],
-    preprocessed: &[Option<RowMajorMatrixView<F>>],
-    partitioned_main: &[Vec<RowMajorMatrixView<F>>],
-    public_values: &[Vec<F>],
-) {
-    let mut logical_interactions = LogicalInteractions::<F>::default();
-    for (air_idx, (interactions, preprocessed, partitioned_main, public_values)) in
-        izip!(interactions, preprocessed, partitioned_main, public_values).enumerate()
-    {
-        generate_logical_interactions(
-            air_idx,
-            interactions,
-            preprocessed,
-            partitioned_main,
-            public_values,
-            &mut logical_interactions,
-        );
-    }
-
-    let mut logup_failed = false;
-    // For each bus, check each `fields` key by summing up multiplicities.
-    for (bus_idx, bus_interactions) in logical_interactions.at_bus.into_iter() {
-        for (fields, connections) in bus_interactions.into_iter() {
-            let mut sum = F::ZERO;
-            for (_, itype, count) in &connections {
-                match *itype {
-                    InteractionType::Send => {
-                        sum += *count;
-                    }
-                    InteractionType::Receive => {
-                        sum -= *count;
-                    }
-                }
-            }
-            if !sum.is_zero() {
-                logup_failed = true;
-                println!(
-                    "Bus {} failed to balance the multiplicities for fields={:?}. The bus connections for this were:",
-                    bus_idx, fields
-                );
-                for (air_idx, itype, count) in connections {
-                    println!(
-                        "  Air idx: {}, Air name: {}, interaction type: {:?}, count: {:?}",
-                        air_idx, air_names[air_idx], itype, count
-                    );
-                }
-            }
-        }
-    }
-    if logup_failed {
-        panic!("LogUp multiset equality check failed.");
-    }
-}
diff --git a/crates/stark-backend/src/air_builders/debug/mod.rs b/crates/stark-backend/src/air_builders/debug/mod.rs
deleted file mode 100644
index e5ddbf297c..0000000000
--- a/crates/stark-backend/src/air_builders/debug/mod.rs
+++ /dev/null
@@ -1,231 +0,0 @@
-use p3_air::{
-    AirBuilder, AirBuilderWithPublicValues, ExtensionBuilder, PairBuilder, PermutationAirBuilder,
-};
-use p3_field::AbstractField;
-use p3_matrix::{dense::RowMajorMatrixView, stack::VerticalPair};
-
-use super::{PartitionedAirBuilder, ViewPair};
-use crate::{
-    config::{StarkGenericConfig, Val},
-    interaction::{
-        rap::InteractionPhaseAirBuilder, Interaction, InteractionBuilder, InteractionType,
-        RapPhaseSeqKind,
-    },
-    rap::PermutationAirBuilderWithExposedValues,
-};
-
-pub mod check_constraints;
-
-/// An `AirBuilder` which asserts that each constraint is zero, allowing any failed constraints to
-/// be detected early.
-pub struct DebugConstraintBuilder<'a, SC: StarkGenericConfig> {
-    pub air_name: &'a str,
-    pub row_index: usize,
-    pub preprocessed: ViewPair<'a, Val<SC>>,
-    pub partitioned_main: Vec<ViewPair<'a, Val<SC>>>,
-    pub after_challenge: Vec<ViewPair<'a, SC::Challenge>>,
-    pub challenges: &'a [Vec<SC::Challenge>],
-    pub is_first_row: Val<SC>,
-    pub is_last_row: Val<SC>,
-    pub is_transition: Val<SC>,
-    pub public_values: &'a [Val<SC>],
-    pub exposed_values_after_challenge: &'a [Vec<SC::Challenge>],
-    pub rap_phase_seq_kind: RapPhaseSeqKind,
-    pub has_common_main: bool,
-}
-
-impl<'a, SC> AirBuilder for DebugConstraintBuilder<'a, SC>
-where
-    SC: StarkGenericConfig,
-{
-    type F = Val<SC>;
-    type Expr = Val<SC>;
-    type Var = Val<SC>;
-    type M = VerticalPair<RowMajorMatrixView<'a, Val<SC>>, RowMajorMatrixView<'a, Val<SC>>>;
-
-    /// It is difficult to horizontally concatenate matrices when the main trace is partitioned, so we disable this method in that case.
-    fn main(&self) -> Self::M {
-        if self.partitioned_main.len() == 1 {
-            self.partitioned_main[0]
-        } else {
-            panic!("Main trace is either empty or partitioned. This function should not be used.")
-        }
-    }
-
-    fn is_first_row(&self) -> Self::Expr {
-        self.is_first_row
-    }
-
-    fn is_last_row(&self) -> Self::Expr {
-        self.is_last_row
-    }
-
-    fn is_transition_window(&self, size: usize) -> Self::Expr {
-        if size == 2 {
-            self.is_transition
-        } else {
-            panic!("only supports a window size of 2")
-        }
-    }
-
-    fn assert_zero<I: Into<Self::Expr>>(&mut self, x: I) {
-        assert_eq!(
-            x.into(),
-            Val::<SC>::ZERO,
-            "constraints had nonzero value on air {}, row {}",
-            self.air_name,
-            self.row_index
-        );
-    }
-
-    fn assert_eq<I1: Into<Self::Expr>, I2: Into<Self::Expr>>(&mut self, x: I1, y: I2) {
-        let x = x.into();
-        let y = y.into();
-        assert_eq!(
-            x, y,
-            "values didn't match on air {}, row {}: {} != {}",
-            self.air_name, self.row_index, x, y
-        );
-    }
-}
-
-impl<SC> PairBuilder for DebugConstraintBuilder<'_, SC>
-where
-    SC: StarkGenericConfig,
-{
-    fn preprocessed(&self) -> Self::M {
-        self.preprocessed
-    }
-}
-
-impl<SC> ExtensionBuilder for DebugConstraintBuilder<'_, SC>
-where
-    SC: StarkGenericConfig,
-{
-    type EF = SC::Challenge;
-    type ExprEF = SC::Challenge;
-    type VarEF = SC::Challenge;
-
-    fn assert_zero_ext<I>(&mut self, x: I)
-    where
-        I: Into<Self::ExprEF>,
-    {
-        assert_eq!(
-            x.into(),
-            SC::Challenge::ZERO,
-            "constraints had nonzero value on row {}",
-            self.row_index
-        );
-    }
-
-    fn assert_eq_ext<I1, I2>(&mut self, x: I1, y: I2)
-    where
-        I1: Into<Self::ExprEF>,
-        I2: Into<Self::ExprEF>,
-    {
-        let x = x.into();
-        let y = y.into();
-        assert_eq!(
-            x, y,
-            "values didn't match on air {}, row {}: {} != {}",
-            self.air_name, self.row_index, x, y
-        );
-    }
-}
-
-impl<'a, SC> PermutationAirBuilder for DebugConstraintBuilder<'a, SC>
-where
-    SC: StarkGenericConfig,
-{
-    type MP = ViewPair<'a, SC::Challenge>;
-
-    type RandomVar = SC::Challenge;
-
-    fn permutation(&self) -> Self::MP {
-        *self
-            .after_challenge
-            .first()
-            .expect("Challenge phase not supported")
-    }
-
-    fn permutation_randomness(&self) -> &[Self::EF] {
-        self.challenges
-            .first()
-            .expect("Challenge phase not supported")
-    }
-}
-
-impl<SC> AirBuilderWithPublicValues for DebugConstraintBuilder<'_, SC>
-where
-    SC: StarkGenericConfig,
-{
-    type PublicVar = Val<SC>;
-
-    fn public_values(&self) -> &[Self::F] {
-        self.public_values
-    }
-}
-
-impl<SC> PermutationAirBuilderWithExposedValues for DebugConstraintBuilder<'_, SC>
-where
-    SC: StarkGenericConfig,
-{
-    fn permutation_exposed_values(&self) -> &[Self::EF] {
-        self.exposed_values_after_challenge
-            .first()
-            .expect("Challenge phase not supported")
-    }
-}
-
-impl<SC> PartitionedAirBuilder for DebugConstraintBuilder<'_, SC>
-where
-    SC: StarkGenericConfig,
-{
-    fn cached_mains(&self) -> &[Self::M] {
-        let mut num_cached_mains = self.partitioned_main.len();
-        if self.has_common_main {
-            num_cached_mains -= 1;
-        }
-        &self.partitioned_main[..num_cached_mains]
-    }
-    fn common_main(&self) -> &Self::M {
-        assert!(self.has_common_main, "AIR doesn't have a common main trace");
-        self.partitioned_main.last().unwrap()
-    }
-}
-
-// No-op implementation
-impl<SC> InteractionBuilder for DebugConstraintBuilder<'_, SC>
-where
-    SC: StarkGenericConfig,
-{
-    fn push_interaction<E: Into<Self::Expr>>(
-        &mut self,
-        _bus_index: usize,
-        _fields: impl IntoIterator<Item = E>,
-        _count: impl Into<Self::Expr>,
-        _interaction_type: InteractionType,
-    ) {
-        // no-op, interactions are debugged elsewhere
-    }
-
-    fn num_interactions(&self) -> usize {
-        0
-    }
-
-    fn all_interactions(&self) -> &[Interaction<Self::Expr>] {
-        &[]
-    }
-}
-
-impl<SC: StarkGenericConfig> InteractionPhaseAirBuilder for DebugConstraintBuilder<'_, SC> {
-    fn finalize_interactions(&mut self) {}
-
-    fn interaction_chunk_size(&self) -> usize {
-        0
-    }
-
-    fn rap_phase_seq_kind(&self) -> RapPhaseSeqKind {
- self.rap_phase_seq_kind - } -} diff --git a/crates/stark-backend/src/air_builders/mod.rs b/crates/stark-backend/src/air_builders/mod.rs deleted file mode 100644 index 229272765b..0000000000 --- a/crates/stark-backend/src/air_builders/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -use p3_air::AirBuilder; -use p3_matrix::{dense::RowMajorMatrixView, stack::VerticalPair}; - -pub mod debug; -pub mod prover; -pub mod sub; -pub mod symbolic; -pub mod verifier; - -pub type ViewPair<'a, T> = VerticalPair, RowMajorMatrixView<'a, T>>; - -/// AIR builder that supports main trace matrix which is partitioned -/// into sub-matrices which belong to different commitments. -pub trait PartitionedAirBuilder: AirBuilder { - /// Cached main trace matrix. - fn cached_mains(&self) -> &[Self::M]; - /// Common main trace matrix. Panic if there is no common main trace. - fn common_main(&self) -> &Self::M; -} diff --git a/crates/stark-backend/src/air_builders/prover.rs b/crates/stark-backend/src/air_builders/prover.rs deleted file mode 100644 index b0a5c01ba6..0000000000 --- a/crates/stark-backend/src/air_builders/prover.rs +++ /dev/null @@ -1,232 +0,0 @@ -// Folder: Folding builder -use p3_air::{ - AirBuilder, AirBuilderWithPublicValues, ExtensionBuilder, PairBuilder, PermutationAirBuilder, -}; -use p3_field::AbstractField; -use p3_matrix::Matrix; - -use super::{ - symbolic::{ - symbolic_expression::SymbolicEvaluator, - symbolic_variable::{Entry, SymbolicVariable}, - }, - PartitionedAirBuilder, ViewPair, -}; -use crate::{ - config::{PackedChallenge, PackedVal, StarkGenericConfig, Val}, - interaction::{ - rap::InteractionPhaseAirBuilder, Interaction, InteractionBuilder, InteractionType, - RapPhaseSeqKind, - }, - rap::PermutationAirBuilderWithExposedValues, -}; - -/// A folder for prover constraints. -pub struct ProverConstraintFolder<'a, SC: StarkGenericConfig> { - pub preprocessed: ViewPair<'a, PackedVal>, - pub partitioned_main: Vec>>, - pub after_challenge: Vec>>, - pub challenges: &'a [Vec>], - pub is_first_row: PackedVal, - pub is_last_row: PackedVal, - pub is_transition: PackedVal, - pub alpha_powers: &'a [SC::Challenge], - pub accumulator: PackedChallenge, - pub constraint_index: usize, - pub public_values: &'a [Val], - pub exposed_values_after_challenge: &'a [&'a [PackedChallenge]], - pub interactions: Vec>>, - /// Number of interactions to bundle in permutation trace, if applicable. - pub interaction_chunk_size: usize, - pub rap_phase_seq_kind: RapPhaseSeqKind, - pub has_common_main: bool, -} - -impl<'a, SC> AirBuilder for ProverConstraintFolder<'a, SC> -where - SC: StarkGenericConfig, -{ - type F = Val; - type Expr = PackedVal; - type Var = PackedVal; - type M = ViewPair<'a, PackedVal>; - - /// It is difficult to horizontally concatenate matrices when the main trace is partitioned, so we disable this method in that case. - fn main(&self) -> Self::M { - if self.partitioned_main.len() == 1 { - self.partitioned_main[0] - } else { - panic!("Main trace is either empty or partitioned. 
This function should not be used.") - } - } - - fn is_first_row(&self) -> Self::Expr { - self.is_first_row - } - - fn is_last_row(&self) -> Self::Expr { - self.is_last_row - } - - fn is_transition_window(&self, size: usize) -> Self::Expr { - if size == 2 { - self.is_transition - } else { - panic!("only supports a window size of 2") - } - } - - fn assert_zero>(&mut self, x: I) { - let x: PackedVal = x.into(); - let alpha_power = self.alpha_powers[self.constraint_index]; - self.accumulator += PackedChallenge::::from_f(alpha_power) * x; - self.constraint_index += 1; - } -} - -impl PairBuilder for ProverConstraintFolder<'_, SC> -where - SC: StarkGenericConfig, -{ - fn preprocessed(&self) -> Self::M { - self.preprocessed - } -} - -impl ExtensionBuilder for ProverConstraintFolder<'_, SC> -where - SC: StarkGenericConfig, -{ - type EF = SC::Challenge; - type ExprEF = PackedChallenge; - type VarEF = PackedChallenge; - - fn assert_zero_ext(&mut self, x: I) - where - I: Into, - { - let x: PackedChallenge = x.into(); - let alpha_power = self.alpha_powers[self.constraint_index]; - self.accumulator += PackedChallenge::::from_f(alpha_power) * x; - self.constraint_index += 1; - } -} - -impl AirBuilderWithPublicValues for ProverConstraintFolder<'_, SC> { - type PublicVar = Self::F; - - fn public_values(&self) -> &[Self::F] { - self.public_values - } -} - -// PermutationAirBuilder is just a special kind of RAP builder -impl<'a, SC> PermutationAirBuilder for ProverConstraintFolder<'a, SC> -where - SC: StarkGenericConfig, -{ - type MP = ViewPair<'a, PackedChallenge>; - - type RandomVar = PackedChallenge; - - fn permutation(&self) -> Self::MP { - *self - .after_challenge - .first() - .expect("Challenge phase not supported") - } - - fn permutation_randomness(&self) -> &[Self::RandomVar] { - self.challenges - .first() - .map(|c| c.as_slice()) - .expect("Challenge phase not supported") - } -} - -impl PermutationAirBuilderWithExposedValues for ProverConstraintFolder<'_, SC> -where - SC: StarkGenericConfig, -{ - fn permutation_exposed_values(&self) -> &[Self::VarEF] { - self.exposed_values_after_challenge - .first() - .expect("Challenge phase not supported") - } -} - -impl PartitionedAirBuilder for ProverConstraintFolder<'_, SC> -where - SC: StarkGenericConfig, -{ - fn cached_mains(&self) -> &[Self::M] { - let mut num_cached_mains = self.partitioned_main.len(); - if self.has_common_main { - num_cached_mains -= 1; - } - &self.partitioned_main[..num_cached_mains] - } - fn common_main(&self) -> &Self::M { - assert!(self.has_common_main, "AIR doesn't have a common main trace"); - self.partitioned_main.last().unwrap() - } -} - -impl InteractionBuilder for ProverConstraintFolder<'_, SC> -where - SC: StarkGenericConfig, -{ - fn push_interaction>( - &mut self, - bus_index: usize, - fields: impl IntoIterator, - count: impl Into, - interaction_type: InteractionType, - ) { - let fields = fields.into_iter().map(|f| f.into()).collect(); - let count = count.into(); - self.interactions.push(Interaction { - bus_index, - fields, - count, - interaction_type, - }); - } - - fn num_interactions(&self) -> usize { - self.interactions.len() - } - - fn all_interactions(&self) -> &[Interaction] { - &self.interactions - } -} - -impl SymbolicEvaluator, PackedVal> for ProverConstraintFolder<'_, SC> -where - SC: StarkGenericConfig, -{ - fn eval_var(&self, symbolic_var: SymbolicVariable>) -> PackedVal { - let index = symbolic_var.index; - match symbolic_var.entry { - Entry::Preprocessed { offset } => self.preprocessed.get(offset, index), - 
Entry::Main { part_index, offset } => { - self.partitioned_main[part_index].get(offset, index) - } - Entry::Public => self.public_values[index].into(), - _ => panic!("After challenge evaluation not allowed"), - } - } -} - -impl InteractionPhaseAirBuilder for ProverConstraintFolder<'_, SC> { - fn finalize_interactions(&mut self) {} - - fn interaction_chunk_size(&self) -> usize { - self.interaction_chunk_size - } - - fn rap_phase_seq_kind(&self) -> RapPhaseSeqKind { - self.rap_phase_seq_kind - } -} diff --git a/crates/stark-backend/src/air_builders/sub.rs b/crates/stark-backend/src/air_builders/sub.rs deleted file mode 100644 index d5679cfcaa..0000000000 --- a/crates/stark-backend/src/air_builders/sub.rs +++ /dev/null @@ -1,106 +0,0 @@ -//! Copied from sp1/core under MIT license - -use std::{ - iter::{Skip, Take}, - ops::{Deref, Range}, -}; - -use p3_air::{AirBuilder, BaseAir}; -use p3_matrix::Matrix; - -/// A submatrix of a matrix. The matrix will contain a subset of the columns of `self.inner`. -pub struct SubMatrixRowSlices, T: Send + Sync> { - inner: M, - column_range: Range, - _phantom: std::marker::PhantomData, -} - -impl, T: Send + Sync> SubMatrixRowSlices { - pub const fn new(inner: M, column_range: Range) -> Self { - Self { - inner, - column_range, - _phantom: std::marker::PhantomData, - } - } -} - -/// Implement `Matrix` for `SubMatrixRowSlices`. -impl, T: Send + Sync> Matrix for SubMatrixRowSlices { - type Row<'a> - = Skip>> - where - Self: 'a; - - #[inline] - fn row(&self, r: usize) -> Self::Row<'_> { - self.inner - .row(r) - .take(self.column_range.end) - .skip(self.column_range.start) - } - - #[inline] - fn row_slice(&self, r: usize) -> impl Deref { - self.row(r).collect::>() - } - - #[inline] - fn width(&self) -> usize { - self.column_range.len() - } - - #[inline] - fn height(&self) -> usize { - self.inner.height() - } -} - -/// A builder used to eval a sub-air. This will handle enforcing constraints for a subset of a -/// trace matrix. E.g. if a particular air needs to be enforced for a subset of the columns of -/// the trace, then the SubAirBuilder can be used. -pub struct SubAirBuilder<'a, AB: AirBuilder, SubAir: BaseAir, T> { - inner: &'a mut AB, - column_range: Range, - _phantom: std::marker::PhantomData<(SubAir, T)>, -} - -impl<'a, AB: AirBuilder, SubAir: BaseAir, T> SubAirBuilder<'a, AB, SubAir, T> { - pub fn new(inner: &'a mut AB, column_range: Range) -> Self { - Self { - inner, - column_range, - _phantom: std::marker::PhantomData, - } - } -} - -/// Implement `AirBuilder` for `SubAirBuilder`. 
-impl, F> AirBuilder for SubAirBuilder<'_, AB, SubAir, F> { - type F = AB::F; - type Expr = AB::Expr; - type Var = AB::Var; - type M = SubMatrixRowSlices; - - fn main(&self) -> Self::M { - let matrix = self.inner.main(); - - SubMatrixRowSlices::new(matrix, self.column_range.clone()) - } - - fn is_first_row(&self) -> Self::Expr { - self.inner.is_first_row() - } - - fn is_last_row(&self) -> Self::Expr { - self.inner.is_last_row() - } - - fn is_transition_window(&self, size: usize) -> Self::Expr { - self.inner.is_transition_window(size) - } - - fn assert_zero>(&mut self, x: I) { - self.inner.assert_zero(x.into()); - } -} diff --git a/crates/stark-backend/src/air_builders/symbolic/mod.rs b/crates/stark-backend/src/air_builders/symbolic/mod.rs deleted file mode 100644 index 3dc01bf46f..0000000000 --- a/crates/stark-backend/src/air_builders/symbolic/mod.rs +++ /dev/null @@ -1,467 +0,0 @@ -// Copied from uni-stark/src/symbolic_builder.rs to allow A: ?Sized - -use itertools::Itertools; -use p3_air::{ - AirBuilder, AirBuilderWithPublicValues, ExtensionBuilder, PairBuilder, PermutationAirBuilder, -}; -use p3_field::Field; -use p3_matrix::{dense::RowMajorMatrix, Matrix}; -use p3_util::log2_ceil_usize; -use serde::{Deserialize, Serialize}; -use tracing::instrument; - -use self::{ - symbolic_expression::SymbolicExpression, - symbolic_variable::{Entry, SymbolicVariable}, -}; -use super::PartitionedAirBuilder; -use crate::{ - interaction::{ - rap::InteractionPhaseAirBuilder, Interaction, InteractionBuilder, InteractionType, - RapPhaseSeqKind, SymbolicInteraction, - }, - keygen::types::{StarkVerifyingParams, TraceWidth}, - rap::{BaseAirWithPublicValues, PermutationAirBuilderWithExposedValues, Rap}, -}; - -pub mod symbolic_expression; -pub mod symbolic_variable; - -/// Symbolic constraints for a single AIR with interactions. -/// The constraints contain the constraints on the logup partial sums. -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(bound = "F: Field")] -pub struct SymbolicConstraints { - /// All constraints of the RAP, including the constraints on the logup partial sums. - pub constraints: Vec>, - /// Only for debug purposes. `constraints` also contains the constraints on the logup partial sums. - pub interactions: Vec>>, -} - -impl SymbolicConstraints { - pub fn max_constraint_degree(&self) -> usize { - Iterator::max(self.constraints.iter().map(|c| c.degree_multiple())).unwrap_or(0) - } - - pub fn get_log_quotient_degree(&self) -> usize { - // We pad to at least degree 2, since a quotient argument doesn't make sense with smaller degrees. - let constraint_degree = self.max_constraint_degree().max(2); - - // The quotient's actual degree is approximately (max_constraint_degree - 1) * (trace height), - // where subtracting 1 comes from division by the zerofier. - // But we pad it to a power of two so that we can efficiently decompose the quotient. 
- log2_ceil_usize(constraint_degree - 1) - } - - /// Returns the maximum field degree and count degree across all interactions - pub fn max_interaction_degrees(&self) -> (usize, usize) { - let max_field_degree = self - .interactions - .iter() - .map(|interaction| { - interaction - .fields - .iter() - .map(|field| field.degree_multiple()) - .max() - .unwrap_or(0) - }) - .max() - .unwrap_or(0); - - let max_count_degree = self - .interactions - .iter() - .map(|interaction| interaction.count.degree_multiple()) - .max() - .unwrap_or(0); - - (max_field_degree, max_count_degree) - } -} - -#[instrument(name = "evaluate constraints symbolically", skip_all, level = "debug")] -pub fn get_symbolic_builder( - rap: &R, - width: &TraceWidth, - num_challenges_to_sample: &[usize], - num_exposed_values_after_challenge: &[usize], - rap_phase_seq_kind: RapPhaseSeqKind, - interaction_chunk_size: usize, -) -> SymbolicRapBuilder -where - F: Field, - R: Rap> + BaseAirWithPublicValues + ?Sized, -{ - let mut builder = SymbolicRapBuilder::new( - width, - rap.num_public_values(), - num_challenges_to_sample, - num_exposed_values_after_challenge, - rap_phase_seq_kind, - interaction_chunk_size, - ); - Rap::eval(rap, &mut builder); - builder -} - -/// An `AirBuilder` for evaluating constraints symbolically, and recording them for later use. -#[derive(Debug)] -pub struct SymbolicRapBuilder { - preprocessed: RowMajorMatrix>, - partitioned_main: Vec>>, - after_challenge: Vec>>, - public_values: Vec>, - challenges: Vec>>, - exposed_values_after_challenge: Vec>>, - constraints: Vec>, - interactions: Vec>, - interaction_chunk_size: usize, - rap_phase_seq_kind: RapPhaseSeqKind, - trace_width: TraceWidth, -} - -impl SymbolicRapBuilder { - /// - `num_challenges_to_sample`: for each challenge phase, how many challenges to sample - /// - `num_exposed_values_after_challenge`: in each challenge phase, how many values to expose to verifier - pub(crate) fn new( - // FIXME: width.after_challenge is incorrect. It cannot be determined when the function is called. 
- width: &TraceWidth, - num_public_values: usize, - num_challenges_to_sample: &[usize], - num_exposed_values_after_challenge: &[usize], - rap_phase_seq_kind: RapPhaseSeqKind, - interaction_chunk_size: usize, - ) -> Self { - let preprocessed_width = width.preprocessed.unwrap_or(0); - let prep_values = [0, 1] - .into_iter() - .flat_map(|offset| { - (0..width.preprocessed.unwrap_or(0)) - .map(move |index| SymbolicVariable::new(Entry::Preprocessed { offset }, index)) - }) - .collect(); - let preprocessed = RowMajorMatrix::new(prep_values, preprocessed_width); - - let mut partitioned_main: Vec<_> = width - .cached_mains - .iter() - .enumerate() - .map(|(part_index, &width)| gen_main_trace(part_index, width)) - .collect(); - if width.common_main != 0 { - partitioned_main.push(gen_main_trace(width.cached_mains.len(), width.common_main)); - } - let after_challenge = Self::new_after_challenge(&width.after_challenge); - - let public_values = (0..num_public_values) - .map(move |index| SymbolicVariable::new(Entry::Public, index)) - .collect(); - - let challenges = Self::new_challenges(num_challenges_to_sample); - - let exposed_values_after_challenge = - Self::new_exposed_values_after_challenge(num_exposed_values_after_challenge); - - Self { - preprocessed, - partitioned_main, - after_challenge, - public_values, - challenges, - exposed_values_after_challenge, - constraints: vec![], - interactions: vec![], - interaction_chunk_size, - rap_phase_seq_kind, - trace_width: width.clone(), - } - } - - pub fn constraints(self) -> SymbolicConstraints { - SymbolicConstraints { - constraints: self.constraints, - interactions: self.interactions, - } - } - - pub fn params(&self) -> StarkVerifyingParams { - let width = self.width(); - let num_exposed_values_after_challenge = self.num_exposed_values_after_challenge(); - let num_challenges_to_sample = self.num_challenges_to_sample(); - StarkVerifyingParams { - width, - num_public_values: self.public_values.len(), - num_exposed_values_after_challenge, - num_challenges_to_sample, - } - } - - pub fn width(&self) -> TraceWidth { - let mut ret = self.trace_width.clone(); - ret.after_challenge = self.after_challenge.iter().map(|m| m.width()).collect(); - ret - } - - pub fn num_exposed_values_after_challenge(&self) -> Vec { - self.exposed_values_after_challenge - .iter() - .map(|c| c.len()) - .collect() - } - - pub fn num_challenges_to_sample(&self) -> Vec { - self.challenges.iter().map(|c| c.len()).collect() - } - - fn new_after_challenge( - width_after_phase: &[usize], - ) -> Vec>> { - width_after_phase - .iter() - .map(|&width| { - let mat_values = [0, 1] - .into_iter() - .flat_map(|offset| { - (0..width).map(move |index| { - SymbolicVariable::new(Entry::Permutation { offset }, index) - }) - }) - .collect_vec(); - RowMajorMatrix::new(mat_values, width) - }) - .collect_vec() - } - - fn new_challenges(num_challenges_to_sample: &[usize]) -> Vec>> { - num_challenges_to_sample - .iter() - .map(|&num_challenges| { - (0..num_challenges) - .map(|index| SymbolicVariable::new(Entry::Challenge, index)) - .collect_vec() - }) - .collect_vec() - } - - fn new_exposed_values_after_challenge( - num_exposed_values_after_challenge: &[usize], - ) -> Vec>> { - num_exposed_values_after_challenge - .iter() - .map(|&num| { - (0..num) - .map(|index| SymbolicVariable::new(Entry::Exposed, index)) - .collect_vec() - }) - .collect_vec() - } -} - -impl AirBuilder for SymbolicRapBuilder { - type F = F; - type Expr = SymbolicExpression; - type Var = SymbolicVariable; - type M = RowMajorMatrix; - - /// It 
is difficult to horizontally concatenate matrices when the main trace is partitioned, so we disable this method in that case. - fn main(&self) -> Self::M { - if self.partitioned_main.len() == 1 { - self.partitioned_main[0].clone() - } else { - panic!("Main trace is either empty or partitioned. This function should not be used.") - } - } - - fn is_first_row(&self) -> Self::Expr { - SymbolicExpression::IsFirstRow - } - - fn is_last_row(&self) -> Self::Expr { - SymbolicExpression::IsLastRow - } - - fn is_transition_window(&self, size: usize) -> Self::Expr { - if size == 2 { - SymbolicExpression::IsTransition - } else { - panic!("uni-stark only supports a window size of 2") - } - } - - fn assert_zero>(&mut self, x: I) { - self.constraints.push(x.into()); - } -} - -impl PairBuilder for SymbolicRapBuilder { - fn preprocessed(&self) -> Self::M { - self.preprocessed.clone() - } -} - -impl ExtensionBuilder for SymbolicRapBuilder { - type EF = F; - type ExprEF = SymbolicExpression; - type VarEF = SymbolicVariable; - - fn assert_zero_ext(&mut self, x: I) - where - I: Into, - { - self.constraints.push(x.into()); - } -} - -impl AirBuilderWithPublicValues for SymbolicRapBuilder { - type PublicVar = SymbolicVariable; - - fn public_values(&self) -> &[Self::PublicVar] { - &self.public_values - } -} - -impl PermutationAirBuilder for SymbolicRapBuilder { - type MP = RowMajorMatrix; - type RandomVar = SymbolicVariable; - - fn permutation(&self) -> Self::MP { - self.after_challenge - .first() - .expect("Challenge phase not supported") - .clone() - } - - fn permutation_randomness(&self) -> &[Self::RandomVar] { - self.challenges - .first() - .map(|c| c.as_slice()) - .expect("Challenge phase not supported") - } -} - -impl PermutationAirBuilderWithExposedValues for SymbolicRapBuilder { - fn permutation_exposed_values(&self) -> &[Self::VarEF] { - self.exposed_values_after_challenge - .first() - .map(|c| c.as_slice()) - .expect("Challenge phase not supported") - } -} - -impl InteractionBuilder for SymbolicRapBuilder { - fn push_interaction>( - &mut self, - bus_index: usize, - fields: impl IntoIterator, - count: impl Into, - interaction_type: InteractionType, - ) { - let fields = fields.into_iter().map(|f| f.into()).collect(); - let count = count.into(); - self.interactions.push(Interaction { - bus_index, - fields, - count, - interaction_type, - }); - } - - fn num_interactions(&self) -> usize { - self.interactions.len() - } - - fn all_interactions(&self) -> &[Interaction] { - &self.interactions - } -} - -impl InteractionPhaseAirBuilder for SymbolicRapBuilder { - fn finalize_interactions(&mut self) { - let num_interactions = self.num_interactions(); - if num_interactions != 0 { - assert!( - self.after_challenge.is_empty(), - "after_challenge width should be auto-populated by the InteractionBuilder" - ); - assert!(self.challenges.is_empty()); - assert!(self.exposed_values_after_challenge.is_empty()); - - let perm_width = num_interactions.div_ceil(self.interaction_chunk_size) + 1; - self.after_challenge = Self::new_after_challenge(&[perm_width]); - - let phases_shapes = self.rap_phase_seq_kind.shape(); - let phase_shape = phases_shapes.first().unwrap(); - - self.challenges = Self::new_challenges(&[phase_shape.num_challenges]); - self.exposed_values_after_challenge = - Self::new_exposed_values_after_challenge(&[phase_shape.num_exposed_values]); - } - } - - fn interaction_chunk_size(&self) -> usize { - self.interaction_chunk_size - } - - fn rap_phase_seq_kind(&self) -> RapPhaseSeqKind { - self.rap_phase_seq_kind - } -} - 
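-// Illustrative sketch (not part of this file): a hypothetical two-column AIR whose
-// `eval` asserts `b = a * a` would, when run through `get_symbolic_builder`, record
-// a single degree-2 constraint roughly as follows:
-//
-//     let main = builder.main();                     // RowMajorMatrix<SymbolicVariable<F>>
-//     let (a, b) = (main.get(0, 0), main.get(0, 1)); // variables of the local row
-//     builder.assert_zero(a * a - b);                // pushed into `constraints`
-//
-// `SymbolicExpression` tracks `degree_multiple` through each operator (max for
-// Add/Sub, sum for Mul), which is what `max_constraint_degree` later reads off.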
-impl PartitionedAirBuilder for SymbolicRapBuilder { - fn cached_mains(&self) -> &[Self::M] { - &self.partitioned_main[..self.trace_width.cached_mains.len()] - } - fn common_main(&self) -> &Self::M { - assert_ne!( - self.trace_width.common_main, 0, - "AIR doesn't have a common main trace" - ); - &self.partitioned_main[self.trace_width.cached_mains.len()] - } -} - -#[allow(dead_code)] -struct LocalOnlyChecker; - -#[allow(dead_code)] -impl LocalOnlyChecker { - fn check_var(var: SymbolicVariable) -> bool { - match var.entry { - Entry::Preprocessed { offset } => offset == 0, - Entry::Main { offset, .. } => offset == 0, - Entry::Permutation { offset } => offset == 0, - Entry::Public => true, - Entry::Challenge => true, - Entry::Exposed => true, - } - } - - fn check_expr(expr: &SymbolicExpression) -> bool { - match expr { - SymbolicExpression::Variable(var) => Self::check_var(*var), - SymbolicExpression::IsFirstRow => false, - SymbolicExpression::IsLastRow => false, - SymbolicExpression::IsTransition => false, - SymbolicExpression::Constant(_) => true, - SymbolicExpression::Add { x, y, .. } => Self::check_expr(x) && Self::check_expr(y), - SymbolicExpression::Sub { x, y, .. } => Self::check_expr(x) && Self::check_expr(y), - SymbolicExpression::Neg { x, .. } => Self::check_expr(x), - SymbolicExpression::Mul { x, y, .. } => Self::check_expr(x) && Self::check_expr(y), - } - } -} - -fn gen_main_trace( - part_index: usize, - width: usize, -) -> RowMajorMatrix> { - let mat_values = [0, 1] - .into_iter() - .flat_map(|offset| { - (0..width) - .map(move |index| SymbolicVariable::new(Entry::Main { part_index, offset }, index)) - }) - .collect_vec(); - RowMajorMatrix::new(mat_values, width) -} diff --git a/crates/stark-backend/src/air_builders/symbolic/symbolic_expression.rs b/crates/stark-backend/src/air_builders/symbolic/symbolic_expression.rs deleted file mode 100644 index fd3cc87fff..0000000000 --- a/crates/stark-backend/src/air_builders/symbolic/symbolic_expression.rs +++ /dev/null @@ -1,323 +0,0 @@ -// Copied from uni-stark/src/symbolic_expression.rs to use Arc instead of Rc. - -use core::{ - fmt::Debug, - iter::{Product, Sum}, - ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}, -}; -use std::sync::Arc; - -use p3_field::{AbstractField, Field}; -use serde::{Deserialize, Serialize}; - -use super::symbolic_variable::SymbolicVariable; - -/// An expression over `SymbolicVariable`s. -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(bound = "F: Field")] -pub enum SymbolicExpression { - Variable(SymbolicVariable), - IsFirstRow, - IsLastRow, - IsTransition, - Constant(F), - Add { - x: Arc, - y: Arc, - degree_multiple: usize, - }, - Sub { - x: Arc, - y: Arc, - degree_multiple: usize, - }, - Neg { - x: Arc, - degree_multiple: usize, - }, - Mul { - x: Arc, - y: Arc, - degree_multiple: usize, - }, -} - -impl SymbolicExpression { - /// Returns the multiple of `n` (the trace length) in this expression's degree. - pub const fn degree_multiple(&self) -> usize { - match self { - SymbolicExpression::Variable(v) => v.degree_multiple(), - SymbolicExpression::IsFirstRow => 1, - SymbolicExpression::IsLastRow => 1, - SymbolicExpression::IsTransition => 0, - SymbolicExpression::Constant(_) => 0, - SymbolicExpression::Add { - degree_multiple, .. - } => *degree_multiple, - SymbolicExpression::Sub { - degree_multiple, .. - } => *degree_multiple, - SymbolicExpression::Neg { - degree_multiple, .. - } => *degree_multiple, - SymbolicExpression::Mul { - degree_multiple, .. 
- } => *degree_multiple, - } - } - - pub fn rotate(&self, offset: usize) -> Self { - match self { - SymbolicExpression::Variable(v) => v.rotate(offset).into(), - SymbolicExpression::IsFirstRow => unreachable!("IsFirstRow should not be rotated"), - SymbolicExpression::IsLastRow => unreachable!("IsLastRow should not be rotated"), - SymbolicExpression::IsTransition => unreachable!("IsTransition should not be rotated"), - SymbolicExpression::Constant(c) => Self::Constant(*c), - SymbolicExpression::Add { - x, - y, - degree_multiple, - } => Self::Add { - x: Arc::new(x.rotate(offset)), - y: Arc::new(y.rotate(offset)), - degree_multiple: *degree_multiple, - }, - SymbolicExpression::Sub { - x, - y, - degree_multiple, - } => Self::Sub { - x: Arc::new(x.rotate(offset)), - y: Arc::new(y.rotate(offset)), - degree_multiple: *degree_multiple, - }, - SymbolicExpression::Neg { x, degree_multiple } => Self::Neg { - x: Arc::new(x.rotate(offset)), - degree_multiple: *degree_multiple, - }, - SymbolicExpression::Mul { - x, - y, - degree_multiple, - } => Self::Mul { - x: Arc::new(x.rotate(offset)), - y: Arc::new(y.rotate(offset)), - degree_multiple: *degree_multiple, - }, - } - } - - pub fn next(&self) -> Self { - self.rotate(1) - } -} - -impl Default for SymbolicExpression { - fn default() -> Self { - Self::Constant(F::ZERO) - } -} - -impl From for SymbolicExpression { - fn from(value: F) -> Self { - Self::Constant(value) - } -} - -impl AbstractField for SymbolicExpression { - type F = F; - - const ZERO: Self = Self::Constant(F::ZERO); - const ONE: Self = Self::Constant(F::ONE); - const TWO: Self = Self::Constant(F::TWO); - const NEG_ONE: Self = Self::Constant(F::NEG_ONE); - - #[inline] - fn from_f(f: Self::F) -> Self { - f.into() - } - - fn from_bool(b: bool) -> Self { - Self::Constant(F::from_bool(b)) - } - - fn from_canonical_u8(n: u8) -> Self { - Self::Constant(F::from_canonical_u8(n)) - } - - fn from_canonical_u16(n: u16) -> Self { - Self::Constant(F::from_canonical_u16(n)) - } - - fn from_canonical_u32(n: u32) -> Self { - Self::Constant(F::from_canonical_u32(n)) - } - - fn from_canonical_u64(n: u64) -> Self { - Self::Constant(F::from_canonical_u64(n)) - } - - fn from_canonical_usize(n: usize) -> Self { - Self::Constant(F::from_canonical_usize(n)) - } - - fn from_wrapped_u32(n: u32) -> Self { - Self::Constant(F::from_wrapped_u32(n)) - } - - fn from_wrapped_u64(n: u64) -> Self { - Self::Constant(F::from_wrapped_u64(n)) - } -} - -impl Add for SymbolicExpression { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - let degree_multiple = self.degree_multiple().max(rhs.degree_multiple()); - Self::Add { - x: Arc::new(self), - y: Arc::new(rhs), - degree_multiple, - } - } -} - -impl Add for SymbolicExpression { - type Output = Self; - - fn add(self, rhs: F) -> Self { - self + Self::from(rhs) - } -} - -impl AddAssign for SymbolicExpression { - fn add_assign(&mut self, rhs: Self) { - *self = self.clone() + rhs; - } -} - -impl AddAssign for SymbolicExpression { - fn add_assign(&mut self, rhs: F) { - *self += Self::from(rhs); - } -} - -impl Sum for SymbolicExpression { - fn sum>(iter: I) -> Self { - iter.reduce(|x, y| x + y).unwrap_or(Self::ZERO) - } -} - -impl Sum for SymbolicExpression { - fn sum>(iter: I) -> Self { - iter.map(|x| Self::from(x)).sum() - } -} - -impl Sub for SymbolicExpression { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - let degree_multiple = self.degree_multiple().max(rhs.degree_multiple()); - Self::Sub { - x: Arc::new(self), - y: Arc::new(rhs), - degree_multiple, - } - 
} -} - -impl Sub for SymbolicExpression { - type Output = Self; - - fn sub(self, rhs: F) -> Self { - self - Self::from(rhs) - } -} - -impl SubAssign for SymbolicExpression { - fn sub_assign(&mut self, rhs: Self) { - *self = self.clone() - rhs; - } -} - -impl SubAssign for SymbolicExpression { - fn sub_assign(&mut self, rhs: F) { - *self -= Self::from(rhs); - } -} - -impl Neg for SymbolicExpression { - type Output = Self; - - fn neg(self) -> Self { - let degree_multiple = self.degree_multiple(); - Self::Neg { - x: Arc::new(self), - degree_multiple, - } - } -} - -impl Mul for SymbolicExpression { - type Output = Self; - - fn mul(self, rhs: Self) -> Self { - #[allow(clippy::suspicious_arithmetic_impl)] - let degree_multiple = self.degree_multiple() + rhs.degree_multiple(); - Self::Mul { - x: Arc::new(self), - y: Arc::new(rhs), - degree_multiple, - } - } -} - -impl Mul for SymbolicExpression { - type Output = Self; - - fn mul(self, rhs: F) -> Self { - self * Self::from(rhs) - } -} - -impl MulAssign for SymbolicExpression { - fn mul_assign(&mut self, rhs: Self) { - *self = self.clone() * rhs; - } -} - -impl MulAssign for SymbolicExpression { - fn mul_assign(&mut self, rhs: F) { - *self *= Self::from(rhs); - } -} - -impl Product for SymbolicExpression { - fn product>(iter: I) -> Self { - iter.reduce(|x, y| x * y).unwrap_or(Self::ONE) - } -} - -impl Product for SymbolicExpression { - fn product>(iter: I) -> Self { - iter.map(|x| Self::from(x)).product() - } -} - -pub trait SymbolicEvaluator> { - fn eval_var(&self, symbolic_var: SymbolicVariable) -> E; - - fn eval_expr(&self, symbolic_expr: &SymbolicExpression) -> E { - match symbolic_expr { - SymbolicExpression::Variable(var) => self.eval_var(*var), - SymbolicExpression::Constant(c) => (*c).into(), - SymbolicExpression::Add { x, y, .. } => self.eval_expr(x) + self.eval_expr(y), - SymbolicExpression::Sub { x, y, .. } => self.eval_expr(x) - self.eval_expr(y), - SymbolicExpression::Neg { x, .. } => -self.eval_expr(x), - SymbolicExpression::Mul { x, y, .. } => self.eval_expr(x) * self.eval_expr(y), - _ => unreachable!("Expression cannot be evaluated"), - } - } -} diff --git a/crates/stark-backend/src/air_builders/symbolic/symbolic_variable.rs b/crates/stark-backend/src/air_builders/symbolic/symbolic_variable.rs deleted file mode 100644 index 91f9573c6d..0000000000 --- a/crates/stark-backend/src/air_builders/symbolic/symbolic_variable.rs +++ /dev/null @@ -1,194 +0,0 @@ -// Copied from uni-stark/src/symbolic_variable.rs. - -use core::{ - marker::PhantomData, - ops::{Add, Mul, Sub}, -}; - -use p3_field::Field; -use serde::{Deserialize, Serialize}; - -use super::symbolic_expression::SymbolicExpression; - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum Entry { - Preprocessed { - offset: usize, - }, - /// Main may be partitioned - Main { - part_index: usize, - offset: usize, - }, - Permutation { - offset: usize, - }, - Public, - Challenge, - Exposed, -} - -impl Entry { - /// Advance the internal offset of the entry by the given `offset`. 
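-    /// e.g., `Entry::Main { part_index: 0, offset: 0 }.rotate(1)` is the same column
-    /// on the next row; row-independent entries (`Public`, `Challenge`, `Exposed`)
-    /// are returned unchanged.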
- pub fn rotate(self, offset: usize) -> Self { - match self { - Entry::Preprocessed { offset: old_offset } => Entry::Preprocessed { - offset: old_offset + offset, - }, - Entry::Main { - part_index, - offset: old_offset, - } => Entry::Main { - part_index, - offset: old_offset + offset, - }, - Entry::Permutation { offset: old_offset } => Entry::Permutation { - offset: old_offset + offset, - }, - Entry::Public | Entry::Challenge | Entry::Exposed => self, - } - } - - pub fn next(self) -> Self { - self.rotate(1) - } -} - -/// A variable within the evaluation window, i.e. a column in either the local or next row. -#[derive(Copy, Clone, Debug, Serialize, Deserialize)] -pub struct SymbolicVariable { - pub entry: Entry, - pub index: usize, - pub(crate) _phantom: PhantomData, -} - -impl SymbolicVariable { - pub const fn new(entry: Entry, index: usize) -> Self { - Self { - entry, - index, - _phantom: PhantomData, - } - } - - pub const fn degree_multiple(&self) -> usize { - match self.entry { - Entry::Preprocessed { .. } | Entry::Main { .. } | Entry::Permutation { .. } => 1, - Entry::Public | Entry::Challenge | Entry::Exposed => 0, - } - } - - pub fn rotate(self, offset: usize) -> Self { - Self { - entry: self.entry.rotate(offset), - index: self.index, - _phantom: PhantomData, - } - } - - pub fn next(self) -> Self { - self.rotate(1) - } -} - -impl From> for SymbolicExpression { - fn from(value: SymbolicVariable) -> Self { - SymbolicExpression::Variable(value) - } -} - -impl Add for SymbolicVariable { - type Output = SymbolicExpression; - - fn add(self, rhs: Self) -> Self::Output { - SymbolicExpression::from(self) + SymbolicExpression::from(rhs) - } -} - -impl Add for SymbolicVariable { - type Output = SymbolicExpression; - - fn add(self, rhs: F) -> Self::Output { - SymbolicExpression::from(self) + SymbolicExpression::from(rhs) - } -} - -impl Add> for SymbolicVariable { - type Output = SymbolicExpression; - - fn add(self, rhs: SymbolicExpression) -> Self::Output { - SymbolicExpression::from(self) + rhs - } -} - -impl Add> for SymbolicExpression { - type Output = Self; - - fn add(self, rhs: SymbolicVariable) -> Self::Output { - self + Self::from(rhs) - } -} - -impl Sub for SymbolicVariable { - type Output = SymbolicExpression; - - fn sub(self, rhs: Self) -> Self::Output { - SymbolicExpression::from(self) - SymbolicExpression::from(rhs) - } -} - -impl Sub for SymbolicVariable { - type Output = SymbolicExpression; - - fn sub(self, rhs: F) -> Self::Output { - SymbolicExpression::from(self) - SymbolicExpression::from(rhs) - } -} - -impl Sub> for SymbolicVariable { - type Output = SymbolicExpression; - - fn sub(self, rhs: SymbolicExpression) -> Self::Output { - SymbolicExpression::from(self) - rhs - } -} - -impl Sub> for SymbolicExpression { - type Output = Self; - - fn sub(self, rhs: SymbolicVariable) -> Self::Output { - self - Self::from(rhs) - } -} - -impl Mul for SymbolicVariable { - type Output = SymbolicExpression; - - fn mul(self, rhs: Self) -> Self::Output { - SymbolicExpression::from(self) * SymbolicExpression::from(rhs) - } -} - -impl Mul for SymbolicVariable { - type Output = SymbolicExpression; - - fn mul(self, rhs: F) -> Self::Output { - SymbolicExpression::from(self) * SymbolicExpression::from(rhs) - } -} - -impl Mul> for SymbolicVariable { - type Output = SymbolicExpression; - - fn mul(self, rhs: SymbolicExpression) -> Self::Output { - SymbolicExpression::from(self) * rhs - } -} - -impl Mul> for SymbolicExpression { - type Output = Self; - - fn mul(self, rhs: SymbolicVariable) -> Self::Output 
{ - self * Self::from(rhs) - } -} diff --git a/crates/stark-backend/src/air_builders/verifier.rs b/crates/stark-backend/src/air_builders/verifier.rs deleted file mode 100644 index 27f1c104f7..0000000000 --- a/crates/stark-backend/src/air_builders/verifier.rs +++ /dev/null @@ -1,127 +0,0 @@ -use std::{ - marker::PhantomData, - ops::{AddAssign, MulAssign}, -}; - -use p3_field::{AbstractField, ExtensionField, Field}; -use p3_matrix::Matrix; -use p3_maybe_rayon::prelude::join; - -use super::{ - symbolic::{ - symbolic_expression::{SymbolicEvaluator, SymbolicExpression}, - symbolic_variable::{Entry, SymbolicVariable}, - }, - ViewPair, -}; -use crate::config::{StarkGenericConfig, Val}; - -pub type VerifierConstraintFolder<'a, SC> = GenericVerifierConstraintFolder< - 'a, - Val, - ::Challenge, - Val, - ::Challenge, - ::Challenge, ->; -// Struct definition copied from sp1 under MIT license. -/// A folder for verifier constraints with generic types. -/// -/// `Var` is still a challenge type because this is a verifier. -pub struct GenericVerifierConstraintFolder<'a, F, EF, PubVar, Var, Expr> { - pub preprocessed: ViewPair<'a, Var>, - pub partitioned_main: Vec>, - pub after_challenge: Vec>, - pub challenges: &'a [Vec], - pub is_first_row: Var, - pub is_last_row: Var, - pub is_transition: Var, - pub alpha: Var, - pub accumulator: Expr, - pub public_values: &'a [PubVar], - pub exposed_values_after_challenge: &'a [Vec], - pub _marker: PhantomData<(F, EF)>, -} - -impl GenericVerifierConstraintFolder<'_, F, EF, PubVar, Var, Expr> -where - F: Field, - EF: ExtensionField, - Expr: AbstractField + From + MulAssign + AddAssign + Send + Sync, - Var: Into + Copy + Send + Sync, - PubVar: Into + Copy + Send + Sync, -{ - pub fn eval_constraints(&mut self, constraints: &[SymbolicExpression]) { - for constraint in constraints { - let x = self.eval_expr(constraint); - self.assert_zero(x); - } - } - - pub fn assert_zero(&mut self, x: impl Into) { - let x = x.into(); - self.accumulator *= self.alpha; - self.accumulator += x; - } -} - -impl SymbolicEvaluator - for GenericVerifierConstraintFolder<'_, F, EF, PubVar, Var, Expr> -where - F: Field, - EF: ExtensionField, - Expr: AbstractField + From + Send + Sync, - Var: Into + Copy + Send + Sync, - PubVar: Into + Copy + Send + Sync, -{ - fn eval_var(&self, symbolic_var: SymbolicVariable) -> Expr { - let index = symbolic_var.index; - match symbolic_var.entry { - Entry::Preprocessed { offset } => self.preprocessed.get(offset, index).into(), - Entry::Main { part_index, offset } => { - self.partitioned_main[part_index].get(offset, index).into() - } - Entry::Public => self.public_values[index].into(), - Entry::Permutation { offset } => self - .after_challenge - .first() - .expect("Challenge phase not supported") - .get(offset, index) - .into(), - Entry::Challenge => self - .challenges - .first() - .expect("Challenge phase not supported")[index] - .into(), - Entry::Exposed => self - .exposed_values_after_challenge - .first() - .expect("Challenge phase not supported")[index] - .into(), - } - } - - fn eval_expr(&self, symbolic_expr: &SymbolicExpression) -> Expr { - // TODO[jpw] don't use recursion to avoid stack overflow - match symbolic_expr { - SymbolicExpression::Variable(var) => self.eval_var(*var), - SymbolicExpression::Constant(c) => (*c).into(), - SymbolicExpression::Add { x, y, .. } => { - let (x, y) = join(|| self.eval_expr(x), || self.eval_expr(y)); - x + y - } - SymbolicExpression::Sub { x, y, .. 
} => { - let (x, y) = join(|| self.eval_expr(x), || self.eval_expr(y)); - x - y - } - SymbolicExpression::Neg { x, .. } => -self.eval_expr(x), - SymbolicExpression::Mul { x, y, .. } => { - let (x, y) = join(|| self.eval_expr(x), || self.eval_expr(y)); - x * y - } - SymbolicExpression::IsFirstRow => self.is_first_row.into(), - SymbolicExpression::IsLastRow => self.is_last_row.into(), - SymbolicExpression::IsTransition => self.is_transition.into(), - } - } -} diff --git a/crates/stark-backend/src/chip.rs b/crates/stark-backend/src/chip.rs deleted file mode 100644 index 16bff39df0..0000000000 --- a/crates/stark-backend/src/chip.rs +++ /dev/null @@ -1,142 +0,0 @@ -use std::{ - cell::RefCell, - rc::Rc, - sync::{Arc, Mutex}, -}; - -use crate::{config::StarkGenericConfig, prover::types::AirProofInput, rap::AnyRap}; - -/// A chip is a stateful struct that stores the state necessary to -/// generate the trace of an AIR. This trait is for proving purposes -/// and has a generic [StarkGenericConfig] since it needs to know the STARK config. -pub trait Chip: ChipUsageGetter + Sized { - fn air(&self) -> Arc>; - /// Generate all necessary input for proving a single AIR. - fn generate_air_proof_input(self) -> AirProofInput; - fn generate_air_proof_input_with_id(self, air_id: usize) -> (usize, AirProofInput) { - (air_id, self.generate_air_proof_input()) - } -} - -/// A trait to get chip usage information. -pub trait ChipUsageGetter { - fn air_name(&self) -> String; - /// If the chip has a state-independent trace height that is determined - /// upon construction, return this height. This is used to distinguish - /// "static" versus "dynamic" usage metrics. - fn constant_trace_height(&self) -> Option { - None - } - /// Height of used rows in the main trace. - fn current_trace_height(&self) -> usize; - /// Width of the main trace - fn trace_width(&self) -> usize; - /// For metrics collection - fn current_trace_cells(&self) -> usize { - self.trace_width() * self.current_trace_height() - } -} - -impl> Chip for RefCell { - fn air(&self) -> Arc> { - self.borrow().air() - } - fn generate_air_proof_input(self) -> AirProofInput { - self.into_inner().generate_air_proof_input() - } -} - -impl> Chip for Rc { - fn air(&self) -> Arc> { - self.as_ref().air() - } - fn generate_air_proof_input(self) -> AirProofInput { - if let Some(c) = Rc::into_inner(self) { - c.generate_air_proof_input() - } else { - panic!("Cannot generate AirProofInput while other chips still hold a reference"); - } - } -} - -impl ChipUsageGetter for Rc { - fn air_name(&self) -> String { - self.as_ref().air_name() - } - fn constant_trace_height(&self) -> Option { - self.as_ref().constant_trace_height() - } - fn current_trace_height(&self) -> usize { - self.as_ref().current_trace_height() - } - fn trace_width(&self) -> usize { - self.as_ref().trace_width() - } -} - -impl ChipUsageGetter for RefCell { - fn air_name(&self) -> String { - self.borrow().air_name() - } - fn constant_trace_height(&self) -> Option { - self.borrow().constant_trace_height() - } - fn current_trace_height(&self) -> usize { - self.borrow().current_trace_height() - } - fn trace_width(&self) -> usize { - self.borrow().trace_width() - } -} - -impl> Chip for Arc { - fn air(&self) -> Arc> { - self.as_ref().air() - } - fn generate_air_proof_input(self) -> AirProofInput { - if let Some(c) = Arc::into_inner(self) { - c.generate_air_proof_input() - } else { - panic!("Cannot generate AirProofInput while other chips still hold a reference"); - } - } -} - -impl ChipUsageGetter for Arc { - fn 
air_name(&self) -> String { - self.as_ref().air_name() - } - fn constant_trace_height(&self) -> Option { - self.as_ref().constant_trace_height() - } - fn current_trace_height(&self) -> usize { - self.as_ref().current_trace_height() - } - fn trace_width(&self) -> usize { - self.as_ref().trace_width() - } -} - -impl> Chip for Mutex { - fn air(&self) -> Arc> { - self.lock().unwrap().air() - } - fn generate_air_proof_input(self) -> AirProofInput { - self.into_inner().unwrap().generate_air_proof_input() - } -} - -impl ChipUsageGetter for Mutex { - fn air_name(&self) -> String { - self.lock().unwrap().air_name() - } - fn constant_trace_height(&self) -> Option { - self.lock().unwrap().constant_trace_height() - } - fn current_trace_height(&self) -> usize { - self.lock().unwrap().current_trace_height() - } - fn trace_width(&self) -> usize { - self.lock().unwrap().trace_width() - } -} diff --git a/crates/stark-backend/src/circuit_api.rs b/crates/stark-backend/src/circuit_api.rs deleted file mode 100644 index 9362d2cc37..0000000000 --- a/crates/stark-backend/src/circuit_api.rs +++ /dev/null @@ -1,23 +0,0 @@ -use async_trait::async_trait; - -use crate::{ - config::StarkGenericConfig, - prover::types::{Proof, ProofInput}, - verifier::VerificationError, -}; - -/// Async prover for a specific circuit using a specific Stark config. -#[async_trait] -pub trait AsyncCircuitProver { - async fn prove(&self, proof_input: ProofInput) -> Proof; -} - -/// Prover for a specific circuit using a specific Stark config. -pub trait CircuitProver { - fn prove(&self, proof_input: ProofInput) -> Proof; -} - -/// Verifier for a specific circuit using a specific Stark config. -pub trait CircuitVerifier { - fn verify(&self, proof: &Proof) -> Result<(), VerificationError>; -} diff --git a/crates/stark-backend/src/commit.rs b/crates/stark-backend/src/commit.rs deleted file mode 100644 index ae73fb425d..0000000000 --- a/crates/stark-backend/src/commit.rs +++ /dev/null @@ -1,66 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::config::{PcsProverData, StarkGenericConfig}; - -/// In a multi-matrix system, we record a pointer from each matrix to the commitment its stored in -/// as well as the index of the matrix within that commitment. -/// The intended use case is to track the list of pointers for all main trace matrix parts in a single STARK. -/// -/// The pointers are in reference to an implicit global list of commitments -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct MatrixCommitmentPointers { - /// For each matrix, the pointer - pub matrix_ptrs: Vec, -} - -impl MatrixCommitmentPointers { - pub fn new(matrix_ptrs: Vec) -> Self { - Self { matrix_ptrs } - } -} - -/// When a single matrix belong to a multi-matrix commitment in some list of commitments, -/// this pointer identifies the index of the commitment in the list, and then the index -/// of the matrix within that commitment. -/// -/// The pointer is in reference to an implicit global list of commitments -#[derive(Clone, Copy, Debug, Serialize, Deserialize)] -pub struct SingleMatrixCommitPtr { - pub commit_index: usize, - pub matrix_index: usize, -} - -impl SingleMatrixCommitPtr { - pub fn new(commit_index: usize, matrix_index: usize) -> Self { - Self { - commit_index, - matrix_index, - } - } -} - -/// The PCS commits to multiple matrices at once, so this struct stores -/// references to get PCS data relevant to a single matrix (e.g., LDE matrix, openings). 
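-/// e.g., pairing the prover data of a commitment to three trace matrices with
-/// `matrix_index: 1` gives a view of the second committed matrix.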
-pub struct CommittedSingleMatrixView<'a, SC: StarkGenericConfig> { - /// Prover data, includes LDE matrix of trace and Merkle tree. - /// The prover data can commit to multiple trace matrices, so - /// `matrix_index` is needed to identify this trace. - pub data: &'a PcsProverData, - /// The index of the trace matrix in the prover data. - pub matrix_index: usize, -} - -impl<'a, SC: StarkGenericConfig> CommittedSingleMatrixView<'a, SC> { - pub fn new(data: &'a PcsProverData, matrix_index: usize) -> Self { - Self { data, matrix_index } - } -} - -impl Clone for CommittedSingleMatrixView<'_, SC> { - fn clone(&self) -> Self { - Self { - data: self.data, - matrix_index: self.matrix_index, - } - } -} diff --git a/crates/stark-backend/src/config.rs b/crates/stark-backend/src/config.rs deleted file mode 100644 index 80cf85153e..0000000000 --- a/crates/stark-backend/src/config.rs +++ /dev/null @@ -1,147 +0,0 @@ -//! [StarkGenericConfig] and associated types. Originally taken from Plonky3 under MIT license. - -use std::marker::PhantomData; - -use p3_challenger::{CanObserve, CanSample, FieldChallenger}; -use p3_commit::{Pcs, PolynomialSpace}; -use p3_field::{ExtensionField, Field}; - -use crate::interaction::RapPhaseSeq; - -/// Based on [p3_uni_stark::StarkGenericConfig]. -pub trait StarkGenericConfig -where - Domain: Send + Sync, - Com: Send + Sync, - PcsProof: Send + Sync, - PcsProverData: Send + Sync, -{ - /// The PCS used to commit to trace polynomials. - type Pcs: Pcs; - - /// The RAP challenge phases used to establish, e.g., that interactions are balanced. - type RapPhaseSeq: RapPhaseSeq, Self::Challenge, Self::Challenger>; - - /// The field from which most random challenges are drawn. - type Challenge: ExtensionField> + Send + Sync; - - /// The challenger (Fiat-Shamir) implementation used. 
- type Challenger: FieldChallenger> - + CanObserve<>::Commitment> - + CanSample; - - fn pcs(&self) -> &Self::Pcs; - - fn rap_phase_seq(&self) -> &Self::RapPhaseSeq; -} - -pub type Val = <<::Pcs as Pcs< - ::Challenge, - ::Challenger, ->>::Domain as PolynomialSpace>::Val; - -pub type Com = <::Pcs as Pcs< - ::Challenge, - ::Challenger, ->>::Commitment; - -pub type PcsProverData = <::Pcs as Pcs< - ::Challenge, - ::Challenger, ->>::ProverData; - -pub type PcsProof = <::Pcs as Pcs< - ::Challenge, - ::Challenger, ->>::Proof; - -pub type PcsError = <::Pcs as Pcs< - ::Challenge, - ::Challenger, ->>::Error; - -pub type Domain = <::Pcs as Pcs< - ::Challenge, - ::Challenger, ->>::Domain; - -pub type RapPhaseSeqPartialProof = <::RapPhaseSeq as RapPhaseSeq< - Val, - ::Challenge, - ::Challenger, ->>::PartialProof; - -pub type RapPhaseSeqProvingKey = <::RapPhaseSeq as RapPhaseSeq< - Val, - ::Challenge, - ::Challenger, ->>::ProvingKey; - -pub type RapPhaseSeqError = <::RapPhaseSeq as RapPhaseSeq< - Val, - ::Challenge, - ::Challenger, ->>::Error; - -pub type PackedVal = as Field>::Packing; - -pub type PackedChallenge = - <::Challenge as ExtensionField>>::ExtensionPacking; - -#[derive(Debug)] -pub struct StarkConfig { - pcs: Pcs, - rap_phase: RapPhaseSeq, - _phantom: PhantomData<(Challenge, Challenger)>, -} - -impl StarkConfig { - pub const fn new(pcs: Pcs, rap_phase: RapPhaseSeq) -> Self { - Self { - pcs, - rap_phase, - _phantom: PhantomData, - } - } -} - -impl StarkGenericConfig - for StarkConfig -where - Challenge: ExtensionField<::Val>, - Pcs: p3_commit::Pcs, - Pcs::Domain: Send + Sync, - Pcs::Commitment: Send + Sync, - Pcs::ProverData: Send + Sync, - Pcs::Proof: Send + Sync, - Rps: RapPhaseSeq<::Val, Challenge, Challenger>, - Challenger: FieldChallenger<::Val> - + CanObserve<>::Commitment> - + CanSample, -{ - type Pcs = Pcs; - type RapPhaseSeq = Rps; - type Challenge = Challenge; - type Challenger = Challenger; - - fn pcs(&self) -> &Self::Pcs { - &self.pcs - } - fn rap_phase_seq(&self) -> &Self::RapPhaseSeq { - &self.rap_phase - } -} - -pub struct UniStarkConfig(pub SC); - -impl p3_uni_stark::StarkGenericConfig for UniStarkConfig { - type Pcs = SC::Pcs; - - type Challenge = SC::Challenge; - - type Challenger = SC::Challenger; - - fn pcs(&self) -> &Self::Pcs { - self.0.pcs() - } -} diff --git a/crates/stark-backend/src/engine.rs b/crates/stark-backend/src/engine.rs deleted file mode 100644 index 6b828ff44e..0000000000 --- a/crates/stark-backend/src/engine.rs +++ /dev/null @@ -1,168 +0,0 @@ -use std::sync::Arc; - -use itertools::izip; -use p3_matrix::dense::DenseMatrix; - -use crate::{ - config::{StarkGenericConfig, Val}, - keygen::{ - types::{MultiStarkProvingKey, MultiStarkVerifyingKey}, - MultiStarkKeygenBuilder, - }, - prover::{ - types::{AirProofInput, Proof, ProofInput, TraceCommitter}, - MultiTraceStarkProver, - }, - rap::AnyRap, - verifier::{MultiTraceStarkVerifier, VerificationError}, -}; - -/// Data for verifying a Stark proof. -pub struct VerificationData { - pub vk: MultiStarkVerifyingKey, - pub proof: Proof, -} - -/// Testing engine -pub trait StarkEngine { - /// Stark config - fn config(&self) -> &SC; - /// Creates a new challenger with a deterministic state. - /// Creating new challenger for prover and verifier separately will result in - /// them having the same starting state. 
- fn new_challenger(&self) -> SC::Challenger; - - fn keygen_builder(&self) -> MultiStarkKeygenBuilder { - MultiStarkKeygenBuilder::new(self.config()) - } - - fn prover(&self) -> MultiTraceStarkProver { - MultiTraceStarkProver::new(self.config()) - } - - fn verifier(&self) -> MultiTraceStarkVerifier { - MultiTraceStarkVerifier::new(self.config()) - } - - // TODO[jpw]: the following does not belong in this crate! dev tooling only - - /// Runs a single end-to-end test for a given set of AIRs and traces. - /// This includes proving/verifying key generation, creating a proof, and verifying the proof. - /// This function should only be used on AIRs where the main trace is **not** partitioned. - fn run_simple_test_impl( - &self, - chips: Vec>>, - traces: Vec>>, - public_values: Vec>>, - ) -> Result, VerificationError> { - self.run_test_impl(AirProofInput::multiple_simple(chips, traces, public_values)) - } - - /// Runs a single end-to-end test for a given set of chips and traces partitions. - /// This includes proving/verifying key generation, creating a proof, and verifying the proof. - fn run_test_impl( - &self, - air_proof_inputs: Vec>, - ) -> Result, VerificationError> { - let mut keygen_builder = self.keygen_builder(); - let air_ids = self.set_up_keygen_builder(&mut keygen_builder, &air_proof_inputs); - let proof_input = ProofInput { - per_air: izip!(air_ids, air_proof_inputs).collect(), - }; - let pk = keygen_builder.generate_pk(); - let vk = pk.get_vk(); - let proof = self.prove(&pk, proof_input); - self.verify(&vk, &proof)?; - Ok(VerificationData { vk, proof }) - } - - /// Add AIRs and get AIR IDs - fn set_up_keygen_builder( - &self, - keygen_builder: &mut MultiStarkKeygenBuilder<'_, SC>, - air_proof_inputs: &[AirProofInput], - ) -> Vec { - air_proof_inputs - .iter() - .map(|air_proof_input| { - let air = air_proof_input.air.clone(); - assert_eq!( - air_proof_input.raw.cached_mains.len(), - air.cached_main_widths().len() - ); - let common_main_width = air.common_main_width(); - if common_main_width == 0 { - assert!(air_proof_input.raw.common_main.is_none()); - } else { - assert_eq!( - air_proof_input.raw.common_main.as_ref().unwrap().width, - air.common_main_width() - ); - } - keygen_builder.add_air(air) - }) - .collect() - } - - fn prove_then_verify( - &self, - pk: &MultiStarkProvingKey, - proof_input: ProofInput, - ) -> Result<(), VerificationError> { - let proof = self.prove(pk, proof_input); - self.verify(&pk.get_vk(), &proof) - } - - fn prove(&self, pk: &MultiStarkProvingKey, proof_input: ProofInput) -> Proof { - let prover = self.prover(); - let committer = TraceCommitter::new(prover.pcs()); - - let air_proof_inputs = proof_input - .per_air - .into_iter() - .map(|(air_id, mut air_proof_input)| { - // Commit cached traces if they are not provided - if air_proof_input.cached_mains_pdata.is_empty() - && !air_proof_input.raw.cached_mains.is_empty() - { - air_proof_input.cached_mains_pdata = air_proof_input - .raw - .cached_mains - .iter() - .map(|trace| committer.commit(vec![trace.as_ref().clone()])) - .collect(); - } else { - assert_eq!( - air_proof_input.cached_mains_pdata.len(), - air_proof_input.raw.cached_mains.len() - ); - } - (air_id, air_proof_input) - }) - .collect(); - let proof_input = ProofInput { - per_air: air_proof_inputs, - }; - - let mut challenger = self.new_challenger(); - - #[cfg(feature = "bench-metrics")] - let prove_start = std::time::Instant::now(); - let _proof = prover.prove(&mut challenger, pk, proof_input); - #[cfg(feature = "bench-metrics")] - 
metrics::gauge!("stark_prove_excluding_trace_time_ms") - .set(prove_start.elapsed().as_millis() as f64); - - _proof - } - - fn verify( - &self, - vk: &MultiStarkVerifyingKey, - proof: &Proof, - ) -> Result<(), VerificationError> { - let mut challenger = self.new_challenger(); - let verifier = self.verifier(); - verifier.verify(&mut challenger, vk, proof) - } -} diff --git a/crates/stark-backend/src/gkr/gate.rs b/crates/stark-backend/src/gkr/gate.rs deleted file mode 100644 index 742ccedb0c..0000000000 --- a/crates/stark-backend/src/gkr/gate.rs +++ /dev/null @@ -1,54 +0,0 @@ -use p3_field::Field; - -use crate::{gkr::types::GkrMask, poly::uni::Fraction}; - -/// Defines how a circuit operates locally on two input rows to produce a single output row. -/// This local 2-to-1 constraint is what gives the whole circuit its "binary tree" structure. -/// -/// Binary tree structured circuits have a highly regular wiring pattern that fit the structure of -/// the circuits defined in [Thaler13] which allow for efficient linear time (linear in size of the -/// circuit) GKR prover implementations. -/// -/// [Thaler13]: https://eprint.iacr.org/2013/351.pdf -#[derive(Debug, Clone, Copy)] -pub enum Gate { - LogUp, - GrandProduct, -} - -impl Gate { - /// Returns the output after applying the gate to the mask. - pub(crate) fn eval( - &self, - mask: &GkrMask, - ) -> Result, InvalidNumMaskColumnsError> { - Ok(match self { - Self::LogUp => { - if mask.columns().len() != 2 { - return Err(InvalidNumMaskColumnsError); - } - - let [numerator_a, numerator_b] = mask.columns()[0]; - let [denominator_a, denominator_b] = mask.columns()[1]; - - let a = Fraction::new(numerator_a, denominator_a); - let b = Fraction::new(numerator_b, denominator_b); - let res = a + b; - - vec![res.numerator, res.denominator] - } - Self::GrandProduct => { - if mask.columns().len() != 1 { - return Err(InvalidNumMaskColumnsError); - } - - let [a, b] = mask.columns()[0]; - vec![a * b] - } - }) - } -} - -/// Error indicating the mask has an invalid number of columns for a gate's operation. -#[derive(Debug)] -pub struct InvalidNumMaskColumnsError; diff --git a/crates/stark-backend/src/gkr/mod.rs b/crates/stark-backend/src/gkr/mod.rs deleted file mode 100644 index f860f22378..0000000000 --- a/crates/stark-backend/src/gkr/mod.rs +++ /dev/null @@ -1,11 +0,0 @@ -mod gate; -mod prover; -#[cfg(test)] -mod tests; -mod types; -mod verifier; - -pub use gate::*; -pub use prover::*; -pub use types::*; -pub use verifier::*; diff --git a/crates/stark-backend/src/gkr/prover.rs b/crates/stark-backend/src/gkr/prover.rs deleted file mode 100644 index 80981ba8da..0000000000 --- a/crates/stark-backend/src/gkr/prover.rs +++ /dev/null @@ -1,545 +0,0 @@ -//! Copied from starkware-libs/stwo under Apache-2.0 license. -//! GKR batch prover for Grand Product and LogUp lookup arguments. -use std::{ - iter::{successors, zip}, - ops::Deref, -}; - -use itertools::Itertools; -use p3_challenger::FieldChallenger; -use p3_field::Field; -use thiserror::Error; - -use crate::{ - gkr::types::{GkrArtifact, GkrBatchProof, GkrMask, Layer}, - poly::{ - multi::{hypercube_eq, Mle, MultivariatePolyOracle}, - uni::{random_linear_combination, UnivariatePolynomial}, - }, - sumcheck, - sumcheck::SumcheckArtifacts, -}; - -/// For a given `y`, stores evaluations of [hypercube_eq](x, y) on all 2^{n-1} boolean hypercube -/// points of the form `x = (0, x_2, ..., x_n)`. -/// -/// Evaluations are stored in lexicographic order i.e. 
`evals[0] = eq((0, ..., 0, 0), y)`, -/// `evals[1] = eq((0, ..., 0, 1), y)`, etc. -#[derive(Debug, Clone)] -struct HypercubeEqEvals { - y: Vec, - evals: Vec, -} - -impl HypercubeEqEvals { - pub fn eval(y: &[F]) -> Self { - let y = y.to_vec(); - - if y.is_empty() { - let evals = vec![F::ONE]; - return Self { evals, y }; - } - - // Compute evaluations for when x_0 = 0. - let evals = Self::gen(&y[1..], F::ONE - y[0]); - assert_eq!(evals.len(), 1 << (y.len() - 1)); - Self { evals, y } - } - - /// Returns evaluations of the function `x -> eq(x, y) * v` for each `x` in `{0, 1}^n`. - fn gen(y: &[F], v: F) -> Vec { - let mut evals = Vec::with_capacity(1 << y.len()); - evals.push(v); - - for &y_i in y.iter().rev() { - for j in 0..evals.len() { - // `lhs[j] = eq(0, y_i) * c[i]` - // `rhs[j] = eq(1, y_i) * c[i]` - let tmp = evals[j] * y_i; - evals.push(tmp); - evals[j] -= tmp; - } - } - - evals - } -} - -impl Deref for HypercubeEqEvals { - type Target = [F]; - - fn deref(&self) -> &Self::Target { - self.evals.deref() - } -} - -/// Multivariate polynomial `P` that expresses the relation between two consecutive GKR layers. -/// -/// When the input layer is [`Layer::GrandProduct`] (represented by multilinear column `inp`) -/// the polynomial represents: -/// -/// ```text -/// P(x) = eq(x, y) * inp(x, 0) * inp(x, 1) -/// ``` -/// -/// When the input layer is LogUp (represented by multilinear columns `inp_numer` and -/// `inp_denom`) the polynomial represents: -/// -/// ```text -/// numer(x) = inp_numer(x, 0) * inp_denom(x, 1) + inp_numer(x, 1) * inp_denom(x, 0) -/// denom(x) = inp_denom(x, 0) * inp_denom(x, 1) -/// -/// P(x) = eq(x, y) * (numer(x) + lambda * denom(x)) -/// ``` -struct GkrMultivariatePolyOracle<'a, F: Clone> { - pub eq_evals: &'a HypercubeEqEvals, - pub input_layer: Layer, - pub eq_fixed_var_correction: F, - /// Used by LogUp to perform a random linear combination of the numerators and denominators. - pub lambda: F, -} - -impl MultivariatePolyOracle for GkrMultivariatePolyOracle<'_, F> { - fn arity(&self) -> usize { - self.input_layer.n_variables() - 1 - } - - fn marginalize_first(&self, claim: F) -> UnivariatePolynomial { - let n_variables = self.arity(); - assert_ne!(n_variables, 0); - let n_terms = 1 << (n_variables - 1); - // Vector used to generate evaluations of `eq(x, y)` for `x` in the boolean hypercube. 
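-        // Recall eq(x, y) = prod_i (x_i * y_i + (1 - x_i) * (1 - y_i)), which is 1 iff
-        // x == y on the boolean hypercube; `eq_evals` caches these products once per
-        // GKR layer so they are not recomputed for every sumcheck term.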
- let y = &self.eq_evals.y; - let lambda = self.lambda; - - let (mut eval_at_0, mut eval_at_2) = match &self.input_layer { - Layer::GrandProduct(col) => eval_grand_product_sum(self.eq_evals, col, n_terms), - Layer::LogUpGeneric { - numerators, - denominators, - } - | Layer::LogUpMultiplicities { - numerators, - denominators, - } => eval_logup_sum(self.eq_evals, numerators, denominators, n_terms, lambda), - Layer::LogUpSingles { denominators } => { - eval_logup_singles_sum(self.eq_evals, denominators, n_terms, lambda) - } - }; - - eval_at_0 *= self.eq_fixed_var_correction; - eval_at_2 *= self.eq_fixed_var_correction; - correct_sum_as_poly_in_first_variable(eval_at_0, eval_at_2, claim, y, n_variables) - } - - fn partial_evaluation(self, alpha: F) -> Self { - if self.is_constant() { - return self; - } - - let z0 = self.eq_evals.y[self.eq_evals.y.len() - self.arity()]; - let eq_fixed_var_correction = self.eq_fixed_var_correction * hypercube_eq(&[alpha], &[z0]); - - Self { - eq_evals: self.eq_evals, - eq_fixed_var_correction, - input_layer: self.input_layer.fix_first_variable(alpha), - lambda: self.lambda, - } - } -} - -/// Evaluates `sum_x eq(({0}^|r|, 0, x), y) * inp(r, t, x, 0) * inp(r, t, x, 1)` at `t=0` and `t=2`. -/// -/// Output of the form: `(eval_at_0, eval_at_2)`. -fn eval_grand_product_sum( - eq_evals: &HypercubeEqEvals, - input_layer: &Mle, - n_terms: usize, -) -> (F, F) { - let mut eval_at_0 = F::ZERO; - let mut eval_at_2 = F::ZERO; - - for i in 0..n_terms { - // Input polynomial values at (r, {0, 1, 2}, bits(i), {0, 1}) - let (inp_r0_0, inp_r0_1) = (input_layer[i * 2], input_layer[i * 2 + 1]); - let (inp_r1_0, inp_r1_1) = ( - input_layer[(n_terms + i) * 2], - input_layer[(n_terms + i) * 2 + 1], - ); - - // Calculate values at t = 2 - let inp_r2_0 = inp_r1_0.double() - inp_r0_0; - let inp_r2_1 = inp_r1_1.double() - inp_r0_1; - - // Product polynomials at t = 0 and t = 2 - let prod_at_r0i = inp_r0_0 * inp_r0_1; - let prod_at_r2i = inp_r2_0 * inp_r2_1; - - // Accumulate evaluated terms - let eq_eval_at_0i = eq_evals[i]; - eval_at_0 += eq_eval_at_0i * prod_at_r0i; - eval_at_2 += eq_eval_at_0i * prod_at_r2i; - } - - (eval_at_0, eval_at_2) -} - -/// Evaluates `sum_x eq(({0}^|r|, 0, x), y) * (inp_numer(r, t, x, 0) * inp_denom(r, t, x, 1) + -/// inp_numer(r, t, x, 1) * inp_denom(r, t, x, 0) + lambda * inp_denom(r, t, x, 0) * inp_denom(r, t, -/// x, 1))` at `t=0` and `t=2`. -/// -/// Output of the form: `(eval_at_0, eval_at_2)`. 
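-///
-/// Since each input is multilinear in `t`, its value at `t = 2` follows by linear
-/// extrapolation: `inp(r, 2, x) = 2 * inp(r, 1, x) - inp(r, 0, x)`, which is what
-/// the `.double() - ...` computations below implement.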
-fn eval_logup_sum( - eq_evals: &HypercubeEqEvals, - input_numerators: &Mle, - input_denominators: &Mle, - n_terms: usize, - lambda: F, -) -> (F, F) { - let mut eval_at_0 = F::ZERO; - let mut eval_at_2 = F::ZERO; - - for i in 0..n_terms { - // Gather input values at (r, {0, 1, 2}, bits(i), {0, 1}) - let (numer_r0_0, denom_r0_0) = (input_numerators[i * 2], input_denominators[i * 2]); - let (numer_r0_1, denom_r0_1) = (input_numerators[i * 2 + 1], input_denominators[i * 2 + 1]); - let (numer_r1_0, denom_r1_0) = ( - input_numerators[(n_terms + i) * 2], - input_denominators[(n_terms + i) * 2], - ); - let (numer_r1_1, denom_r1_1) = ( - input_numerators[(n_terms + i) * 2 + 1], - input_denominators[(n_terms + i) * 2 + 1], - ); - - // Calculate values at r, t = 2 - let numer_r2_0 = numer_r1_0.double() - numer_r0_0; - let denom_r2_0 = denom_r1_0.double() - denom_r0_0; - let numer_r2_1 = numer_r1_1.double() - numer_r0_1; - let denom_r2_1 = denom_r1_1.double() - denom_r0_1; - - // Compute fractions at t = 0 and t = 2 - let numer_at_r0i = numer_r0_0 * denom_r0_1 + numer_r0_1 * denom_r0_0; - let denom_at_r0i = denom_r0_1 * denom_r0_0; - let numer_at_r2i = numer_r2_0 * denom_r2_1 + numer_r2_1 * denom_r2_0; - let denom_at_r2i = denom_r2_1 * denom_r2_0; - - // Accumulate the evaluated terms - let eq_eval_at_0i = eq_evals[i]; - eval_at_0 += eq_eval_at_0i * (numer_at_r0i + lambda * denom_at_r0i); - eval_at_2 += eq_eval_at_0i * (numer_at_r2i + lambda * denom_at_r2i); - } - - (eval_at_0, eval_at_2) -} - -/// Evaluates `sum_x eq(({0}^|r|, 0, x), y) * (inp_denom(r, t, x, 1) + inp_denom(r, t, x, 0) + -/// lambda * inp_denom(r, t, x, 0) * inp_denom(r, t, x, 1))` at `t=0` and `t=2`. -/// -/// Output of the form: `(eval_at_0, eval_at_2)`. -fn eval_logup_singles_sum( - eq_evals: &HypercubeEqEvals, - input_denominators: &Mle, - n_terms: usize, - lambda: F, -) -> (F, F) { - let mut eval_at_0 = F::ZERO; - let mut eval_at_2 = F::ZERO; - - for i in 0..n_terms { - // Input denominator values at (r, {0, 1, 2}, bits(i), {0, 1}) - let (inp_denom_r0_0, inp_denom_r0_1) = - (input_denominators[i * 2], input_denominators[i * 2 + 1]); - let (inp_denom_r1_0, inp_denom_r1_1) = ( - input_denominators[(n_terms + i) * 2], - input_denominators[(n_terms + i) * 2 + 1], - ); - - // Calculate values at t = 2 - let inp_denom_r2_0 = inp_denom_r1_0.double() - inp_denom_r0_0; - let inp_denom_r2_1 = inp_denom_r1_1.double() - inp_denom_r0_1; - - // Fraction addition polynomials at t = 0 and t = 2 - let numer_at_r0i = inp_denom_r0_0 + inp_denom_r0_1; - let denom_at_r0i = inp_denom_r0_0 * inp_denom_r0_1; - let numer_at_r2i = inp_denom_r2_0 + inp_denom_r2_1; - let denom_at_r2i = inp_denom_r2_0 * inp_denom_r2_1; - - // Accumulate evaluated terms - let eq_eval_at_0i = eq_evals[i]; - eval_at_0 += eq_eval_at_0i * (numer_at_r0i + lambda * denom_at_r0i); - eval_at_2 += eq_eval_at_0i * (numer_at_r2i + lambda * denom_at_r2i); - } - - (eval_at_0, eval_at_2) -} - -impl GkrMultivariatePolyOracle<'_, F> { - fn is_constant(&self) -> bool { - self.arity() == 0 - } - - /// Returns all input layer columns restricted to a line. - /// - /// Let `l` be the line satisfying `l(0) = b*` and `l(1) = c*`. Oracles that represent constants - /// are expressed by values `c_i(b*)` and `c_i(c*)` where `c_i` represents the input GKR layer's - /// `i`th column (for binary tree GKR `b* = (r, 0)`, `c* = (r, 1)`). - /// - /// If this oracle represents a constant, then each `c_i` restricted to `l` is returned. - /// Otherwise, an [`Err`] is returned. 
- /// - /// For more context see page 64. - fn try_into_mask(self) -> Result, NotConstantPolyError> { - if !self.is_constant() { - return Err(NotConstantPolyError); - } - - let columns = match self.input_layer { - Layer::GrandProduct(mle) => vec![mle.as_ref().try_into().unwrap()], - Layer::LogUpGeneric { - numerators, - denominators, - } => { - let numerators = numerators.as_ref().try_into().unwrap(); - let denominators = denominators.as_ref().try_into().unwrap(); - vec![numerators, denominators] - } - // Should never get called. - Layer::LogUpMultiplicities { .. } => unimplemented!(), - Layer::LogUpSingles { denominators } => { - let numerators = [F::ONE; 2]; - let denominators = denominators.as_ref().try_into().unwrap(); - vec![numerators, denominators] - } - }; - - Ok(GkrMask::new(columns)) - } -} - -/// Error returned when a polynomial is expected to be constant but it is not. -#[derive(Debug, Error)] -#[error("polynomial is not constant")] -pub struct NotConstantPolyError; - -/// Batch proves lookup circuits with GKR. -/// -/// The input layers should be committed to the channel before calling this function. -// GKR algorithm: (page 64) -pub fn prove_batch( - challenger: &mut impl FieldChallenger, - input_layer_by_instance: Vec>, -) -> (GkrBatchProof, GkrArtifact) { - let n_instances = input_layer_by_instance.len(); - let n_layers_by_instance = input_layer_by_instance - .iter() - .map(|l| l.n_variables()) - .collect_vec(); - let n_layers = *n_layers_by_instance.iter().max().unwrap(); - - // Evaluate all instance circuits and collect the layer values. - let mut layers_by_instance = input_layer_by_instance - .into_iter() - .map(|input_layer| gen_layers(input_layer).into_iter().rev()) - .collect_vec(); - - let mut output_claims_by_instance = vec![None; n_instances]; - let mut layer_masks_by_instance = (0..n_instances).map(|_| Vec::new()).collect_vec(); - let mut sumcheck_proofs = Vec::new(); - - let mut ood_point = Vec::new(); - let mut claims_to_verify_by_instance = vec![None; n_instances]; - - for layer in 0..n_layers { - let n_remaining_layers = n_layers - layer; - - // Check all the instances for output layers. - for (instance, layers) in layers_by_instance.iter_mut().enumerate() { - if n_layers_by_instance[instance] == n_remaining_layers { - let output_layer = layers.next().unwrap(); - let output_layer_values = output_layer.try_into_output_layer_values().unwrap(); - claims_to_verify_by_instance[instance] = Some(output_layer_values.clone()); - output_claims_by_instance[instance] = Some(output_layer_values); - } - } - - // Seed the channel with layer claims. - for claims_to_verify in claims_to_verify_by_instance.iter().flatten() { - challenger.observe_slice(claims_to_verify); - } - - let eq_evals = HypercubeEqEvals::eval(&ood_point); - let sumcheck_alpha = challenger.sample(); - let instance_lambda = challenger.sample(); - - let mut sumcheck_oracles = Vec::new(); - let mut sumcheck_claims = Vec::new(); - let mut sumcheck_instances = Vec::new(); - - // Create the multivariate polynomial oracles used with sumcheck. 
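-        // Every instance still active at this layer joins one batched sumcheck: each
-        // instance's claims are collapsed to a scalar via
-        // `random_linear_combination(claims, instance_lambda)`, and the oracles are
-        // combined by `sumcheck::prove_batch` under the random coefficient `sumcheck_alpha`.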
- for (instance, claims_to_verify) in claims_to_verify_by_instance.iter().enumerate() { - if let Some(claims_to_verify) = claims_to_verify { - let layer = layers_by_instance[instance].next().unwrap(); - - sumcheck_oracles.push(GkrMultivariatePolyOracle { - eq_evals: &eq_evals, - input_layer: layer, - eq_fixed_var_correction: F::ONE, - lambda: instance_lambda, - }); - sumcheck_claims.push(random_linear_combination(claims_to_verify, instance_lambda)); - sumcheck_instances.push(instance); - } - } - - let ( - sumcheck_proof, - SumcheckArtifacts { - evaluation_point: sumcheck_ood_point, - constant_poly_oracles, - .. - }, - ) = sumcheck::prove_batch( - sumcheck_claims, - sumcheck_oracles, - sumcheck_alpha, - challenger, - ); - - sumcheck_proofs.push(sumcheck_proof); - - let masks = constant_poly_oracles - .into_iter() - .map(|oracle| oracle.try_into_mask().unwrap()) - .collect_vec(); - - // Seed the channel with the layer masks. - for (&instance, mask) in zip(&sumcheck_instances, &masks) { - for column in mask.columns() { - challenger.observe_slice(column); - } - layer_masks_by_instance[instance].push(mask.clone()); - } - - let challenge = challenger.sample(); - ood_point = sumcheck_ood_point; - ood_point.push(challenge); - - // Set the claims to prove in the layer above. - for (instance, mask) in zip(sumcheck_instances, masks) { - claims_to_verify_by_instance[instance] = Some(mask.reduce_at_point(challenge)); - } - } - - let output_claims_by_instance = output_claims_by_instance - .into_iter() - .map(Option::unwrap) - .collect(); - - let claims_to_verify_by_instance = claims_to_verify_by_instance - .into_iter() - .map(Option::unwrap) - .collect(); - - let proof = GkrBatchProof { - sumcheck_proofs, - layer_masks_by_instance, - output_claims_by_instance, - }; - - let artifact = GkrArtifact { - ood_point, - claims_to_verify_by_instance, - n_variables_by_instance: n_layers_by_instance, - }; - - (proof, artifact) -} - -/// Executes the GKR circuit on the input layer and returns all the circuit's layers. -fn gen_layers(input_layer: Layer) -> Vec> { - let n_variables = input_layer.n_variables(); - let layers = successors(Some(input_layer), |layer| layer.next_layer()).collect_vec(); - assert_eq!(layers.len(), n_variables + 1); - layers -} - -/// Computes `r(t) = sum_x eq((t, x), y[-k:]) * p(t, x)` from evaluations of -/// `f(t) = sum_x eq(({0}^(n - k), 0, x), y) * p(t, x)`. -/// -/// Note `claim` must equal `r(0) + r(1)` and `r` must have degree <= 3. -/// -/// For more context see `Layer::into_multivariate_poly()` docs. -/// See also (section 3.2). -pub fn correct_sum_as_poly_in_first_variable( - f_at_0: F, - f_at_2: F, - claim: F, - y: &[F], - k: usize, -) -> UnivariatePolynomial { - assert_ne!(k, 0); - let n = y.len(); - assert!(k <= n); - - // We evaluated `f(0)` and `f(2)` - the inputs. - // We want to compute `r(t) = f(t) * eq(t, y[n - k]) / eq(0, y[:n - k + 1])`. - let a_const = hypercube_eq(&vec![F::ZERO; n - k + 1], &y[..n - k + 1]).inverse(); - - // Find the additional root of `r(t)`, by finding the root of `eq(t, y[n - k])`: - // 0 = eq(t, y[n - k]) - // = t * y[n - k] + (1 - t)(1 - y[n - k]) - // = 1 - y[n - k] - t(1 - 2 * y[n - k]) - // => t = (1 - y[n - k]) / (1 - 2 * y[n - k]) - // = b - let b_const = (F::ONE - y[n - k]) / (F::ONE - y[n - k].double()); - - // We get that `r(t) = f(t) * eq(t, y[n - k]) * a`. 
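-    // Four evaluations pin down the degree <= 3 polynomial r: r(0) and r(2) come
-    // from f, r(1) = claim - r(0) by the requirement claim = r(0) + r(1), and
-    // r(b) = 0 because eq(b, y[n - k]) vanishes at the root b computed above.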
-    let r_at_0 = f_at_0 * hypercube_eq(&[F::ZERO], &[y[n - k]]) * a_const;
-    let r_at_1 = claim - r_at_0;
-    let r_at_2 = f_at_2 * hypercube_eq(&[F::TWO], &[y[n - k]]) * a_const;
-
-    // Interpolate.
-    UnivariatePolynomial::from_interpolation(&[
-        (F::ZERO, r_at_0),
-        (F::ONE, r_at_1),
-        (F::TWO, r_at_2),
-        (b_const, F::ZERO),
-    ])
-}
-
-#[cfg(test)]
-mod tests {
-    use p3_baby_bear::BabyBear;
-    use p3_field::AbstractField;
-    use rand::Rng;
-
-    use crate::{gkr::prover::HypercubeEqEvals, poly::multi::hypercube_eq};
-
-    #[test]
-    fn test_gen_eq_evals() {
-        type F = BabyBear;
-
-        let mut rng = rand::thread_rng();
-
-        let v: F = rng.gen();
-        let y: Vec<F> = vec![rng.gen(), rng.gen(), rng.gen()];
-
-        let eq_evals = HypercubeEqEvals::gen(&y, v);
-
-        assert_eq!(
-            *eq_evals,
-            [
-                hypercube_eq(&[F::ZERO, F::ZERO, F::ZERO], &y) * v,
-                hypercube_eq(&[F::ZERO, F::ZERO, F::ONE], &y) * v,
-                hypercube_eq(&[F::ZERO, F::ONE, F::ZERO], &y) * v,
-                hypercube_eq(&[F::ZERO, F::ONE, F::ONE], &y) * v,
-                hypercube_eq(&[F::ONE, F::ZERO, F::ZERO], &y) * v,
-                hypercube_eq(&[F::ONE, F::ZERO, F::ONE], &y) * v,
-                hypercube_eq(&[F::ONE, F::ONE, F::ZERO], &y) * v,
-                hypercube_eq(&[F::ONE, F::ONE, F::ONE], &y) * v,
-            ]
-        );
-    }
-}
diff --git a/crates/stark-backend/src/gkr/tests.rs b/crates/stark-backend/src/gkr/tests.rs
deleted file mode 100644
index b5f9a28b9a..0000000000
--- a/crates/stark-backend/src/gkr/tests.rs
+++ /dev/null
@@ -1,244 +0,0 @@
-use std::iter::zip;
-
-use itertools::Itertools;
-use openvm_stark_sdk::{
-    config::baby_bear_blake3::default_engine, engine::StarkEngine, utils::create_seeded_rng,
-};
-use p3_baby_bear::BabyBear;
-use p3_field::AbstractField;
-use rand::Rng;
-
-use crate::{
-    gkr::{self, Gate, GkrArtifact, GkrError, Layer},
-    poly::{multi::Mle, uni::Fraction},
-};
-
-#[test]
-fn test_batch() -> Result<(), GkrError<BabyBear>> {
-    const LOG_N: usize = 5;
-
-    let engine = default_engine();
-    let mut rng = create_seeded_rng();
-
-    let col0 = Mle::new((0..1 << LOG_N).map(|_| rng.gen()).collect_vec());
-    let col1 = Mle::new((0..1 << LOG_N).map(|_| rng.gen()).collect_vec());
-
-    let product0 = col0.iter().copied().product();
-    let product1 = col1.iter().copied().product();
-
-    let input_layers = vec![
-        Layer::GrandProduct(col0.clone()),
-        Layer::GrandProduct(col1.clone()),
-    ];
-    let (proof, _) = gkr::prove_batch(&mut engine.new_challenger(), input_layers);
-
-    let GkrArtifact {
-        ood_point,
-        claims_to_verify_by_instance,
-        n_variables_by_instance,
-    } = gkr::partially_verify_batch(
-        vec![Gate::GrandProduct; 2],
-        &proof,
-        &mut engine.new_challenger(),
-    )?;
-
-    assert_eq!(n_variables_by_instance, [LOG_N, LOG_N]);
-    assert_eq!(proof.output_claims_by_instance.len(), 2);
-    assert_eq!(claims_to_verify_by_instance.len(), 2);
-    assert_eq!(proof.output_claims_by_instance[0], &[product0]);
-    assert_eq!(proof.output_claims_by_instance[1], &[product1]);
-    let claim0 = &claims_to_verify_by_instance[0];
-    let claim1 = &claims_to_verify_by_instance[1];
-    assert_eq!(claim0, &[col0.eval(&ood_point)]);
-    assert_eq!(claim1, &[col1.eval(&ood_point)]);
-    Ok(())
-}
-
-#[test]
-fn test_batch_with_different_sizes() -> Result<(), GkrError<BabyBear>> {
-    let engine = default_engine();
-    let mut rng = create_seeded_rng();
-
-    const LOG_N0: usize = 5;
-    const LOG_N1: usize = 7;
-
-    let col0 = Mle::new((0..1 << LOG_N0).map(|_| rng.gen()).collect());
-    let col1 = Mle::new((0..1 << LOG_N1).map(|_| rng.gen()).collect());
-
-    let product0 = col0.iter().copied().product();
-    let product1 = col1.iter().copied().product();
-
-    let input_layers = vec![
Layer::GrandProduct(col0.clone()), - Layer::GrandProduct(col1.clone()), - ]; - let (proof, _) = gkr::prove_batch(&mut engine.new_challenger(), input_layers); - - let GkrArtifact { - ood_point, - claims_to_verify_by_instance, - n_variables_by_instance, - } = gkr::partially_verify_batch( - vec![Gate::GrandProduct; 2], - &proof, - &mut engine.new_challenger(), - )?; - - assert_eq!(n_variables_by_instance, [LOG_N0, LOG_N1]); - assert_eq!(proof.output_claims_by_instance.len(), 2); - assert_eq!(claims_to_verify_by_instance.len(), 2); - assert_eq!(proof.output_claims_by_instance[0], &[product0]); - assert_eq!(proof.output_claims_by_instance[1], &[product1]); - let claim0 = &claims_to_verify_by_instance[0]; - let claim1 = &claims_to_verify_by_instance[1]; - let n_vars = ood_point.len(); - assert_eq!(claim0, &[col0.eval(&ood_point[n_vars - LOG_N0..])]); - assert_eq!(claim1, &[col1.eval(&ood_point[n_vars - LOG_N1..])]); - Ok(()) -} - -#[test] -fn test_grand_product() -> Result<(), GkrError> { - const N: usize = 1 << 5; - - let engine = default_engine(); - let mut rng = create_seeded_rng(); - - let values = (0..N).map(|_| rng.gen()).collect_vec(); - let product = values.iter().copied().product(); - let col = Mle::::new(values); - let input_layer = Layer::GrandProduct(col.clone()); - let (proof, _) = gkr::prove_batch(&mut engine.new_challenger(), vec![input_layer]); - - let GkrArtifact { - ood_point: r, - claims_to_verify_by_instance, - n_variables_by_instance: _, - } = gkr::partially_verify_batch( - vec![Gate::GrandProduct], - &proof, - &mut engine.new_challenger(), - )?; - - assert_eq!(proof.output_claims_by_instance, [vec![product]]); - assert_eq!(claims_to_verify_by_instance, [vec![col.eval(&r)]]); - Ok(()) -} - -#[test] -fn test_logup_with_generic_trace() -> Result<(), GkrError> { - const N: usize = 1 << 5; - type F = BabyBear; - let mut rng = create_seeded_rng(); - - let numerator_values = (0..N).map(|_| rng.gen()).collect(); - let denominator_values = (0..N).map(|_| rng.gen()).collect(); - - let sum: Fraction = zip(&numerator_values, &denominator_values) - .map(|(&n, &d)| Fraction::new(n, d)) - .sum(); - let numerators = Mle::::new(numerator_values); - let denominators = Mle::::new(denominator_values); - let top_layer = Layer::LogUpGeneric { - numerators: numerators.clone(), - denominators: denominators.clone(), - }; - - let engine = default_engine(); - - let (proof, _) = gkr::prove_batch(&mut engine.new_challenger(), vec![top_layer]); - - let GkrArtifact { - ood_point, - claims_to_verify_by_instance, - n_variables_by_instance: _, - } = gkr::partially_verify_batch(vec![Gate::LogUp], &proof, &mut engine.new_challenger())?; - - assert_eq!(claims_to_verify_by_instance.len(), 1); - assert_eq!(proof.output_claims_by_instance.len(), 1); - assert_eq!( - claims_to_verify_by_instance[0], - [numerators.eval(&ood_point), denominators.eval(&ood_point)] - ); - assert_eq!( - proof.output_claims_by_instance[0], - [sum.numerator, sum.denominator] - ); - Ok(()) -} - -#[test] -fn test_logup_with_singles_trace() -> Result<(), GkrError> { - const N: usize = 1 << 5; - type F = BabyBear; - - let mut rng = create_seeded_rng(); - let denominator_values = (0..N).map(|_| rng.gen()).collect_vec(); - let sum: Fraction = denominator_values - .iter() - .map(|&d| Fraction::new(F::ONE, d)) - .sum(); - let denominators = Mle::new(denominator_values); - let top_layer = Layer::LogUpSingles { - denominators: denominators.clone(), - }; - - let engine = default_engine(); - let (proof, _) = gkr::prove_batch(&mut 
engine.new_challenger(), vec![top_layer]); - - let GkrArtifact { - ood_point, - claims_to_verify_by_instance, - n_variables_by_instance: _, - } = gkr::partially_verify_batch(vec![Gate::LogUp], &proof, &mut engine.new_challenger())?; - - assert_eq!(claims_to_verify_by_instance.len(), 1); - assert_eq!(proof.output_claims_by_instance.len(), 1); - assert_eq!( - claims_to_verify_by_instance[0], - [F::ONE, denominators.eval(&ood_point)] - ); - assert_eq!( - proof.output_claims_by_instance[0], - [sum.numerator, sum.denominator] - ); - Ok(()) -} - -#[test] -fn test_logup_with_multiplicities_trace() -> Result<(), GkrError> { - const N: usize = 1 << 5; - let mut rng = create_seeded_rng(); - let numerator_values = (0..N).map(|_| rng.gen::()).collect_vec(); - let denominator_values = (0..N).map(|_| rng.gen::()).collect_vec(); - let sum: Fraction = zip(&numerator_values, &denominator_values) - .map(|(&n, &d)| Fraction::new(n, d)) - .sum(); - let numerators = Mle::new(numerator_values); - let denominators = Mle::new(denominator_values); - let top_layer = Layer::LogUpMultiplicities { - numerators: numerators.clone(), - denominators: denominators.clone(), - }; - - let engine = default_engine(); - let (proof, _) = gkr::prove_batch(&mut engine.new_challenger(), vec![top_layer]); - - let GkrArtifact { - ood_point, - claims_to_verify_by_instance, - n_variables_by_instance: _, - } = gkr::partially_verify_batch(vec![Gate::LogUp], &proof, &mut engine.new_challenger())?; - - assert_eq!(claims_to_verify_by_instance.len(), 1); - assert_eq!(proof.output_claims_by_instance.len(), 1); - assert_eq!( - claims_to_verify_by_instance[0], - [numerators.eval(&ood_point), denominators.eval(&ood_point)] - ); - assert_eq!( - proof.output_claims_by_instance[0], - [sum.numerator, sum.denominator] - ); - Ok(()) -} diff --git a/crates/stark-backend/src/gkr/types.rs b/crates/stark-backend/src/gkr/types.rs deleted file mode 100644 index 8a800dcda0..0000000000 --- a/crates/stark-backend/src/gkr/types.rs +++ /dev/null @@ -1,267 +0,0 @@ -use std::ops::Index; - -use p3_field::Field; -use thiserror::Error; - -use crate::{ - poly::{ - multi::{fold_mle_evals, Mle, MultivariatePolyOracle}, - uni::Fraction, - }, - sumcheck::{SumcheckError, SumcheckProof}, -}; - -/// Batch GKR proof. -pub struct GkrBatchProof { - /// Sum-check proof for each layer. - pub sumcheck_proofs: Vec>, - /// Mask for each layer for each instance. - pub layer_masks_by_instance: Vec>>, - /// Column circuit outputs for each instance. - pub output_claims_by_instance: Vec>, -} - -/// Values of interest obtained from the execution of the GKR protocol. -pub struct GkrArtifact { - /// Out-of-domain (OOD) point for evaluating columns in the input layer. - pub ood_point: Vec, - /// The claimed evaluation at `ood_point` for each column in the input layer of each instance. - pub claims_to_verify_by_instance: Vec>, - /// The number of variables that interpolate the input layer of each instance. - pub n_variables_by_instance: Vec, -} - -/// Stores two evaluations of each column in a GKR layer. -#[derive(Debug, Clone)] -pub struct GkrMask { - columns: Vec<[F; 2]>, -} - -impl GkrMask { - pub fn new(columns: Vec<[F; 2]>) -> Self { - Self { columns } - } - - pub fn columns(&self) -> &[[F; 2]] { - &self.columns - } -} - -impl GkrMask { - pub fn to_rows(&self) -> [Vec; 2] { - self.columns.iter().map(|[a, b]| (a, b)).unzip().into() - } - - /// Returns all `p_i(x)` where `p_i` interpolates column `i` of the mask on `{0, 1}`. 
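-    ///
-    /// Concretely, each column `[v0, v1]` reduces to the linear interpolation
-    /// `p_i(x) = v0 + x * (v1 - v0)` (via `fold_mle_evals`).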
- pub fn reduce_at_point(&self, x: F) -> Vec { - self.columns - .iter() - .map(|&[v0, v1]| fold_mle_evals(x, v0, v1)) - .collect() - } -} - -/// Error encountered during GKR protocol verification. -#[derive(Error, Debug)] -pub enum GkrError { - /// The proof is malformed. - #[error("proof data is invalid")] - MalformedProof, - /// Mask has an invalid number of columns. - #[error("mask in layer {instance_layer} of instance {instance} is invalid")] - InvalidMask { - instance: usize, - /// Layer of the instance (but not necessarily the batch). - instance_layer: LayerIndex, - }, - /// There is a mismatch between the number of instances in the proof and the number of - /// instances passed for verification. - #[error("provided an invalid number of instances (given {given}, proof expects {proof})")] - NumInstancesMismatch { given: usize, proof: usize }, - /// There was an error with one of the sumcheck proofs. - #[error("sum-check invalid in layer {layer}: {source}")] - InvalidSumcheck { - layer: LayerIndex, - source: SumcheckError, - }, - /// The circuit polynomial the verifier evaluated doesn't match claim from sumcheck. - #[error("circuit check failed in layer {layer} (calculated {output}, claim {claim})")] - CircuitCheckFailure { - claim: F, - output: F, - layer: LayerIndex, - }, -} - -/// GKR layer index where 0 corresponds to the output layer. -pub type LayerIndex = usize; - -/// Represents a layer in a binary tree structured GKR circuit. -/// -/// Layers can contain multiple columns, for example [LogUp] which has separate columns for -/// numerators and denominators. -/// -/// [LogUp]: https://eprint.iacr.org/2023/1284.pdf -#[derive(Debug, Clone)] -pub enum Layer { - GrandProduct(Mle), - LogUpGeneric { - numerators: Mle, - denominators: Mle, - }, - LogUpMultiplicities { - numerators: Mle, - denominators: Mle, - }, - /// All numerators implicitly equal "1". - LogUpSingles { - denominators: Mle, - }, -} - -impl Layer { - /// Returns the number of variables used to interpolate the layer's gate values. - pub fn n_variables(&self) -> usize { - match self { - Self::GrandProduct(mle) - | Self::LogUpSingles { denominators: mle } - | Self::LogUpMultiplicities { - denominators: mle, .. - } - | Self::LogUpGeneric { - denominators: mle, .. - } => mle.arity(), - } - } - - fn is_output_layer(&self) -> bool { - self.n_variables() == 0 - } - - /// Produces the next layer from the current layer. - /// - /// The next layer is strictly half the size of the current layer. - /// Returns [`None`] if called on an output layer. 
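-    ///
-    /// For example, a `GrandProduct` layer over values `[a, b, c, d]` yields the
-    /// next layer `[a * b, c * d]`.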
- pub fn next_layer(&self) -> Option { - if self.is_output_layer() { - return None; - } - - let next_layer = match self { - Layer::GrandProduct(layer) => Self::next_grand_product_layer(layer), - Layer::LogUpGeneric { - numerators, - denominators, - } - | Layer::LogUpMultiplicities { - numerators, - denominators, - } => Self::next_logup_layer(MleExpr::Mle(numerators), denominators), - Layer::LogUpSingles { denominators } => { - Self::next_logup_layer(MleExpr::Constant(F::ONE), denominators) - } - }; - Some(next_layer) - } - - fn next_grand_product_layer(layer: &Mle) -> Layer { - let res = layer - .chunks_exact(2) // Process in chunks of 2 elements - .map(|chunk| chunk[0] * chunk[1]) // Multiply each pair - .collect(); - Layer::GrandProduct(Mle::new(res)) - } - - fn next_logup_layer(numerators: MleExpr<'_, F>, denominators: &Mle) -> Layer { - let half_n = 1 << (denominators.arity() - 1); - let mut next_numerators = Vec::with_capacity(half_n); - let mut next_denominators = Vec::with_capacity(half_n); - - for i in 0..half_n { - let a = Fraction::new(numerators[i * 2], denominators[i * 2]); - let b = Fraction::new(numerators[i * 2 + 1], denominators[i * 2 + 1]); - let res = a + b; - next_numerators.push(res.numerator); - next_denominators.push(res.denominator); - } - - Layer::LogUpGeneric { - numerators: Mle::new(next_numerators), - denominators: Mle::new(next_denominators), - } - } - - /// Returns each column output if the layer is an output layer, otherwise returns an `Err`. - pub fn try_into_output_layer_values(self) -> Result, NotOutputLayerError> { - if !self.is_output_layer() { - return Err(NotOutputLayerError); - } - - Ok(match self { - Layer::LogUpSingles { denominators } => { - let numerator = F::ONE; - let denominator = denominators[0]; - vec![numerator, denominator] - } - Layer::LogUpGeneric { - numerators, - denominators, - } - | Layer::LogUpMultiplicities { - numerators, - denominators, - } => { - let numerator = numerators[0]; - let denominator = denominators[0]; - vec![numerator, denominator] - } - Layer::GrandProduct(col) => { - vec![col[0]] - } - }) - } - - /// Returns a transformed layer with the first variable of each column fixed to `assignment`. - pub fn fix_first_variable(self, x0: F) -> Self { - if self.n_variables() == 0 { - return self; - } - - match self { - Self::GrandProduct(mle) => Self::GrandProduct(mle.partial_evaluation(x0)), - Self::LogUpGeneric { - numerators, - denominators, - } - | Self::LogUpMultiplicities { - numerators, - denominators, - } => Self::LogUpGeneric { - numerators: numerators.partial_evaluation(x0), - denominators: denominators.partial_evaluation(x0), - }, - Self::LogUpSingles { denominators } => Self::LogUpSingles { - denominators: denominators.partial_evaluation(x0), - }, - } - } -} - -#[derive(Debug)] -pub struct NotOutputLayerError; - -enum MleExpr<'a, F: Field> { - Constant(F), - Mle(&'a Mle), -} - -impl Index for MleExpr<'_, F> { - type Output = F; - - fn index(&self, index: usize) -> &F { - match self { - Self::Constant(v) => v, - Self::Mle(mle) => &mle[index], - } - } -} diff --git a/crates/stark-backend/src/gkr/verifier.rs b/crates/stark-backend/src/gkr/verifier.rs deleted file mode 100644 index 6edeb411af..0000000000 --- a/crates/stark-backend/src/gkr/verifier.rs +++ /dev/null @@ -1,155 +0,0 @@ -//! Copied from starkware-libs/stwo under Apache-2.0 license. -//! GKR batch verifier for Grand Product and LogUp lookup arguments. 
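-//!
-//! Sketch of the intended call pattern, following `gkr/tests.rs` (engine setup and
-//! input layers are assumed to be provided by the caller):
-//!
-//! ```ignore
-//! let (proof, _) = gkr::prove_batch(&mut engine.new_challenger(), input_layers);
-//! let GkrArtifact { ood_point, claims_to_verify_by_instance, .. } =
-//!     gkr::partially_verify_batch(vec![Gate::GrandProduct; 2], &proof, &mut engine.new_challenger())?;
-//! // Partial verification: the claims must still be checked against the committed
-//! // input-layer columns, e.g. against `col.eval(&ood_point)`.
-//! ```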
- -use p3_challenger::FieldChallenger; -use p3_field::Field; - -use crate::{ - gkr::{ - gate::Gate, - types::{GkrArtifact, GkrBatchProof, GkrError}, - }, - poly::{multi::hypercube_eq, uni::random_linear_combination}, - sumcheck, -}; - -/// Partially verifies a batch GKR proof. -/// -/// On successful verification the function returns a [`GkrArtifact`] which stores the out-of-domain -/// point and claimed evaluations in the input layer columns for each instance at the OOD point. -/// These claimed evaluations are not checked in this function - hence partial verification. -pub fn partially_verify_batch( - gate_by_instance: Vec, - proof: &GkrBatchProof, - challenger: &mut impl FieldChallenger, -) -> Result, GkrError> { - let GkrBatchProof { - sumcheck_proofs, - layer_masks_by_instance, - output_claims_by_instance, - } = proof; - - if layer_masks_by_instance.len() != output_claims_by_instance.len() { - return Err(GkrError::MalformedProof); - } - - let n_instances = layer_masks_by_instance.len(); - let instance_n_layers = |instance: usize| layer_masks_by_instance[instance].len(); - let n_layers = (0..n_instances).map(instance_n_layers).max().unwrap(); - - if n_layers != sumcheck_proofs.len() { - return Err(GkrError::MalformedProof); - } - - if gate_by_instance.len() != n_instances { - return Err(GkrError::NumInstancesMismatch { - given: gate_by_instance.len(), - proof: n_instances, - }); - } - - let mut ood_point = vec![]; - let mut claims_to_verify_by_instance = vec![None; n_instances]; - - for (layer, sumcheck_proof) in sumcheck_proofs.iter().enumerate() { - let n_remaining_layers = n_layers - layer; - - // Check for output layers. - for instance in 0..n_instances { - if instance_n_layers(instance) == n_remaining_layers { - let output_claims = output_claims_by_instance[instance].clone(); - claims_to_verify_by_instance[instance] = Some(output_claims); - } - } - - // Seed the channel with layer claims. - for claims_to_verify in claims_to_verify_by_instance.iter().flatten() { - challenger.observe_slice(claims_to_verify); - } - - let sumcheck_alpha = challenger.sample(); - let instance_lambda = challenger.sample(); - - let mut sumcheck_claims = Vec::new(); - let mut sumcheck_instances = Vec::new(); - - // Prepare the sumcheck claim. - for (instance, claims_to_verify) in claims_to_verify_by_instance.iter().enumerate() { - if let Some(claims_to_verify) = claims_to_verify { - let n_unused_variables = n_layers - instance_n_layers(instance); - let doubling_factor = F::from_canonical_u32(1 << n_unused_variables); - let claim = - random_linear_combination(claims_to_verify, instance_lambda) * doubling_factor; - sumcheck_claims.push(claim); - sumcheck_instances.push(instance); - } - } - - let sumcheck_claim = random_linear_combination(&sumcheck_claims, sumcheck_alpha); - let (sumcheck_ood_point, sumcheck_eval) = - sumcheck::partially_verify(sumcheck_claim, sumcheck_proof, challenger) - .map_err(|source| GkrError::InvalidSumcheck { layer, source })?; - - let mut layer_evals = Vec::new(); - - // Evaluate the circuit locally at sumcheck OOD point. 
- for &instance in &sumcheck_instances { - let n_unused = n_layers - instance_n_layers(instance); - let mask = &layer_masks_by_instance[instance][layer - n_unused]; - let gate = &gate_by_instance[instance]; - let gate_output = gate.eval(mask).map_err(|_| { - let instance_layer = instance_n_layers(layer) - n_remaining_layers; - GkrError::InvalidMask { - instance, - instance_layer, - } - })?; - // TODO: Consider simplifying the code by just using the same eq eval for all instances - // regardless of size. - let eq_eval = hypercube_eq(&ood_point[n_unused..], &sumcheck_ood_point[n_unused..]); - layer_evals.push(eq_eval * random_linear_combination(&gate_output, instance_lambda)); - } - - let layer_eval = random_linear_combination(&layer_evals, sumcheck_alpha); - - if sumcheck_eval != layer_eval { - return Err(GkrError::CircuitCheckFailure { - claim: sumcheck_eval, - output: layer_eval, - layer, - }); - } - - // Seed the channel with the layer masks. - for &instance in &sumcheck_instances { - let n_unused = n_layers - instance_n_layers(instance); - let mask = &layer_masks_by_instance[instance][layer - n_unused]; - for column in mask.columns() { - challenger.observe_slice(column); - } - } - - // Set the OOD evaluation point for layer above. - let challenge = challenger.sample(); - ood_point = sumcheck_ood_point; - ood_point.push(challenge); - - // Set the claims to verify in the layer above. - for instance in sumcheck_instances { - let n_unused = n_layers - instance_n_layers(instance); - let mask = &layer_masks_by_instance[instance][layer - n_unused]; - claims_to_verify_by_instance[instance] = Some(mask.reduce_at_point(challenge)); - } - } - - let claims_to_verify_by_instance = claims_to_verify_by_instance - .into_iter() - .map(Option::unwrap) - .collect(); - - Ok(GkrArtifact { - ood_point, - claims_to_verify_by_instance, - n_variables_by_instance: (0..n_instances).map(instance_n_layers).collect(), - }) -} diff --git a/crates/stark-backend/src/interaction/README.md b/crates/stark-backend/src/interaction/README.md deleted file mode 100644 index 1a2c82aa64..0000000000 --- a/crates/stark-backend/src/interaction/README.md +++ /dev/null @@ -1,102 +0,0 @@ -# AIR Interactions (Cross-table lookups) - -We explain the interface and implementation of the communication protocol between different AIR matrices introduced by Valida here. We note that this allows AIRs with matrices of -different heights to communicate. See [here](https://hackmd.io/@shuklaayush/rJHhuWGfR) for another reference. - -## Interface - -The main interface is controlled by the trait [`InteractionBuilder`](./mod.rs) - -```rust -pub trait InteractionBuilder: AirBuilder { - fn push_send>( - &mut self, - bus_index: usize, - fields: impl IntoIterator, - count: impl Into, - ); - - fn push_receive>( - &mut self, - bus_index: usize, - fields: impl IntoIterator, - count: impl Into, - ); -} -``` - -The `InteractionBuilder` trait is an extension of `AirBuilder`. You should use `impl Air for MyAir` to enable usage of the above API within the `Air::eval` function. -For a given AIR, the interface allows to specify sends and receives. A single interaction $\sigma$ specifies a [communication] bus -to communicate over -- this bus is an abstract concept that is not explicitly materialized. -The index of this bus is `bus_index`, which we call $i_\sigma$ in the following. -The interaction specifies `fields` $(f_j)$ and `count` $m$ where each $f_j$ and $m$ is a polynomial expression -on the main and preprocessed trace polynomials with rotations. 
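
For example, a hypothetical `MyAir` that looks up pairs `(x, y)` from its main trace on bus `0` could push a send inside `Air::eval` (illustrative only, not part of this crate):

```rust
impl<AB: InteractionBuilder> Air<AB> for MyAir {
    fn eval(&self, builder: &mut AB) {
        let main = builder.main();
        let local = main.row_slice(0);
        let (x, y) = (local[0], local[1]);
        // Send `(x, y)` over bus 0 with multiplicity 1.
        builder.push_send(0, [x, y], AB::Expr::ONE);
    }
}
```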
Specifying such an interaction means that we want to send the tuple
$(f_1(\mathbf T),\dotsc,f_{len}(\mathbf T))$ to the $i_\sigma$-th bus with multiplicity $m(\mathbf T)$, where $\mathbf T$
refers to the trace (including preprocessed columns) viewed as polynomials (together with their rotations).

### Outcome

Provided the `count` values are small enough that their total over all sends is strictly smaller than the field characteristic (so no overflow is possible), this enforces that:

> for each bus, each unique row of `fields` occurs with the same total `count` in sends and receives across all chips.

In other words, for each bus, there is a multiset equality between

> the multiset union of the rows of `fields` with multiplicity `count` across all sends

and

> the multiset union of the rows of `fields` with multiplicity `count` across all receives.

One important consequence is that:

> for each bus, each row of a `fields` with non-zero `count` from a send coincides with some row of a `fields` of a receive (possibly in another chip).

In other words, it enforces a cross-chip lookup of the rows of the send tables with non-zero `count` into the concatenation of the receive tables.

### Conventions

Following Valida, we adopt the convention that if an individual chip owns some functionality, say `f(x) = y`, then the chip itself adds `receive`
interactions to _receive_ requests with fields `(x, y)` and constrains correctness of `f(x) = y`. Any other chip in the system that wants to use this
functionality adds `send` interactions to _send_ requests for it.

## Backend implementation via logUp

The backend implementation of the prover will constrain the computation of a cumulative sum
_for just this AIR_
$$\sum_r \left(\sum_\sigma sign(\sigma) \frac{m_\sigma[r]}{\alpha^{i_\sigma} + \sum_j \beta^j \cdot f_{\sigma,j}(\mathbf T[r])} \right)$$
where $r$ sums over all row indices, $\sigma$ sums over all sends and receives, $sign(\sigma) = 1$ if $\sigma$ is a send, and $sign(\sigma) = -1$ if $\sigma$ is a receive.

- $\alpha,\beta$ are two random challenge extension field elements.
- The reciprocal is the logUp logarithmic derivative argument.
- $\alpha^{i_\sigma}$ is used to distinguish the bus index.
- $\sum_j \beta^j \cdot f_{\sigma,j}$ is the RLC of the $(f_{\sigma,j})$ tuple.
- Add the sends, subtract the receives.

Globally, the prover will sum this per-AIR cumulative sum over all AIRs and finally constrain that the total is $0$. This enforces that the sends and receives are balanced globally across all AIRs. Note that the multiplicity allows a single send to a bus to be received by multiple AIRs.

### Virtual columns and constraints

Each $f_j$ and $m$ can be an arbitrary multivariate polynomial expression, expressed via the `AB::Expr` type within the `Air::eval` function.

For each send/receive interaction, we must add one virtual column $q_\sigma$ with row $r$ equal to
$$q_\sigma[r] = \frac{m_\sigma[r]}{\alpha^{i_\sigma} + \sum_j \beta^j \cdot f_{\sigma,j}(\mathbf T[r])}.$$

The constraint is
$$q_\sigma \cdot \left(\alpha^{i_\sigma} + \sum_j \beta^j \cdot f_{\sigma,j}(\mathbf T) \right) = m_\sigma(\mathbf T),$$
which has degree $\max(1 + \max_j \deg(f_{\sigma,j}), \deg(m_\sigma))$.

Note: we could save columns by combining $q$ columns together, at the cost of increasing the constraint degree.

We need one more virtual column $\phi$ for the cumulative sum of all sends and receives.
The row $r$ of $\phi$ contains the partial sum of all reciprocals up to row $r$:
$$\phi[r] = \sum_{r' \leq r} \left(\sum_\sigma sign(\sigma) q_\sigma[r']\right)$$

The constraints are:

- $sel_{first} \cdot \phi = sel_{first} \cdot \sum_\sigma sign(\sigma) q_\sigma$
- $sel_{transition} \cdot (\phi' - \phi) = sel_{transition} \cdot \sum_\sigma sign(\sigma) q_\sigma'$ where $\phi'$ and $q'$ mean the next row (rotation by $1$).
- $sel_{last} \cdot \phi = sum$

where $sum$ is exposed to the verifier.

In summary, we need 1 additional virtual column for each send or receive interaction, and 1 additional virtual column to track the partial sum. These columns are virtual in the sense that they are only materialized by the prover after the main trace has been committed, because a random challenge is needed.
diff --git a/crates/stark-backend/src/interaction/debug.rs b/crates/stark-backend/src/interaction/debug.rs
deleted file mode 100644
index 8bdf27fca1..0000000000
--- a/crates/stark-backend/src/interaction/debug.rs
+++ /dev/null
@@ -1,60 +0,0 @@
-use std::collections::{BTreeMap, HashMap};
-
-use itertools::Itertools;
-use p3_field::Field;
-use p3_matrix::{dense::RowMajorMatrixView, Matrix};
-
-use super::{trace::Evaluator, InteractionType, SymbolicInteraction};
-use crate::air_builders::symbolic::symbolic_expression::SymbolicEvaluator;
-
-/// The actual interactions that are sent/received during a single run
-/// of trace generation. For debugging purposes only.
-#[derive(Default, Clone, Debug)]
-pub struct LogicalInteractions<F: Field> {
-    /// Bus index => (fields => (air_idx, interaction_type, count))
-    #[allow(clippy::type_complexity)]
-    pub at_bus: BTreeMap<usize, HashMap<Vec<F>, Vec<(usize, InteractionType, F)>>>,
-}
-
-pub fn generate_logical_interactions<F: Field>(
-    air_idx: usize,
-    all_interactions: &[SymbolicInteraction<F>],
-    preprocessed: &Option<RowMajorMatrixView<F>>,
-    partitioned_main: &[RowMajorMatrixView<F>],
-    public_values: &[F],
-    logical_interactions: &mut LogicalInteractions<F>,
-) {
-    if all_interactions.is_empty() {
-        return;
-    }
-
-    let height = partitioned_main[0].height();
-
-    for n in 0..height {
-        let evaluator = Evaluator {
-            preprocessed,
-            partitioned_main,
-            public_values,
-            height,
-            local_index: n,
-        };
-        for interaction in all_interactions {
-            let fields = interaction
-                .fields
-                .iter()
-                .map(|expr| evaluator.eval_expr(expr))
-                .collect_vec();
-            let count = evaluator.eval_expr(&interaction.count);
-            if count.is_zero() {
-                continue;
-            }
-            logical_interactions
-                .at_bus
-                .entry(interaction.bus_index)
-                .or_default()
-                .entry(fields)
-                .or_default()
-                .push((air_idx, interaction.interaction_type, count));
-        }
-    }
-}
diff --git a/crates/stark-backend/src/interaction/mod.rs b/crates/stark-backend/src/interaction/mod.rs
deleted file mode 100644
index 097c282d05..0000000000
--- a/crates/stark-backend/src/interaction/mod.rs
+++ /dev/null
@@ -1,186 +0,0 @@
-use std::fmt::Debug;
-
-use p3_air::AirBuilder;
-use p3_challenger::CanObserve;
-use p3_matrix::dense::RowMajorMatrix;
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
-
-use crate::{
-    air_builders::symbolic::{symbolic_expression::SymbolicExpression, SymbolicConstraints},
-    interaction::stark_log_up::{STARK_LU_NUM_CHALLENGES, STARK_LU_NUM_EXPOSED_VALUES},
-    prover::PairTraceView,
-};
-
-/// Interaction debugging tools
-pub mod debug;
-pub mod rap;
-pub mod stark_log_up;
-pub mod trace;
-mod utils;
-
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq)]
-pub enum InteractionType {
-    Send,
-    Receive,
-}
-
-#[derive(Clone, Debug, Serialize,
Deserialize)] -pub struct Interaction { - pub fields: Vec, - pub count: Expr, - pub bus_index: usize, - pub interaction_type: InteractionType, -} - -pub type SymbolicInteraction = Interaction>; - -/// An [AirBuilder] with additional functionality to build special logUp arguments for -/// communication between AIRs across buses. These arguments use randomness to -/// add additional trace columns (in the extension field) and constraints to the AIR. -/// -/// An interactive AIR is a AIR that can specify buses for sending and receiving data -/// to other AIRs. The original AIR is augmented by virtual columns determined by -/// the interactions to define a [RAP](crate::rap::Rap). -pub trait InteractionBuilder: AirBuilder { - /// Stores a new send interaction in the builder. - fn push_send>( - &mut self, - bus_index: usize, - fields: impl IntoIterator, - count: impl Into, - ) { - self.push_interaction(bus_index, fields, count, InteractionType::Send); - } - - /// Stores a new receive interaction in the builder. - fn push_receive>( - &mut self, - bus_index: usize, - fields: impl IntoIterator, - count: impl Into, - ) { - self.push_interaction(bus_index, fields, count, InteractionType::Receive); - } - - /// Stores a new interaction in the builder. - fn push_interaction>( - &mut self, - bus_index: usize, - fields: impl IntoIterator, - count: impl Into, - interaction_type: InteractionType, - ); - - /// Returns the current number of interactions. - fn num_interactions(&self) -> usize; - - /// Returns all interactions stored. - fn all_interactions(&self) -> &[Interaction]; -} - -pub struct RapPhaseProverData { - /// Challenges from the challenger in this phase that determine RAP constraints and exposed values. - pub challenges: Vec, - - /// After challenge trace per air computed as a function of `challenges`. - pub after_challenge_trace_per_air: Vec>>, - - /// Public values of the phase that are functions of `challenges`. - pub exposed_values_per_air: Vec>>, -} - -pub struct RapPhaseVerifierData { - /// Challenges from the challenger in this phase that determine RAP constraints and exposed values. - pub challenges_per_phase: Vec>, -} - -#[derive(Debug)] -pub struct RapPhaseShape { - pub num_challenges: usize, - - pub num_exposed_values: usize, - - /// Any additional rotations to open at in the permutation PCS round. - /// - /// Specifies that each `i` in `extra_opening_rots` should be opened at - /// `zeta * g^i` (in addition to `zeta` and `zeta * g`). - pub extra_opening_rots: Vec, -} - -/// Supported challenge phases in a RAP. -#[derive(Debug, Copy, Clone, Serialize, Deserialize)] -pub enum RapPhaseSeqKind { - GkrLogUp, - /// Up to one phase with prover/verifier given by [[stark_log_up::StarkLogUpPhase]] and - /// constraints given by [[stark_log_up::eval_stark_log_up_phase]]. - StarkLogUp, -} - -impl RapPhaseSeqKind { - pub fn shape(&self) -> Vec { - match self { - RapPhaseSeqKind::StarkLogUp => vec![RapPhaseShape { - num_challenges: STARK_LU_NUM_CHALLENGES, - num_exposed_values: STARK_LU_NUM_EXPOSED_VALUES, - extra_opening_rots: vec![], - }], - RapPhaseSeqKind::GkrLogUp => todo!(), - } - } -} - -pub trait HasInteractionChunkSize { - fn interaction_chunk_size(&self) -> usize; -} - -/// Defines a particular protocol for the "after challenge" phase in a RAP. -/// -/// A [RapPhaseSeq] is defined by the proving and verifying methods implemented in this trait, -/// as well as via some "eval" method that is determined by `RapPhaseId`. 
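-/// A hypothetical call sequence (setup elided; see the method docs below):
-///
-/// ```ignore
-/// let pk_per_air = rap_phase_seq.generate_pk_per_air(symbolic_constraints_per_air);
-/// let (partial_proof, prover_data) = rap_phase_seq
-///     .partially_prove(&mut challenger, &pk_per_air, &constraints_per_air, &trace_view_per_air)
-///     .expect("at least one AIR has interactions");
-/// ```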
-pub trait RapPhaseSeq { - type PartialProof: Clone + Serialize + DeserializeOwned; - type ProvingKey: Clone + Serialize + DeserializeOwned + HasInteractionChunkSize; - type Error: Debug; - - const ID: RapPhaseSeqKind; - - /// The protocol parameters for the challenge phases may depend on the AIR constraints. - fn generate_pk_per_air( - &self, - symbolic_constraints_per_air: Vec>, - ) -> Vec; - - /// Partially prove the challenge phases, - /// - /// Samples challenges, generates after challenge traces and exposed values, and proves any - /// extra-STARK part of the protocol. - /// - /// "Partial" refers to the fact that some STARK parts of the protocol---namely, the constraints - /// on the after challenge traces returned in `RapPhaseProverData`---are handled external to - /// this function. - fn partially_prove( - &self, - challenger: &mut Challenger, - params_per_air: &[Self::ProvingKey], - constraints_per_air: &[&SymbolicConstraints], - trace_view_per_air: &[PairTraceView<'_, F>], - ) -> Option<(Self::PartialProof, RapPhaseProverData)>; - - /// Partially verifies the challenge phases. - /// - /// Assumes the shape of `exposed_values_per_air_per_phase` is verified externally. - /// - /// An implementation of this function must sample challenges for the challenge phases and then - /// observe the exposed values and commitment. - fn partially_verify( - &self, - challenger: &mut Challenger, - partial_proof: Option<&Self::PartialProof>, - exposed_values_per_air_per_phase: &[Vec>], - commitments_per_phase: &[Commitment], - // per commitment, per matrix, per rotation, per column - after_challenge_opened_values: &[Vec>>], - ) -> (RapPhaseVerifierData, Result<(), Self::Error>) - where - Challenger: CanObserve; -} diff --git a/crates/stark-backend/src/interaction/rap.rs b/crates/stark-backend/src/interaction/rap.rs deleted file mode 100644 index 7b33adcd77..0000000000 --- a/crates/stark-backend/src/interaction/rap.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! An AIR with specified interactions can be augmented into a RAP. -//! This module auto-converts any [Air] implemented on an [InteractionBuilder] into a [Rap]. - -use p3_air::Air; - -use super::{InteractionBuilder, RapPhaseSeqKind}; -use crate::{ - interaction::stark_log_up::eval_stark_log_up_phase, - rap::{PermutationAirBuilderWithExposedValues, Rap}, -}; - -/// Used internally to select RAP phase evaluation function. 
-pub(crate) trait InteractionPhaseAirBuilder { - fn finalize_interactions(&mut self); - fn interaction_chunk_size(&self) -> usize; - fn rap_phase_seq_kind(&self) -> RapPhaseSeqKind; -} - -impl Rap for A -where - A: Air, - AB: InteractionBuilder + PermutationAirBuilderWithExposedValues + InteractionPhaseAirBuilder, -{ - fn eval(&self, builder: &mut AB) { - // Constraints for the main trace: - Air::eval(self, builder); - builder.finalize_interactions(); - if builder.num_interactions() != 0 { - match builder.rap_phase_seq_kind() { - RapPhaseSeqKind::StarkLogUp => { - eval_stark_log_up_phase(builder, builder.interaction_chunk_size()); - } - RapPhaseSeqKind::GkrLogUp => todo!(), - } - } - } -} diff --git a/crates/stark-backend/src/interaction/stark_log_up.rs b/crates/stark-backend/src/interaction/stark_log_up.rs deleted file mode 100644 index d00a14b529..0000000000 --- a/crates/stark-backend/src/interaction/stark_log_up.rs +++ /dev/null @@ -1,545 +0,0 @@ -use std::{array, borrow::Borrow, marker::PhantomData}; - -use itertools::{izip, Itertools}; -use p3_air::ExtensionBuilder; -use p3_challenger::{CanObserve, FieldChallenger}; -use p3_field::{AbstractField, ExtensionField, Field}; -use p3_matrix::{dense::RowMajorMatrix, Matrix}; -use p3_maybe_rayon::prelude::*; -use serde::{Deserialize, Serialize}; -use thiserror::Error; - -use crate::{ - air_builders::symbolic::{ - symbolic_expression::{SymbolicEvaluator, SymbolicExpression}, - SymbolicConstraints, - }, - interaction::{ - trace::Evaluator, - utils::{generate_betas, generate_rlc_elements}, - HasInteractionChunkSize, Interaction, InteractionBuilder, InteractionType, - RapPhaseProverData, RapPhaseSeq, RapPhaseSeqKind, RapPhaseVerifierData, - }, - parizip, - prover::PairTraceView, - rap::PermutationAirBuilderWithExposedValues, -}; - -#[derive(Default)] -pub struct StarkLogUpPhase { - _marker: PhantomData<(F, Challenge, Challenger)>, -} - -impl StarkLogUpPhase { - pub fn new() -> Self { - Self { - _marker: PhantomData, - } - } -} - -#[derive(Error, Debug)] -pub enum StarkLogUpError { - #[error("non-zero cumulative sum")] - NonZeroCumulativeSum, -} - -#[derive(Clone, Serialize, Deserialize)] -pub struct StarkLogUpProvingKey { - chunk_size: usize, -} - -impl HasInteractionChunkSize for StarkLogUpProvingKey { - fn interaction_chunk_size(&self) -> usize { - self.chunk_size - } -} - -impl RapPhaseSeq - for StarkLogUpPhase -where - F: Field, - Challenge: ExtensionField, - Challenger: FieldChallenger, -{ - type PartialProof = (); - type ProvingKey = StarkLogUpProvingKey; - type Error = StarkLogUpError; - const ID: RapPhaseSeqKind = RapPhaseSeqKind::StarkLogUp; - - fn generate_pk_per_air( - &self, - symbolic_constraints_per_air: Vec>, - ) -> Vec { - let global_max_constraint_degree = symbolic_constraints_per_air - .iter() - .map(|constraints| constraints.max_constraint_degree()) - .max() - .unwrap_or(0); - - symbolic_constraints_per_air - .iter() - .map(|constraints| { - let chunk_size = - find_interaction_chunk_size(constraints, global_max_constraint_degree); - StarkLogUpProvingKey { chunk_size } - }) - .collect_vec() - } - - fn partially_prove( - &self, - challenger: &mut Challenger, - rap_pk_per_air: &[Self::ProvingKey], - constraints_per_air: &[&SymbolicConstraints], - trace_view_per_air: &[PairTraceView<'_, F>], - ) -> Option<(Self::PartialProof, RapPhaseProverData)> { - let has_any_interactions = constraints_per_air - .iter() - .any(|constraints| !constraints.interactions.is_empty()); - - if !has_any_interactions { - return None; - } - - let 
challenges: [Challenge; STARK_LU_NUM_CHALLENGES] = - array::from_fn(|_| challenger.sample_ext_element::()); - - let after_challenge_trace_per_air = tracing::info_span!("generate permutation traces") - .in_scope(|| { - Self::generate_after_challenge_traces_per_air( - &challenges, - constraints_per_air, - rap_pk_per_air, - trace_view_per_air, - ) - }); - let cumulative_sum_per_air = Self::extract_cumulative_sums(&after_challenge_trace_per_air); - - // Challenger needs to observe what is exposed (cumulative_sums) - for cumulative_sum in cumulative_sum_per_air.iter().flatten() { - challenger.observe_slice(cumulative_sum.as_base_slice()); - } - - let exposed_values_per_air = cumulative_sum_per_air - .iter() - .map(|csum| csum.map(|csum| vec![csum])) - .collect_vec(); - - Some(( - (), - RapPhaseProverData { - challenges: challenges.to_vec(), - after_challenge_trace_per_air, - exposed_values_per_air, - }, - )) - } - - fn partially_verify( - &self, - challenger: &mut Challenger, - _partial_proof: Option<&Self::PartialProof>, - exposed_values_per_phase_per_air: &[Vec>], - commitment_per_phase: &[Commitment], - _permutation_opened_values: &[Vec>>], - ) -> (RapPhaseVerifierData, Result<(), Self::Error>) - where - Challenger: CanObserve, - { - if exposed_values_per_phase_per_air - .iter() - .all(|exposed_values_per_phase_per_air| exposed_values_per_phase_per_air.is_empty()) - { - return ( - RapPhaseVerifierData { - challenges_per_phase: vec![], - }, - Ok(()), - ); - } - - let challenges: [Challenge; STARK_LU_NUM_CHALLENGES] = - array::from_fn(|_| challenger.sample_ext_element::()); - - for exposed_values_per_phase in exposed_values_per_phase_per_air.iter() { - if let Some(exposed_values) = exposed_values_per_phase.first() { - for exposed_value in exposed_values { - challenger.observe_slice(exposed_value.as_base_slice()); - } - } - } - - challenger.observe(commitment_per_phase[0].clone()); - - let cumulative_sums = exposed_values_per_phase_per_air - .iter() - .map(|exposed_values_per_phase| { - assert!( - exposed_values_per_phase.len() <= 1, - "Verifier does not support more than 1 challenge phase" - ); - exposed_values_per_phase.first().map(|exposed_values| { - assert_eq!( - exposed_values.len(), - 1, - "Only exposed value should be cumulative sum" - ); - exposed_values[0] - }) - }) - .collect_vec(); - - // Check cumulative sum - let sum: Challenge = cumulative_sums - .into_iter() - .map(|c| c.unwrap_or(Challenge::ZERO)) - .sum(); - - let result = if sum == Challenge::ZERO { - Ok(()) - } else { - Err(Self::Error::NonZeroCumulativeSum) - }; - let verifier_data = RapPhaseVerifierData { - challenges_per_phase: vec![challenges.to_vec()], - }; - (verifier_data, result) - } -} - -pub const STARK_LU_NUM_CHALLENGES: usize = 2; -pub const STARK_LU_NUM_EXPOSED_VALUES: usize = 1; - -impl StarkLogUpPhase -where - F: Field, - Challenge: ExtensionField, - Challenger: FieldChallenger, -{ - /// Returns a list of optional tuples of (permutation trace,cumulative sum) for each AIR. 
- fn generate_after_challenge_traces_per_air( - challenges: &[Challenge; STARK_LU_NUM_CHALLENGES], - constraints_per_air: &[&SymbolicConstraints], - params_per_air: &[StarkLogUpProvingKey], - trace_view_per_air: &[PairTraceView<'_, F>], - ) -> Vec>> { - parizip!(constraints_per_air, trace_view_per_air, params_per_air) - .map(|(constraints, trace_view, params)| { - Self::generate_after_challenge_trace( - &constraints.interactions, - trace_view, - challenges, - params.chunk_size, - ) - }) - .collect::>() - } - - fn extract_cumulative_sums( - perm_traces: &[Option>], - ) -> Vec> { - perm_traces - .iter() - .map(|perm_trace| { - perm_trace.as_ref().map(|perm_trace| { - *perm_trace - .row_slice(perm_trace.height() - 1) - .last() - .unwrap() - }) - }) - .collect() - } - - // Copied from valida/machine/src/chip.rs, modified to allow partitioned main trace - /// Generate the permutation trace for a chip given the main trace. - /// The permutation randomness is only available after the main trace from all chips - /// involved in interactions have been committed. - /// - /// - `partitioned_main` is the main trace, partitioned into several matrices of the same height - /// - /// Returns the permutation trace as a matrix of extension field elements. - /// - /// ## Panics - /// - If `partitioned_main` is empty. - pub fn generate_after_challenge_trace( - all_interactions: &[Interaction>], - trace_view: &PairTraceView<'_, F>, - permutation_randomness: &[Challenge; STARK_LU_NUM_CHALLENGES], - interaction_chunk_size: usize, - ) -> Option> - where - F: Field, - Challenge: ExtensionField, - { - if all_interactions.is_empty() { - return None; - } - let &[alpha, beta] = permutation_randomness; - - let alphas = generate_rlc_elements(alpha, all_interactions); - let betas = generate_betas(beta, all_interactions); - - // Compute the reciprocal columns - // - // For every row we do the following - // We first compute the reciprocals: r_1, r_2, ..., r_n, where - // r_i = \frac{1}{\alpha^i + \sum_j \beta^j * f_{i, j}}, where - // f_{i, j} is the jth main trace column for the ith interaction - // - // We then bundle every interaction_chunk_size interactions together - // to get the value perm_i = \sum_{i \in bundle} r_i * m_i, where m_i - // is the signed count for the interaction. - // - // Finally, the last column, \phi, of every row is the running sum of - // all the previous perm values - // - // Row: | perm_1 | perm_2 | perm_3 | ... | perm_s | phi |, where s - // is the number of bundles - let num_interactions = all_interactions.len(); - let height = trace_view.partitioned_main[0].height(); - // To optimize memory and parallelism, we split the trace rows into chunks - // based on the number of cpu threads available, and then do all - // computations necessary for that chunk within a single thread. 
- let perm_width = num_interactions.div_ceil(interaction_chunk_size) + 1; - let mut perm_values = Challenge::zero_vec(height * perm_width); - debug_assert!( - trace_view - .partitioned_main - .iter() - .all(|m| m.height() == height), - "All main trace parts must have same height" - ); - - #[cfg(feature = "parallel")] - let num_threads = rayon::current_num_threads(); - #[cfg(not(feature = "parallel"))] - let num_threads = 1; - - let height_chunk_size = height.div_ceil(num_threads); - perm_values - .par_chunks_mut(height_chunk_size * perm_width) - .enumerate() - .for_each(|(chunk_idx, perm_values)| { - // perm_values is now local_height x perm_width row-major matrix - let num_rows = perm_values.len() / perm_width; - // the interaction chunking requires more memory because we must - // allocate separate memory for the denominators and reciprocals - let mut denoms = Challenge::zero_vec(num_rows * num_interactions); - let row_offset = chunk_idx * height_chunk_size; - // compute the denominators to be inverted: - for (n, denom_row) in denoms.chunks_exact_mut(num_interactions).enumerate() { - let evaluator = Evaluator { - preprocessed: trace_view.preprocessed, - partitioned_main: trace_view.partitioned_main, - public_values: trace_view.public_values, - height, - local_index: row_offset + n, - }; - for (denom, interaction) in denom_row.iter_mut().zip(all_interactions.iter()) { - let alpha = alphas[interaction.bus_index]; - debug_assert!(interaction.fields.len() <= betas.len()); - let mut fields = interaction.fields.iter(); - *denom = alpha - + evaluator - .eval_expr(fields.next().expect("fields should not be empty")); - for (expr, &beta) in fields.zip(betas.iter().skip(1)) { - *denom += beta * evaluator.eval_expr(expr); - } - } - } - - // Zero should be vanishingly unlikely if alpha, beta are properly pseudo-randomized - // The logup reciprocals should never be zero, so trace generation should panic if - // trying to divide by zero. 
- let reciprocals = p3_field::batch_multiplicative_inverse(&denoms); - drop(denoms); - // This block should already be in a single thread, but rayon is able - // to do more magic sometimes - perm_values - .par_chunks_exact_mut(perm_width) - .zip(reciprocals.par_chunks_exact(num_interactions)) - .enumerate() - .for_each(|(n, (perm_row, reciprocal_chunk))| { - debug_assert_eq!(perm_row.len(), perm_width); - debug_assert_eq!(reciprocal_chunk.len(), num_interactions); - - let evaluator = Evaluator { - preprocessed: trace_view.preprocessed, - partitioned_main: trace_view.partitioned_main, - public_values: trace_view.public_values, - height, - local_index: row_offset + n, - }; - - let mut row_sum = Challenge::ZERO; - for (perm_val, reciprocal_chunk, interaction_chunk) in izip!( - perm_row.iter_mut(), - reciprocal_chunk.chunks(interaction_chunk_size), - all_interactions.chunks(interaction_chunk_size) - ) { - for (reciprocal, interaction) in - izip!(reciprocal_chunk, interaction_chunk) - { - let mut interaction_val = - *reciprocal * evaluator.eval_expr(&interaction.count); - if interaction.interaction_type == InteractionType::Receive { - interaction_val = -interaction_val; - } - *perm_val += interaction_val; - } - row_sum += *perm_val; - } - - perm_row[perm_width - 1] = row_sum; - }); - }); - - // At this point, the trace matrix is complete except that the last column - // has the row sum but not the partial sum - tracing::trace_span!("compute logup partial sums").in_scope(|| { - let mut phi = Challenge::ZERO; - for perm_chunk in perm_values.chunks_exact_mut(perm_width) { - phi += *perm_chunk.last().unwrap(); - *perm_chunk.last_mut().unwrap() = phi; - } - }); - - Some(RowMajorMatrix::new(perm_values, perm_width)) - } -} - -// Initial version taken from valida/machine/src/chip.rs under MIT license. -/// The permutation row consists of 1 column for each bundle of interactions -/// and one column for the partial sum of log derivative. These columns are trace columns -/// "after challenge" phase 0, and they are valued in the extension field. 
-/// For more details, see the comment in the trace.rs file -pub fn eval_stark_log_up_phase(builder: &mut AB, interaction_chunk_size: usize) -where - AB: InteractionBuilder + PermutationAirBuilderWithExposedValues, -{ - let exposed_values = builder.permutation_exposed_values(); - // There are interactions, add constraints for the virtual columns - assert_eq!( - exposed_values.len(), - 1, - "Should have one exposed value for cumulative_sum" - ); - let cumulative_sum = exposed_values[0]; - - let rand_elems = builder.permutation_randomness(); - - let perm = builder.permutation(); - let (perm_local, perm_next) = (perm.row_slice(0), perm.row_slice(1)); - let perm_local: &[AB::VarEF] = (*perm_local).borrow(); - let perm_next: &[AB::VarEF] = (*perm_next).borrow(); - - let all_interactions = builder.all_interactions().to_vec(); - #[cfg(debug_assertions)] - { - let num_interactions = all_interactions.len(); - let perm_width = num_interactions.div_ceil(interaction_chunk_size) + 1; - assert_eq!(perm_width, perm_local.len()); - assert_eq!(perm_width, perm_next.len()); - } - let phi_local = *perm_local.last().unwrap(); - let phi_next = *perm_next.last().unwrap(); - - let alphas = generate_rlc_elements(rand_elems[0].into(), &all_interactions); - let betas = generate_betas(rand_elems[1].into(), &all_interactions); - - let phi_lhs = phi_next.into() - phi_local.into(); - let mut phi_rhs = AB::ExprEF::ZERO; - let mut phi_0 = AB::ExprEF::ZERO; - - for (chunk_idx, interaction_chunk) in - all_interactions.chunks(interaction_chunk_size).enumerate() - { - let interaction_chunk = interaction_chunk.to_vec(); - - let denoms_per_chunk = interaction_chunk - .iter() - .map(|interaction| { - assert!(!interaction.fields.is_empty(), "fields should not be empty"); - let mut field_hash = AB::ExprEF::ZERO; - for (field, beta) in interaction.fields.iter().zip(betas.iter()) { - field_hash += beta.clone() * field.clone(); - } - field_hash + alphas[interaction.bus_index].clone() - }) - .collect_vec(); - - let mut row_lhs: AB::ExprEF = perm_local[chunk_idx].into(); - for denom in denoms_per_chunk.iter() { - row_lhs *= denom.clone(); - } - - let mut row_rhs = AB::ExprEF::ZERO; - for (i, interaction) in interaction_chunk.into_iter().enumerate() { - let mut term: AB::ExprEF = interaction.count.into(); - if interaction.interaction_type == InteractionType::Receive { - term = -term; - } - for (j, denom) in denoms_per_chunk.iter().enumerate() { - if i != j { - term *= denom.clone(); - } - } - row_rhs += term; - } - - // Some analysis on the degrees of row_lhs and row_rhs: - // - // Let max_field_degree be the maximum degree of all fields across all interactions - // for the AIR. Define max_count_degree similarly for the counts of the interactions. - // - // By construction, the degree of row_lhs is bounded by 1 + max_field_degree * interaction_chunk_size, - // and the degree of row_rhs is bounded by max_count_degree + max_field_degree * (interaction_chunk_size-1) - builder.assert_eq_ext(row_lhs, row_rhs); - - phi_0 += perm_local[chunk_idx].into(); - phi_rhs += perm_next[chunk_idx].into(); - } - - // Running sum constraints - builder.when_transition().assert_eq_ext(phi_lhs, phi_rhs); - builder - .when_first_row() - .assert_eq_ext(*perm_local.last().unwrap(), phi_0); - builder - .when_last_row() - .assert_eq_ext(*perm_local.last().unwrap(), cumulative_sum); -} - -/// Computes the interaction chunk size for the AIR. -/// -/// `global_max_constraint_degree` is the maximum constraint degree across all AIRs. 
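-///
-/// As a worked example of the rule below (illustrative): with
-/// `global_max_constraint_degree = 3`, `max_field_degree = 1`, and `max_count_degree = 1`,
-/// the chunk size is `min((3 - 1) / 1, (3 - 1 + 1) / 1) = 2`.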
-/// The degree of the dominating logup constraint is bounded by -/// -/// logup_degree = max( -/// 1 + max_field_degree * interaction_chunk_size, -/// max_count_degree + max_field_degree * (interaction_chunk_size - 1) -/// ) -/// -/// More details about this can be found in the function [eval_stark_log_up_phase]. -/// -/// The goal is to pick `interaction_chunk_size` so that `logup_degree` does not -/// exceed `max_constraint_degree` (if possible), while maximizing `interaction_chunk_size`. -fn find_interaction_chunk_size( - constraints: &SymbolicConstraints, - global_max_constraint_degree: usize, -) -> usize { - let (max_field_degree, max_count_degree) = constraints.max_interaction_degrees(); - - if max_field_degree == 0 { - 1 - } else { - let mut interaction_chunk_size = (global_max_constraint_degree - 1) / max_field_degree; - interaction_chunk_size = interaction_chunk_size.min( - (global_max_constraint_degree - max_count_degree + max_field_degree) / max_field_degree, - ); - interaction_chunk_size = interaction_chunk_size.max(1); - interaction_chunk_size - } -} diff --git a/crates/stark-backend/src/interaction/trace.rs b/crates/stark-backend/src/interaction/trace.rs deleted file mode 100644 index 3faf080df9..0000000000 --- a/crates/stark-backend/src/interaction/trace.rs +++ /dev/null @@ -1,33 +0,0 @@ -use p3_field::Field; -use p3_matrix::{dense::RowMajorMatrixView, Matrix}; - -use crate::air_builders::symbolic::{ - symbolic_expression::SymbolicEvaluator, - symbolic_variable::{Entry, SymbolicVariable}, -}; - -pub(super) struct Evaluator<'a, F: Field> { - pub preprocessed: &'a Option>, - pub partitioned_main: &'a [RowMajorMatrixView<'a, F>], - pub public_values: &'a [F], - pub height: usize, - pub local_index: usize, -} - -impl SymbolicEvaluator for Evaluator<'_, F> { - fn eval_var(&self, symbolic_var: SymbolicVariable) -> F { - let n = self.local_index; - let height = self.height; - let index = symbolic_var.index; - match symbolic_var.entry { - Entry::Preprocessed { offset } => { - self.preprocessed.unwrap().get((n + offset) % height, index) - } - Entry::Main { part_index, offset } => { - self.partitioned_main[part_index].get((n + offset) % height, index) - } - Entry::Public => self.public_values[index], - _ => unreachable!("There should be no after challenge variables"), - } - } -} diff --git a/crates/stark-backend/src/interaction/utils.rs b/crates/stark-backend/src/interaction/utils.rs deleted file mode 100644 index 9b50288593..0000000000 --- a/crates/stark-backend/src/interaction/utils.rs +++ /dev/null @@ -1,59 +0,0 @@ -use p3_air::VirtualPairCol; -use p3_field::{AbstractField, ExtensionField, Field, Powers}; - -use super::Interaction; - -/// Returns [random_element, random_element^2, ..., random_element^{max_bus_index + 1}]. -pub fn generate_rlc_elements( - random_element: AF, - all_interactions: &[Interaction], -) -> Vec { - let max_bus_index = all_interactions - .iter() - .map(|interaction| interaction.bus_index) - .max() - .unwrap_or(0); - - random_element - .powers() - .skip(1) - .take(max_bus_index + 1) - .collect() -} - -/// Returns [beta^0, beta^1, ..., beta^{max_num_fields - 1}] -/// where max_num_fields is the maximum length of `fields` in any interaction. 
-pub fn generate_betas( - beta: AF, - all_interactions: &[Interaction], -) -> Vec { - let max_fields_len = all_interactions - .iter() - .map(|interaction| interaction.fields.len()) - .max() - .unwrap_or(0); - - beta.powers().take(max_fields_len).collect() -} - -// TODO: Use Var and Expr type bounds in place of concrete fields so that -// this function can be used in `eval_permutation_constraints`. -#[allow(dead_code)] -pub fn reduce_row( - preprocessed_row: &[F], - main_row: &[F], - fields: &[VirtualPairCol], - alpha: EF, - betas: Powers, -) -> EF -where - F: Field, - EF: ExtensionField, -{ - let mut rlc = EF::ZERO; - for (columns, beta) in fields.iter().zip(betas) { - rlc += beta * columns.apply::(preprocessed_row, main_row) - } - rlc += alpha; - rlc -} diff --git a/crates/stark-backend/src/keygen/mod.rs b/crates/stark-backend/src/keygen/mod.rs deleted file mode 100644 index 212d683cf5..0000000000 --- a/crates/stark-backend/src/keygen/mod.rs +++ /dev/null @@ -1,242 +0,0 @@ -use std::sync::Arc; - -use itertools::Itertools; -use p3_field::AbstractExtensionField; -use p3_matrix::Matrix; -use tracing::instrument; - -use crate::{ - air_builders::symbolic::{get_symbolic_builder, SymbolicRapBuilder}, - config::{RapPhaseSeqProvingKey, StarkGenericConfig, Val}, - interaction::{HasInteractionChunkSize, RapPhaseSeq, RapPhaseSeqKind}, - keygen::types::{ - MultiStarkProvingKey, ProverOnlySinglePreprocessedData, StarkProvingKey, StarkVerifyingKey, - TraceWidth, VerifierSinglePreprocessedData, - }, - prover::types::TraceCommitter, - rap::AnyRap, -}; - -pub mod types; -pub(crate) mod view; - -struct AirKeygenBuilder { - air: Arc>, - rap_phase_seq_kind: RapPhaseSeqKind, - prep_keygen_data: PrepKeygenData, -} - -/// Stateful builder to create multi-stark proving and verifying keys -/// for system of multiple RAPs with multiple multi-matrix commitments -pub struct MultiStarkKeygenBuilder<'a, SC: StarkGenericConfig> { - pub config: &'a SC, - /// Information for partitioned AIRs. - partitioned_airs: Vec>, -} - -impl<'a, SC: StarkGenericConfig> MultiStarkKeygenBuilder<'a, SC> { - pub fn new(config: &'a SC) -> Self { - Self { - config, - partitioned_airs: vec![], - } - } - - /// Default way to add a single Interactive AIR. - /// Returns `air_id` - #[instrument(level = "debug", skip_all)] - pub fn add_air(&mut self, air: Arc>) -> usize { - self.partitioned_airs.push(AirKeygenBuilder::new( - self.config.pcs(), - SC::RapPhaseSeq::ID, - air, - )); - self.partitioned_airs.len() - 1 - } - - /// Consume the builder and generate proving key. - /// The verifying key can be obtained from the proving key. 
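-    ///
-    /// A hypothetical usage sketch (config and AIR construction elided):
-    ///
-    /// ```ignore
-    /// let mut keygen_builder = MultiStarkKeygenBuilder::new(&config);
-    /// let air_id = keygen_builder.add_air(air);
-    /// let mpk = keygen_builder.generate_pk();
-    /// ```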
- pub fn generate_pk(self) -> MultiStarkProvingKey { - let global_max_constraint_degree = self - .partitioned_airs - .iter() - .map(|keygen_builder| { - let max_constraint_degree = keygen_builder.max_constraint_degree(); - tracing::debug!( - "{} has constraint degree {}", - keygen_builder.air.name(), - max_constraint_degree - ); - max_constraint_degree - }) - .max() - .unwrap(); - tracing::info!( - "Max constraint (excluding logup constraints) degree across all AIRs: {}", - global_max_constraint_degree - ); - - let symbolic_constraints_per_air = self - .partitioned_airs - .iter() - .map(|keygen_builder| keygen_builder.get_symbolic_builder(None).constraints()) - .collect(); - let rap_phase_seq_pk_per_air = self - .config - .rap_phase_seq() - .generate_pk_per_air(symbolic_constraints_per_air); - - let pk_per_air: Vec<_> = self - .partitioned_airs - .into_iter() - .zip_eq(rap_phase_seq_pk_per_air) - .map(|(keygen_builder, params)| keygen_builder.generate_pk(params)) - .collect(); - - for pk in pk_per_air.iter() { - let width = &pk.vk.params.width; - tracing::info!("{:<20} | Quotient Deg = {:<2} | Prep Cols = {:<2} | Main Cols = {:<8} | Perm Cols = {:<4} | {:4} Constraints | {:3} Interactions On Buses {:?}", - pk.air_name, - pk.vk.quotient_degree, - width.preprocessed.unwrap_or(0), - format!("{:?}",width.main_widths()), - format!("{:?}",width.after_challenge.iter().map(|&x| x * >>::D).collect_vec()), - pk.vk.symbolic_constraints.constraints.len(), - pk.vk.symbolic_constraints.interactions.len(), - pk.vk - .symbolic_constraints - .interactions - .iter() - .map(|i| i.bus_index) - .collect_vec() - ); - #[cfg(feature = "bench-metrics")] - { - let labels = [("air_name", pk.air_name.clone())]; - metrics::counter!("quotient_deg", &labels).absolute(pk.vk.quotient_degree as u64); - // column info will be logged by prover later - metrics::counter!("constraints", &labels) - .absolute(pk.vk.symbolic_constraints.constraints.len() as u64); - metrics::counter!("interactions", &labels) - .absolute(pk.vk.symbolic_constraints.interactions.len() as u64); - } - } - - MultiStarkProvingKey { - per_air: pk_per_air, - max_constraint_degree: global_max_constraint_degree, - } - } -} - -impl AirKeygenBuilder { - fn new(pcs: &SC::Pcs, rap_phase_seq_kind: RapPhaseSeqKind, air: Arc>) -> Self { - let prep_keygen_data = compute_prep_data_for_air(pcs, air.as_ref()); - AirKeygenBuilder { - air, - rap_phase_seq_kind, - prep_keygen_data, - } - } - - fn max_constraint_degree(&self) -> usize { - self.get_symbolic_builder(None) - .constraints() - .max_constraint_degree() - } - - fn generate_pk(self, rap_phase_seq_pk: RapPhaseSeqProvingKey) -> StarkProvingKey { - let air_name = self.air.name(); - - let interaction_chunk_size = rap_phase_seq_pk.interaction_chunk_size(); - let symbolic_builder = self.get_symbolic_builder(Some(interaction_chunk_size)); - let params = symbolic_builder.params(); - let symbolic_constraints = symbolic_builder.constraints(); - let log_quotient_degree = symbolic_constraints.get_log_quotient_degree(); - let quotient_degree = 1 << log_quotient_degree; - - let Self { - prep_keygen_data: - PrepKeygenData { - verifier_data: prep_verifier_data, - prover_data: prep_prover_data, - }, - .. 
- } = self; - - let vk = StarkVerifyingKey { - preprocessed_data: prep_verifier_data, - params, - symbolic_constraints, - quotient_degree, - rap_phase_seq_kind: self.rap_phase_seq_kind, - }; - StarkProvingKey { - air_name, - vk, - preprocessed_data: prep_prover_data, - rap_phase_seq_pk, - } - } - - fn get_symbolic_builder( - &self, - interaction_chunk_size: Option, - ) -> SymbolicRapBuilder> { - let width = TraceWidth { - preprocessed: self.prep_keygen_data.width(), - cached_mains: self.air.cached_main_widths(), - common_main: self.air.common_main_width(), - after_challenge: vec![], - }; - get_symbolic_builder( - self.air.as_ref(), - &width, - &[], - &[], - SC::RapPhaseSeq::ID, - interaction_chunk_size.unwrap_or(1), - ) - } -} - -pub(super) struct PrepKeygenData { - pub verifier_data: Option>, - pub prover_data: Option>, -} - -impl PrepKeygenData { - pub fn width(&self) -> Option { - self.prover_data.as_ref().map(|d| d.trace.width()) - } -} - -fn compute_prep_data_for_air( - pcs: &SC::Pcs, - air: &dyn AnyRap, -) -> PrepKeygenData { - let preprocessed_trace = air.preprocessed_trace(); - let vpdata_opt = preprocessed_trace.map(|trace| { - let trace_committer = TraceCommitter::::new(pcs); - let data = trace_committer.commit(vec![trace.clone()]); - let vdata = VerifierSinglePreprocessedData { - commit: data.commit, - }; - let pdata = ProverOnlySinglePreprocessedData { - trace, - data: data.data, - }; - (vdata, pdata) - }); - if let Some((vdata, pdata)) = vpdata_opt { - PrepKeygenData { - prover_data: Some(pdata), - verifier_data: Some(vdata), - } - } else { - PrepKeygenData { - prover_data: None, - verifier_data: None, - } - } -} diff --git a/crates/stark-backend/src/keygen/types.rs b/crates/stark-backend/src/keygen/types.rs deleted file mode 100644 index f0bba742c7..0000000000 --- a/crates/stark-backend/src/keygen/types.rs +++ /dev/null @@ -1,171 +0,0 @@ -// Keygen V2 API for STARK backend -// Changes: -// - All AIRs can be optional -use std::sync::Arc; - -use derivative::Derivative; -use p3_matrix::dense::RowMajorMatrix; -use serde::{Deserialize, Serialize}; - -use crate::{ - air_builders::symbolic::SymbolicConstraints, - config::{Com, PcsProverData, RapPhaseSeqProvingKey, StarkGenericConfig, Val}, - interaction::RapPhaseSeqKind, -}; - -/// Widths of different parts of trace matrix -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct TraceWidth { - pub preprocessed: Option, - pub cached_mains: Vec, - pub common_main: usize, - /// Width counted by extension field elements, _not_ base field elements - pub after_challenge: Vec, -} - -impl TraceWidth { - /// Returns the widths of all main traces, including the common main trace if it exists. 
-    pub fn main_widths(&self) -> Vec<usize> {
-        let mut ret = self.cached_mains.clone();
-        if self.common_main != 0 {
-            ret.push(self.common_main);
-        }
-        ret
-    }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct StarkVerifyingParams {
-    /// Trace sub-matrix widths
-    pub width: TraceWidth,
-    /// Number of public values for this STARK only
-    pub num_public_values: usize,
-    /// Number of values to expose to verifier in each trace challenge phase
-    pub num_exposed_values_after_challenge: Vec<usize>,
-    /// For only this RAP, how many challenges are needed in each trace challenge phase
-    pub num_challenges_to_sample: Vec<usize>,
-}
-
-/// Verifying key for a single STARK (corresponding to a single AIR matrix)
-#[derive(Derivative, Serialize, Deserialize)]
-#[derivative(Clone(bound = "Com<SC>: Clone"))]
-#[serde(bound(
-    serialize = "Com<SC>: Serialize",
-    deserialize = "Com<SC>: Deserialize<'de>"
-))]
-pub struct StarkVerifyingKey<SC: StarkGenericConfig> {
-    /// Preprocessed trace data, if any
-    pub preprocessed_data: Option<VerifierSinglePreprocessedData<SC>>,
-    /// Parameters of the STARK
-    pub params: StarkVerifyingParams,
-    /// Symbolic constraints of the AIR in all challenge phases. This is
-    /// a serialization of the constraints in the AIR.
-    pub symbolic_constraints: SymbolicConstraints<Val<SC>>,
-    /// The factor to multiply the trace degree by to get the degree of the quotient polynomial.
-    /// Determined from the max constraint degree of the AIR constraints.
-    /// This is equivalently the number of chunks the quotient polynomial is split into.
-    pub quotient_degree: usize,
-    pub rap_phase_seq_kind: RapPhaseSeqKind,
-}
-
-/// Common verifying key for multiple AIRs.
-///
-/// This struct contains the necessary data for the verifier to verify proofs generated for
-/// multiple AIRs using a single verifying key.
-#[derive(Derivative, Serialize, Deserialize)]
-#[derivative(Clone(bound = "Com<SC>: Clone"))]
-#[serde(bound(
-    serialize = "Com<SC>: Serialize",
-    deserialize = "Com<SC>: Deserialize<'de>"
-))]
-pub struct MultiStarkVerifyingKey<SC: StarkGenericConfig> {
-    pub per_air: Vec<StarkVerifyingKey<SC>>,
-}
-
-/// Proving key for a single STARK (corresponding to a single AIR matrix)
-#[derive(Serialize, Deserialize, Derivative)]
-#[derivative(Clone(bound = "Com<SC>: Clone"))]
-#[serde(bound(
-    serialize = "PcsProverData<SC>: Serialize",
-    deserialize = "PcsProverData<SC>: Deserialize<'de>"
-))]
-pub struct StarkProvingKey<SC: StarkGenericConfig> {
-    /// Type name of the AIR, for display purposes only
-    pub air_name: String,
-    /// Verifying key
-    pub vk: StarkVerifyingKey<SC>,
-    /// Prover only data for preprocessed trace
-    pub preprocessed_data: Option<ProverOnlySinglePreprocessedData<SC>>,
-    pub rap_phase_seq_pk: RapPhaseSeqProvingKey<SC>,
-}
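// --- Editor's note: illustrative sketch, not part of the original patch. ---
// `main_widths` concatenates the cached main widths with the common main width
// (when nonzero). With hypothetical widths:
//
//     let width = TraceWidth {
//         preprocessed: Some(2),
//         cached_mains: vec![4, 6],
//         common_main: 8,
//         after_challenge: vec![1], // counted in extension field elements
//     };
//     assert_eq!(width.main_widths(), vec![4, 6, 8]);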
-
-/// Common proving key for multiple AIRs.
-///
-/// This struct contains the necessary data for the prover to generate proofs for multiple AIRs
-/// using a single proving key.
-#[derive(Serialize, Deserialize, Derivative)]
-#[derivative(Clone(bound = "Com<SC>: Clone"))]
-#[serde(bound(
-    serialize = "PcsProverData<SC>: Serialize",
-    deserialize = "PcsProverData<SC>: Deserialize<'de>"
-))]
-pub struct MultiStarkProvingKey<SC: StarkGenericConfig> {
-    pub per_air: Vec<StarkProvingKey<SC>>,
-    /// Maximum degree of constraints (excluding logup constraints) across all AIRs
-    pub max_constraint_degree: usize,
-}
-
-impl<SC: StarkGenericConfig> StarkVerifyingKey<SC> {
-    pub fn num_cached_mains(&self) -> usize {
-        self.params.width.cached_mains.len()
-    }
-
-    pub fn has_common_main(&self) -> bool {
-        self.params.width.common_main != 0
-    }
-
-    pub fn has_interaction(&self) -> bool {
-        !self.symbolic_constraints.interactions.is_empty()
-    }
-}
-
-impl<SC: StarkGenericConfig> MultiStarkProvingKey<SC> {
-    pub fn get_vk(&self) -> MultiStarkVerifyingKey<SC> {
-        MultiStarkVerifyingKey {
-            per_air: self.per_air.iter().map(|pk| pk.vk.clone()).collect(),
-        }
-    }
-}
-impl<SC: StarkGenericConfig> MultiStarkVerifyingKey<SC> {
-    pub fn num_challenges_per_phase(&self) -> Vec<usize> {
-        self.full_view().num_challenges_per_phase()
-    }
-}
-
-/// Prover only data for preprocessed trace for a single AIR.
-/// Currently assumes each AIR has its own preprocessed commitment
-#[derive(Serialize, Deserialize, Derivative)]
-#[derivative(Clone(bound = "Com<SC>: Clone"))]
-#[serde(bound(
-    serialize = "PcsProverData<SC>: Serialize",
-    deserialize = "PcsProverData<SC>: Deserialize<'de>"
-))]
-pub struct ProverOnlySinglePreprocessedData<SC: StarkGenericConfig> {
-    /// Preprocessed trace matrix.
-    pub trace: RowMajorMatrix<Val<SC>>,
-    /// Prover data, such as a Merkle tree, for the trace commitment.
-    pub data: Arc<PcsProverData<SC>>,
-}
-
-/// Verifier data for preprocessed trace for a single AIR.
-///
-/// Currently assumes each AIR has its own preprocessed commitment
-#[derive(Derivative, Serialize, Deserialize)]
-#[derivative(Clone(bound = "Com<SC>: Clone"))]
-#[serde(bound(
-    serialize = "Com<SC>: Serialize",
-    deserialize = "Com<SC>: Deserialize<'de>"
-))]
-pub struct VerifierSinglePreprocessedData<SC: StarkGenericConfig> {
-    /// Commitment to the preprocessed trace.
-    pub commit: Com<SC>,
-}
diff --git a/crates/stark-backend/src/keygen/view.rs b/crates/stark-backend/src/keygen/view.rs
deleted file mode 100644
index ba92b9f9cb..0000000000
--- a/crates/stark-backend/src/keygen/view.rs
+++ /dev/null
@@ -1,92 +0,0 @@
-use itertools::Itertools;
-
-use crate::{
-    config::{Com, StarkGenericConfig},
-    keygen::types::{
-        MultiStarkProvingKey, MultiStarkVerifyingKey, StarkProvingKey, StarkVerifyingKey,
-    },
-};
-
-pub(crate) struct MultiStarkVerifyingKeyView<'a, SC: StarkGenericConfig> {
-    pub per_air: Vec<&'a StarkVerifyingKey<SC>>,
-}
-
-pub(crate) struct MultiStarkProvingKeyView<'a, SC: StarkGenericConfig> {
-    pub air_ids: Vec<usize>,
-    pub per_air: Vec<&'a StarkProvingKey<SC>>,
-}
-
-impl<SC: StarkGenericConfig> MultiStarkVerifyingKey<SC> {
-    /// Returns a view with all AIRs.
-    pub(crate) fn full_view(&self) -> MultiStarkVerifyingKeyView<SC> {
-        self.view(&(0..self.per_air.len()).collect_vec())
-    }
-    pub(crate) fn view(&self, air_ids: &[usize]) -> MultiStarkVerifyingKeyView<SC> {
-        MultiStarkVerifyingKeyView {
-            per_air: air_ids.iter().map(|&id| &self.per_air[id]).collect(),
-        }
-    }
-}
-impl<SC: StarkGenericConfig> MultiStarkProvingKey<SC> {
-    pub(crate) fn view(&self, air_ids: Vec<usize>) -> MultiStarkProvingKeyView<SC> {
-        let per_air = air_ids.iter().map(|&id| &self.per_air[id]).collect();
-        MultiStarkProvingKeyView { air_ids, per_air }
-    }
-}
-
-impl<SC: StarkGenericConfig> MultiStarkVerifyingKeyView<'_, SC> {
-    /// Returns the preprocessed commit of each AIR. If the AIR does not have a preprocessed trace, returns `None`.
-    pub fn preprocessed_commits(&self) -> Vec<Option<Com<SC>>> {
-        self.per_air
-            .iter()
-            .map(|vk| {
-                vk.preprocessed_data
-                    .as_ref()
-                    .map(|data| data.commit.clone())
-            })
-            .collect()
-    }
-
-    /// Returns all non-empty preprocessed commits.
-    pub fn flattened_preprocessed_commits(&self) -> Vec<Com<SC>> {
-        self.preprocessed_commits().into_iter().flatten().collect()
-    }
-
-    pub fn num_phases(&self) -> usize {
-        self.per_air
-            .iter()
-            .map(|vk| {
-                // Consistency check
-                let num = vk.params.width.after_challenge.len();
-                assert_eq!(num, vk.params.num_challenges_to_sample.len());
-                assert_eq!(num, vk.params.num_exposed_values_after_challenge.len());
-                num
-            })
-            .max()
-            .unwrap_or(0)
-    }
-
-    pub fn num_challenges_per_phase(&self) -> Vec<usize> {
-        let num_phases = self.num_phases();
-        (0..num_phases)
-            .map(|phase_idx| self.num_challenges_in_phase(phase_idx))
-            .collect()
-    }
-
-    pub fn num_challenges_in_phase(&self, phase_idx: usize) -> usize {
-        self.per_air
-            .iter()
-            .flat_map(|vk| vk.params.num_challenges_to_sample.get(phase_idx))
-            .copied()
-            .max()
-            .unwrap_or_else(|| panic!("No challenges used in challenge phase {phase_idx}"))
-    }
-}
-
-impl<SC: StarkGenericConfig> MultiStarkProvingKeyView<'_, SC> {
-    pub fn vk_view(&self) -> MultiStarkVerifyingKeyView<SC> {
-        MultiStarkVerifyingKeyView {
-            per_air: self.per_air.iter().map(|pk| &pk.vk).collect(),
-        }
-    }
-}
diff --git a/crates/stark-backend/src/lib.rs b/crates/stark-backend/src/lib.rs
deleted file mode 100644
index eefb83a979..0000000000
--- a/crates/stark-backend/src/lib.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-//! Backend for proving and verifying mixed-matrix STARKs with a univariate polynomial commitment scheme.
-
-// Re-export all Plonky3 crates
-pub use p3_air;
-pub use p3_challenger;
-pub use p3_commit;
-pub use p3_field;
-pub use p3_matrix;
-pub use p3_maybe_rayon;
-pub use p3_util;
-
-/// AIR builders for prover and verifier, including support for cross-matrix permutation arguments.
-pub mod air_builders;
-/// Trait for a stateful chip that owns trace generation
-mod chip;
-/// API trait for circuit prover/verifier.
-pub mod circuit_api;
-/// Types for tracking matrices in a system with multiple commitments, each to multiple matrices.
-pub mod commit;
-/// Helper types associated with a generic STARK config.
-pub mod config;
-/// Trait for a STARK backend engine providing keygen, proving, and verifying API functions.
-pub mod engine;
-/// GKR batch prover for Grand Product and LogUp lookup arguments.
-pub mod gkr;
-/// Log-up permutation argument implementation as RAP.
-pub mod interaction;
-/// Proving and verifying key generation
-pub mod keygen;
-/// Polynomials
-pub mod poly;
-/// Prover implementation for partitioned multi-matrix AIRs.
-pub mod prover; -/// Trait for RAP (Randomized AIR with Preprocessing) -pub mod rap; -/// Sum-check protocol -pub mod sumcheck; -/// Utility functions -pub mod utils; -/// Verifier implementation -pub mod verifier; - -pub use chip::{Chip, ChipUsageGetter}; - -// Use jemalloc as global allocator for performance -#[cfg(all(feature = "jemalloc", unix, not(test)))] -#[global_allocator] -static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; - -// Use mimalloc as global allocator -#[cfg(all(feature = "mimalloc", not(test)))] -#[global_allocator] -static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; diff --git a/crates/stark-backend/src/poly/mod.rs b/crates/stark-backend/src/poly/mod.rs deleted file mode 100644 index 8ebccebf46..0000000000 --- a/crates/stark-backend/src/poly/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod multi; -pub mod uni; diff --git a/crates/stark-backend/src/poly/multi.rs b/crates/stark-backend/src/poly/multi.rs deleted file mode 100644 index b51d838b30..0000000000 --- a/crates/stark-backend/src/poly/multi.rs +++ /dev/null @@ -1,266 +0,0 @@ -//! Copied from starkware-libs/stwo under Apache-2.0 license. -use std::{ - iter::zip, - ops::{Deref, DerefMut}, -}; - -use p3_field::{ExtensionField, Field}; - -use super::uni::UnivariatePolynomial; - -/// Represents a multivariate polynomial `g(x_1, ..., x_n)`. -pub trait MultivariatePolyOracle { - /// For an n-variate polynomial, returns n. - fn arity(&self) -> usize; - - /// Returns the sum of `g(x_1, x_2, ..., x_n)` over all `(x_2, ..., x_n)` in `{0, 1}^(n-1)` as a polynomial in `x_1`. - fn marginalize_first(&self, claim: F) -> UnivariatePolynomial; - - /// Returns the multivariate polynomial `h(x_2, ..., x_n) = g(alpha, x_2, ..., x_n)`. - fn partial_evaluation(self, alpha: F) -> Self; -} - -/// Multilinear extension of the function defined on the boolean hypercube. -/// -/// The evaluations are stored in lexicographic order. -#[derive(Debug, Clone)] -pub struct Mle { - evals: Vec, -} - -impl Mle { - /// Creates a [`Mle`] from evaluations of a multilinear polynomial on the boolean hypercube. - /// - /// # Panics - /// - /// Panics if the number of evaluations is not a power of two. - pub fn new(evals: Vec) -> Self { - assert!(evals.len().is_power_of_two()); - Self { evals } - } - - pub fn into_evals(self) -> Vec { - self.evals - } -} - -impl MultivariatePolyOracle for Mle { - fn arity(&self) -> usize { - self.evals.len().ilog2() as usize - } - - fn marginalize_first(&self, claim: F) -> UnivariatePolynomial { - let x0 = F::ZERO; - let x1 = F::ONE; - - let y0 = self[0..self.len() / 2] - .iter() - .fold(F::ZERO, |acc, x| acc + *x); - let y1 = claim - y0; - - UnivariatePolynomial::from_interpolation(&[(x0, y0), (x1, y1)]) - } - - fn partial_evaluation(self, alpha: F) -> Self { - let midpoint = self.len() / 2; - let (lhs_evals, rhs_evals) = self.split_at(midpoint); - - let res = zip(lhs_evals, rhs_evals) - .map(|(&lhs_eval, &rhs_eval)| alpha * (rhs_eval - lhs_eval) + lhs_eval) - .collect(); - - Mle::new(res) - } -} - -impl Deref for Mle { - type Target = [F]; - - fn deref(&self) -> &Self::Target { - &self.evals - } -} - -impl DerefMut for Mle { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.evals - } -} - -/// Evaluates the boolean Lagrange basis polynomial `eq(x, y)`. -/// -/// Formally, the boolean Lagrange basis polynomial is defined as: -/// ```text -/// eq(x_1, \dots, x_n, y_1, \dots, y_n) = \prod_{i=1}^n (x_i * y_i + (1 - x_i) * (1 - y_i)). 
-/// ``` -/// For boolean vectors `x` and `y`, the function returns `1` if `x` equals `y` and `0` otherwise. -/// -/// # Panics -/// - Panics if `x` and `y` have different lengths. -pub fn hypercube_eq(x: &[F], y: &[F]) -> F { - assert_eq!(x.len(), y.len()); - zip(x, y) - .map(|(&xi, &yi)| xi * yi + (xi - F::ONE) * (yi - F::ONE)) - .product() -} - -/// Computes `hypercube_eq(0, assignment) * eval0 + hypercube_eq(1, assignment) * eval1`. -pub fn fold_mle_evals(assignment: EF, eval0: F, eval1: F) -> EF -where - F: Field, - EF: ExtensionField, -{ - assignment * (eval1 - eval0) + eval0 -} - -#[cfg(test)] -mod test { - use p3_baby_bear::BabyBear; - use p3_field::{AbstractField, Field}; - - use super::*; - - impl Mle { - /// Evaluates the multilinear polynomial at `point`. - pub(crate) fn eval(&self, point: &[F]) -> F { - pub fn eval_rec(mle_evals: &[F], p: &[F]) -> F { - match p { - [] => mle_evals[0], - &[p_i, ref p @ ..] => { - let (lhs, rhs) = mle_evals.split_at(mle_evals.len() / 2); - let lhs_eval = eval_rec(lhs, p); - let rhs_eval = eval_rec(rhs, p); - // Equivalent to `eq(0, p_i) * lhs_eval + eq(1, p_i) * rhs_eval`. - p_i * (rhs_eval - lhs_eval) + lhs_eval - } - } - } - - let mle_evals = self.clone().into_evals(); - eval_rec(&mle_evals, point) - } - } - - #[test] - fn test_mle_evaluation() { - let evals = vec![ - BabyBear::from_canonical_u32(1), - BabyBear::from_canonical_u32(2), - BabyBear::from_canonical_u32(3), - BabyBear::from_canonical_u32(4), - ]; - // (1 - x_1)(1 - x_2) + 2 (1 - x_1) x_2 + 3 x_1 (1 - x_2) + 4 x_1 x_2 - let mle = Mle::new(evals); - let point = vec![ - BabyBear::from_canonical_u32(0), - BabyBear::from_canonical_u32(0), - ]; - assert_eq!(mle.eval(&point), BabyBear::from_canonical_u32(1)); - - let point = vec![ - BabyBear::from_canonical_u32(0), - BabyBear::from_canonical_u32(1), - ]; - assert_eq!(mle.eval(&point), BabyBear::from_canonical_u32(2)); - - let point = vec![ - BabyBear::from_canonical_u32(1), - BabyBear::from_canonical_u32(0), - ]; - assert_eq!(mle.eval(&point), BabyBear::from_canonical_u32(3)); - - let point = vec![ - BabyBear::from_canonical_u32(1), - BabyBear::from_canonical_u32(1), - ]; - assert_eq!(mle.eval(&point), BabyBear::from_canonical_u32(4)); - - // Out of domain evaluation - let point = vec![ - BabyBear::from_canonical_u32(2), - BabyBear::from_canonical_u32(2), - ]; - assert_eq!(mle.eval(&point), BabyBear::from_canonical_u32(7)); - } - - #[test] - fn test_mle_marginalize_first() { - let evals = vec![ - BabyBear::from_canonical_u32(1), - BabyBear::from_canonical_u32(2), - BabyBear::from_canonical_u32(3), - BabyBear::from_canonical_u32(4), - ]; - let sum = BabyBear::from_canonical_u32(10); - - // (1 - x_1)(1 - x_2) + 2 (1 - x_1) x_2 + 3 x_1 (1 - x_2) + 4 x_1 x_2 - let mle = Mle::new(evals); - // (1 - x_1) + 2 (1 - x_1) + 3 x_1 + 4 x_1 - let poly = mle.marginalize_first(sum); - - assert_eq!( - poly.evaluate(BabyBear::ZERO), - BabyBear::from_canonical_u32(3) - ); - assert_eq!( - poly.evaluate(BabyBear::ONE), - BabyBear::from_canonical_u32(7) - ); - } - - #[test] - fn test_mle_partial_evaluation() { - let evals = vec![ - BabyBear::from_canonical_u32(1), - BabyBear::from_canonical_u32(2), - BabyBear::from_canonical_u32(3), - BabyBear::from_canonical_u32(4), - ]; - // (1 - x_1)(1 - x_2) + 2 (1 - x_1) x_2 + 3 x_1 (1 - x_2) + 4 x_1 x_2 - let mle = Mle::new(evals); - let alpha = BabyBear::from_canonical_u32(2); - // -(1 - x_2) - 2 x_2 + 6 (1 - x_2) + 8 x_2 = x_2 + 5 - let partial_eval = mle.partial_evaluation(alpha); - - assert_eq!( - 
partial_eval.eval(&[BabyBear::ZERO]), - BabyBear::from_canonical_u32(5) - ); - assert_eq!( - partial_eval.eval(&[BabyBear::ONE]), - BabyBear::from_canonical_u32(6) - ); - } - - #[test] - fn eq_identical_hypercube_points_returns_one() { - let zero = BabyBear::ZERO; - let one = BabyBear::ONE; - let a = &[one, zero, one]; - - let eq_eval = hypercube_eq(a, a); - - assert_eq!(eq_eval, one); - } - - #[test] - fn eq_different_hypercube_points_returns_zero() { - let zero = BabyBear::ZERO; - let one = BabyBear::ONE; - let a = &[one, zero, one]; - let b = &[one, zero, zero]; - - let eq_eval = hypercube_eq(a, b); - - assert_eq!(eq_eval, zero); - } - - #[test] - #[should_panic] - fn eq_different_size_points() { - let zero = BabyBear::ZERO; - let one = BabyBear::ONE; - - hypercube_eq(&[zero, one], &[zero]); - } -} diff --git a/crates/stark-backend/src/poly/uni.rs b/crates/stark-backend/src/poly/uni.rs deleted file mode 100644 index 6fa2504ceb..0000000000 --- a/crates/stark-backend/src/poly/uni.rs +++ /dev/null @@ -1,330 +0,0 @@ -//! Copied from starkware-libs/stwo under Apache-2.0 license. -use std::{ - iter::Sum, - ops::{Add, Deref, Mul, Neg, Sub}, -}; - -use p3_field::Field; - -#[derive(Debug, Clone)] -pub struct UnivariatePolynomial { - coeffs: Vec, -} - -impl UnivariatePolynomial { - /// Creates a new univariate polynomial from a vector of coefficients. - pub fn from_coeffs(coeffs: Vec) -> Self { - let mut polynomial = Self { coeffs }; - polynomial.trim_leading_zeroes(); - polynomial - } - - pub fn zero() -> Self { - Self { coeffs: vec![] } - } - - fn one() -> Self { - Self { - coeffs: vec![F::ONE], - } - } - - fn is_zero(&self) -> bool { - self.coeffs.iter().all(F::is_zero) - } - - pub fn evaluate(&self, x: F) -> F { - self.coeffs - .iter() - .rfold(F::ZERO, |acc, coeff| acc * x + *coeff) - } - - pub fn degree(&self) -> usize { - self.coeffs.iter().rposition(|&v| !v.is_zero()).unwrap_or(0) - } - - /// Interpolates `points` via Lagrange interpolation. - /// - /// # Panics - /// - /// Panics if `points` contains duplicate x-coordinates. 
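// --- Editor's note: clarifying example, not part of the original patch. ---
// `from_interpolation` below is plain Lagrange interpolation:
//     p(X) = sum_i y_i * prod_{j != i} (X - x_j) / (x_i - x_j).
// For the hypothetical points (0, 1) and (1, 3): L_0 = 1 - X and L_1 = X, so
// p(X) = (1 - X) + 3 X = 1 + 2 X, i.e. coefficients [1, 2].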
- pub fn from_interpolation(points: &[(F, F)]) -> Self { - let mut coeffs = Self::zero(); - - for (i, &(xi, yi)) in points.iter().enumerate() { - let mut num = UnivariatePolynomial::one(); - let mut denom = F::ONE; - - for (j, &(xj, _)) in points.iter().enumerate() { - if i != j { - num = num * (Self::identity() - xj.into()); - denom *= xi - xj; - } - } - - let selector = num * denom.inverse(); - coeffs = coeffs + selector * yi; - } - - coeffs.trim_leading_zeroes(); - coeffs - } - - fn identity() -> Self { - Self { - coeffs: vec![F::ZERO, F::ONE], - } - } - - fn trim_leading_zeroes(&mut self) { - if let Some(non_zero_idx) = self.coeffs.iter().rposition(|&coeff| !coeff.is_zero()) { - self.coeffs.truncate(non_zero_idx + 1); - } else { - self.coeffs.clear(); - } - } - - pub fn into_coeffs(self) -> Vec { - self.coeffs - } -} - -impl Default for UnivariatePolynomial { - fn default() -> Self { - Self::zero() - } -} - -impl From for UnivariatePolynomial { - fn from(value: F) -> Self { - Self::from_coeffs(vec![value]) - } -} - -impl Mul for UnivariatePolynomial { - type Output = Self; - - fn mul(mut self, rhs: F) -> Self { - for coeff in &mut self.coeffs { - *coeff *= rhs; - } - self - } -} - -impl Mul for UnivariatePolynomial { - type Output = Self; - - fn mul(mut self, mut rhs: Self) -> Self { - if self.is_zero() || rhs.is_zero() { - return Self::zero(); - } - - self.trim_leading_zeroes(); - rhs.trim_leading_zeroes(); - - let mut res = vec![F::ZERO; self.coeffs.len() + rhs.coeffs.len() - 1]; - - for (i, coeff_a) in self.coeffs.into_iter().enumerate() { - for (j, coeff_b) in rhs.coeffs.iter().enumerate() { - res[i + j] += coeff_a * *coeff_b; - } - } - - Self::from_coeffs(res) - } -} - -impl Add for UnivariatePolynomial { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - let n = self.coeffs.len().max(rhs.coeffs.len()); - let mut coeffs = Vec::with_capacity(n); - - for i in 0..n { - let a = self.coeffs.get(i).copied().unwrap_or(F::ZERO); - let b = rhs.coeffs.get(i).copied().unwrap_or(F::ZERO); - coeffs.push(a + b); - } - - Self { coeffs } - } -} - -impl Sub for UnivariatePolynomial { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self + (-rhs) - } -} - -impl Neg for UnivariatePolynomial { - type Output = Self; - - fn neg(self) -> Self { - Self { - coeffs: self.coeffs.into_iter().map(|v| -v).collect(), - } - } -} - -impl Deref for UnivariatePolynomial { - type Target = [F]; - - fn deref(&self) -> &Self::Target { - &self.coeffs - } -} - -/// Evaluates a polynomial represented by coefficients in a slice at a given point `x`. -pub fn evaluate_on_slice(coeffs: &[F], x: F) -> F { - coeffs.iter().rfold(F::ZERO, |acc, &coeff| acc * x + coeff) -} - -/// Returns `v_0 + alpha * v_1 + ... + alpha^(n-1) * v_{n-1}`. -pub fn random_linear_combination(v: &[F], alpha: F) -> F { - evaluate_on_slice(v, alpha) -} - -/// Projective fraction. 
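// --- Editor's note: clarifying example, not part of the original patch. ---
// The projective representation below adds fractions without field inversion:
//     a/b + c/d = (a*d + b*c) / (b*d).
// E.g. 1/3 + 2/6 is kept unreduced as numerator 12, denominator 18 (equal to
// 2/3); only a final comparison or check needs a division. This keeps summing
// many logup terms cheap.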
-#[derive(Debug, Clone, Copy)] -pub struct Fraction { - pub numerator: T, - pub denominator: T, -} - -impl Fraction { - pub const fn new(numerator: T, denominator: T) -> Self { - Self { - numerator, - denominator, - } - } -} - -impl + Mul> Add for Fraction { - type Output = Fraction; - - fn add(self, rhs: Self) -> Fraction { - Fraction { - numerator: rhs.denominator.clone() * self.numerator.clone() - + self.denominator.clone() * rhs.numerator.clone(), - denominator: self.denominator * rhs.denominator, - } - } -} - -impl Fraction { - const ZERO: Self = Self::new(F::ZERO, F::ONE); - - pub fn is_zero(&self) -> bool { - self.numerator.is_zero() && !self.denominator.is_zero() - } -} - -impl Sum for Fraction { - fn sum>(iter: I) -> Self { - iter.fold(Self::ZERO, |a, b| a + b) - } -} - -#[cfg(test)] -mod tests { - use std::iter::zip; - - use itertools::Itertools; - use p3_baby_bear::BabyBear; - use p3_field::AbstractField; - - use super::*; - - macro_rules! bbvec { - [$($x:expr),*] => { - vec![$(BabyBear::from_canonical_u32($x)),*] - } - } - - #[test] - fn test_interpolate() { - let xs = bbvec![5, 1, 3, 9]; - let ys = bbvec![1, 2, 3, 4]; - let points = zip(&xs, &ys).map(|(x, y)| (*x, *y)).collect_vec(); - - let poly = UnivariatePolynomial::from_interpolation(&points); - - for (x, y) in zip(xs, ys) { - assert_eq!(poly.evaluate(x), y, "mismatch for x={x}"); - } - } - - #[test] - fn test_eval() { - let coeffs = bbvec![9, 2, 3]; - let x = BabyBear::from_canonical_u32(7); - - let eval = UnivariatePolynomial::from_coeffs(coeffs.clone()).evaluate(x); - - assert_eq!(eval, coeffs[0] + coeffs[1] * x + coeffs[2] * x.square()); - } - - #[test] - fn test_fractional_addition() { - let a = Fraction::new(BabyBear::ONE, BabyBear::from_canonical_u32(3)); - let b = Fraction::new(BabyBear::TWO, BabyBear::from_canonical_u32(6)); - - let Fraction { - numerator, - denominator, - } = a + b; - - assert_eq!( - numerator / denominator, - BabyBear::TWO / BabyBear::from_canonical_u32(3) - ); - } - - #[test] - fn test_degree() { - // Case 1: Zero polynomial (expect degree 0 for a polynomial with no terms) - let poly_zero = UnivariatePolynomial::::from_coeffs(vec![]); - assert_eq!( - poly_zero.degree(), - 0, - "Zero polynomial should have degree 0" - ); - - // Case 2: Polynomial with only a constant term (degree should be 0) - let poly_constant = UnivariatePolynomial::from_coeffs(bbvec![5]); - assert_eq!( - poly_constant.degree(), - 0, - "Constant polynomial should have degree 0" - ); - - // Case 3: Linear polynomial (degree 1, e.g., 3x + 5) - let poly_linear = UnivariatePolynomial::from_coeffs(bbvec![5, 3]); - assert_eq!( - poly_linear.degree(), - 1, - "Linear polynomial should have degree 1" - ); - - // Case 4: Quadratic polynomial with trailing zeros (degree should ignore trailing zeros) - let poly_quadratic = UnivariatePolynomial::from_coeffs(bbvec![2, 0, 4, 0, 0]); - assert_eq!( - poly_quadratic.degree(), - 2, - "Quadratic polynomial with trailing zeros should have degree 2" - ); - - // Case 5: High-degree polynomial without trailing zeros - let poly_high_degree = UnivariatePolynomial::from_coeffs(bbvec![1, 0, 0, 0, 5]); - assert_eq!( - poly_high_degree.degree(), - 4, - "Polynomial of degree 4 should have degree 4" - ); - } -} diff --git a/crates/stark-backend/src/prover/helper.rs b/crates/stark-backend/src/prover/helper.rs deleted file mode 100644 index 11ce5068e6..0000000000 --- a/crates/stark-backend/src/prover/helper.rs +++ /dev/null @@ -1,86 +0,0 @@ -use std::sync::Arc; - -use itertools::izip; -use 
p3_matrix::{dense::RowMajorMatrix, Matrix}; - -use crate::{ - config::{StarkGenericConfig, Val}, - prover::types::{AirProofInput, AirProofRawInput}, - rap::AnyRap, -}; - -/// Test helper trait for AirProofInput -/// Don't use this trait in production code -pub trait AirProofInputTestHelper { - fn cached_traces_no_pis( - air: Arc>, - cached_traces: Vec>>, - common_trace: RowMajorMatrix>, - ) -> Self; -} - -impl AirProofInputTestHelper for AirProofInput { - fn cached_traces_no_pis( - air: Arc>, - cached_traces: Vec>>, - common_trace: RowMajorMatrix>, - ) -> Self { - Self { - air, - cached_mains_pdata: vec![], - raw: AirProofRawInput { - cached_mains: cached_traces.into_iter().map(Arc::new).collect(), - common_main: Some(common_trace), - public_values: vec![], - }, - } - } -} -impl AirProofInput { - pub fn simple( - air: Arc>, - trace: RowMajorMatrix>, - public_values: Vec>, - ) -> Self { - Self { - air, - cached_mains_pdata: vec![], - raw: AirProofRawInput { - cached_mains: vec![], - common_main: Some(trace), - public_values, - }, - } - } - pub fn simple_no_pis(air: Arc>, trace: RowMajorMatrix>) -> Self { - Self::simple(air, trace, vec![]) - } - - pub fn multiple_simple( - airs: Vec>>, - traces: Vec>>, - public_values: Vec>>, - ) -> Vec { - izip!(airs, traces, public_values) - .map(|(air, trace, pis)| AirProofInput::simple(air, trace, pis)) - .collect() - } - - pub fn multiple_simple_no_pis( - airs: Vec>>, - traces: Vec>>, - ) -> Vec { - izip!(airs, traces) - .map(|(air, trace)| AirProofInput::simple_no_pis(air, trace)) - .collect() - } - /// Return the height of the main trace. - pub fn main_trace_height(&self) -> usize { - if self.raw.cached_mains.is_empty() { - // An AIR must have a main trace. - self.raw.common_main.as_ref().unwrap().height() - } else { - self.raw.cached_mains[0].height() - } - } -} diff --git a/crates/stark-backend/src/prover/metrics.rs b/crates/stark-backend/src/prover/metrics.rs deleted file mode 100644 index 5d45847f88..0000000000 --- a/crates/stark-backend/src/prover/metrics.rs +++ /dev/null @@ -1,154 +0,0 @@ -use std::fmt::Display; - -use itertools::Itertools; -use p3_field::AbstractExtensionField; -use serde::{Deserialize, Serialize}; - -use crate::{ - config::{StarkGenericConfig, Val}, - keygen::types::{StarkProvingKey, TraceWidth}, -}; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct TraceMetrics { - pub per_air: Vec, - /// Total base field cells from all traces, excludes preprocessed. - pub total_cells: usize, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct SingleTraceMetrics { - pub air_name: String, - pub height: usize, - /// The after challenge width is adjusted to be in terms of **base field** elements. - pub width: TraceWidth, - pub cells: TraceCells, - /// Omitting preprocessed trace, the total base field cells from main and after challenge - /// traces. - pub total_cells: usize, -} - -/// Trace cells, counted in terms of number of **base field** elements. 
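// --- Editor's note: worked example with hypothetical numbers, not part of the original patch. ---
// Cell counts are width * height per trace part, with after-challenge widths
// first scaled by the extension degree D since they are stored as extension
// field columns. For height = 1024, common_main = 8, after_challenge = [2], D = 4:
//     common main cells     = 8 * 1024       = 8_192
//     after challenge cells = (2 * 4) * 1024 = 8_192
//     total_cells (excluding preprocessed)   = 16_384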
-#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct TraceCells { - pub preprocessed: Option, - pub cached_mains: Vec, - pub common_main: usize, - pub after_challenge: Vec, -} - -impl Display for TraceMetrics { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!( - f, - "Total Cells: {} (excluding preprocessed)", - format_number_with_underscores(self.total_cells) - )?; - for trace_metrics in &self.per_air { - writeln!(f, "{}", trace_metrics)?; - } - Ok(()) - } -} - -impl Display for SingleTraceMetrics { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{:<20} | Rows = {:<10} | Cells = {:<11} | Prep Cols = {:<5} | Main Cols = {:<5} | Perm Cols = {:<5}", - self.air_name, format_number_with_underscores(self.height), format_number_with_underscores(self.total_cells), self.width.preprocessed.unwrap_or(0), - format!("{:?}", self.width.main_widths()), - format!("{:?}",self.width.after_challenge), - )?; - Ok(()) - } -} - -/// heights are the trace heights for each air -pub fn trace_metrics( - pk: &[&StarkProvingKey], - heights: &[usize], -) -> TraceMetrics { - let per_air: Vec<_> = pk - .iter() - .zip_eq(heights) - .map(|(pk, &height)| { - let air_name = pk.air_name.clone(); - let mut width = pk.vk.params.width.clone(); - let ext_degree = >>::D; - for w in &mut width.after_challenge { - *w *= ext_degree; - } - let cells = TraceCells { - preprocessed: width.preprocessed.map(|w| w * height), - cached_mains: width.cached_mains.iter().map(|w| w * height).collect(), - common_main: width.common_main * height, - after_challenge: width.after_challenge.iter().map(|w| w * height).collect(), - }; - let total_cells = cells - .cached_mains - .iter() - .chain([&cells.common_main]) - .chain(cells.after_challenge.iter()) - .sum::(); - SingleTraceMetrics { - air_name, - height, - width, - cells, - total_cells, - } - }) - .collect(); - let total_cells = per_air.iter().map(|m| m.total_cells).sum(); - TraceMetrics { - per_air, - total_cells, - } -} - -pub fn format_number_with_underscores(n: usize) -> String { - let num_str = n.to_string(); - let mut result = String::new(); - - // Start adding characters from the end of num_str - for (i, c) in num_str.chars().rev().enumerate() { - if i > 0 && i % 3 == 0 { - result.push('_'); - } - result.push(c); - } - - // Reverse the result to get the correct order - result.chars().rev().collect() -} - -#[cfg(feature = "bench-metrics")] -mod emit { - use metrics::counter; - - use super::{SingleTraceMetrics, TraceMetrics}; - - impl TraceMetrics { - pub fn emit(&self) { - for trace_metrics in &self.per_air { - trace_metrics.emit(); - } - counter!("total_cells").absolute(self.total_cells as u64); - } - } - - impl SingleTraceMetrics { - pub fn emit(&self) { - let labels = [("air_name", self.air_name.clone())]; - counter!("rows", &labels).absolute(self.height as u64); - counter!("cells", &labels).absolute(self.total_cells as u64); - counter!("prep_cols", &labels).absolute(self.width.preprocessed.unwrap_or(0) as u64); - counter!("main_cols", &labels).absolute( - (self.width.cached_mains.iter().sum::() + self.width.common_main) as u64, - ); - counter!("perm_cols", &labels) - .absolute(self.width.after_challenge.iter().sum::() as u64); - } - } -} diff --git a/crates/stark-backend/src/prover/mod.rs b/crates/stark-backend/src/prover/mod.rs deleted file mode 100644 index dbc138c144..0000000000 --- a/crates/stark-backend/src/prover/mod.rs +++ /dev/null @@ -1,503 +0,0 @@ -use std::{ - iter, - sync::{Arc, Mutex}, -}; - 
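// --- Editor's note: summary of the transcript order below, not part of the original patch. ---
// Soundness of `MultiTraceStarkProver::prove` relies on the verifier replaying
// the same Fiat-Shamir transcript: observe public values, then preprocessed
// commitments, then main trace commitments, then log2 trace degrees; sample the
// RAP phase (logup) challenges and observe the permutation commitment; sample
// `alpha`; observe the quotient commitment; sample `zeta` for the openings.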
-use itertools::{izip, multiunzip, Itertools}; -use p3_challenger::{CanObserve, FieldChallenger}; -use p3_commit::{Pcs, PolynomialSpace}; -use p3_field::AbstractField; -use p3_matrix::{ - dense::{RowMajorMatrix, RowMajorMatrixView}, - Matrix, -}; -use p3_util::log2_strict_usize; -use tracing::instrument; - -use crate::{ - air_builders::debug::check_constraints::{check_constraints, check_logup}, - config::{Domain, StarkGenericConfig, Val}, - interaction::RapPhaseSeqKind, - keygen::{types::MultiStarkProvingKey, view::MultiStarkProvingKeyView}, - prover::{ - metrics::trace_metrics, - opener::OpeningProver, - quotient::ProverQuotientData, - trace::{commit_quotient_traces, ProverTraceData, TraceCommitter}, - types::{AirProofData, Commitments, Proof, ProofInput}, - }, - rap::AnyRap, -}; - -pub mod helper; -/// Metrics about trace and other statistics related to prover performance -pub mod metrics; -/// Polynomial opening proofs -pub mod opener; -/// Computation of DEEP quotient polynomial and commitment -pub mod quotient; -/// Trace commitment computation -mod trace; -pub mod types; - -pub use trace::PairTraceView; - -use crate::{config::RapPhaseSeqPartialProof, interaction::RapPhaseSeq}; - -thread_local! { - pub static USE_DEBUG_BUILDER: Arc> = Arc::new(Mutex::new(true)); -} - -/// Proves multiple chips with interactions together. -/// This prover implementation is specialized for Interactive AIRs. -pub struct MultiTraceStarkProver<'c, SC: StarkGenericConfig> { - pub config: &'c SC, -} - -impl<'c, SC: StarkGenericConfig> MultiTraceStarkProver<'c, SC> { - pub fn new(config: &'c SC) -> Self { - Self { config } - } - - pub fn pcs(&self) -> &SC::Pcs { - self.config.pcs() - } - - pub fn committer(&self) -> TraceCommitter { - TraceCommitter::new(self.pcs()) - } - - /// Specialized prove for InteractiveAirs. - /// Handles trace generation of the permutation traces. - /// Assumes the main traces have been generated and committed already. - /// - /// Public values: for each AIR, a separate list of public values. - /// The prover can support global public values that are shared among all AIRs, - /// but we currently split public values per-AIR for modularity. - #[instrument(name = "MultiTraceStarkProver::prove", level = "info", skip_all)] - pub fn prove<'a>( - &self, - challenger: &mut SC::Challenger, - mpk: &'a MultiStarkProvingKey, - proof_input: ProofInput, - ) -> Proof { - assert!(mpk.validate(&proof_input), "Invalid proof input"); - let pcs = self.config.pcs(); - let rap_phase_seq = self.config.rap_phase_seq(); - - let (air_ids, air_inputs): (Vec<_>, Vec<_>) = proof_input.per_air.into_iter().unzip(); - let ( - airs, - cached_mains_pdata_per_air, - cached_mains_per_air, - common_main_per_air, - pvs_per_air, - ): (Vec<_>, Vec<_>, Vec<_>, Vec<_>, Vec<_>) = - multiunzip(air_inputs.into_iter().map(|input| { - ( - input.air, - input.cached_mains_pdata, - input.raw.cached_mains, - input.raw.common_main, - input.raw.public_values, - ) - })); - assert_eq!(cached_mains_pdata_per_air.len(), cached_mains_per_air.len()); - - let num_air = air_ids.len(); - // Ignore unused AIRs. - let mpk = mpk.view(air_ids); - - // Challenger must observe public values - for pvs in &pvs_per_air { - challenger.observe_slice(pvs); - } - - let preprocessed_commits = mpk.vk_view().flattened_preprocessed_commits(); - challenger.observe_slice(&preprocessed_commits); - - // Commit all common main traces in a commitment. Traces inside are ordered by AIR id. 
- let (common_main_trace_views, common_main_prover_data) = { - let committer = TraceCommitter::::new(pcs); - let (trace_views, traces): (Vec<_>, Vec<_>) = common_main_per_air - .iter() - .filter_map(|cm: &Option>| cm.as_ref()) - .map(|m| (m.as_view(), m.clone())) - .unzip(); - - (trace_views, committer.commit(traces)) - }; - - // Commitments order: - // - for each air: - // - for each cached main trace - // - 1 commitment - // - 1 commitment of all common main traces - let main_trace_commitments: Vec<_> = cached_mains_pdata_per_air - .iter() - .flatten() - .map(|pdata| &pdata.commit) - .chain(iter::once(&common_main_prover_data.commit)) - .cloned() - .collect(); - challenger.observe_slice(&main_trace_commitments); - - let mut common_main_idx = 0; - let mut degree_per_air = Vec::with_capacity(num_air); - let mut main_views_per_air = Vec::with_capacity(num_air); - for (pk, cached_mains) in mpk.per_air.iter().zip(&cached_mains_per_air) { - let mut main_views: Vec<_> = cached_mains.iter().map(|m| m.as_view()).collect(); - if pk.vk.has_common_main() { - main_views.push(common_main_trace_views[common_main_idx].as_view()); - common_main_idx += 1; - } - degree_per_air.push(main_views[0].height()); - main_views_per_air.push(main_views); - } - challenger.observe_slice( - °ree_per_air - .iter() - .map(|&d| Val::::from_canonical_usize(log2_strict_usize(d))) - .collect::>(), - ); - let domain_per_air: Vec<_> = degree_per_air - .iter() - .map(|°ree| pcs.natural_domain_for_degree(degree)) - .collect(); - - let preprocessed_trace_per_air = mpk - .per_air - .iter() - .map(|pk| pk.preprocessed_data.as_ref().map(|d| d.trace.as_view())) - .collect_vec(); - let trace_view_per_air = izip!( - preprocessed_trace_per_air.iter(), - main_views_per_air.iter(), - pvs_per_air.iter() - ) - .map(|(preprocessed, main, pvs)| PairTraceView { - preprocessed, - partitioned_main: main, - public_values: pvs, - }) - .collect_vec(); - - let (constraints_per_air, rap_pk_per_air): (Vec<_>, Vec<_>) = mpk - .per_air - .iter() - .map(|pk| (&pk.vk.symbolic_constraints, pk.rap_phase_seq_pk.clone())) - .unzip(); - - let (rap_phase_seq_proof, rap_phase_seq_data) = rap_phase_seq - .partially_prove( - challenger, - &rap_pk_per_air, - &constraints_per_air, - &trace_view_per_air, - ) - .map_or((None, None), |(p, d)| (Some(p), Some(d))); - - let (perm_trace_per_air, exposed_values_after_challenge, challenges) = - if let Some(phase_data) = rap_phase_seq_data { - assert_eq!(mpk.vk_view().num_phases(), 1); - assert_eq!( - mpk.vk_view().num_challenges_in_phase(0), - phase_data.challenges.len() - ); - ( - phase_data.after_challenge_trace_per_air, - phase_data - .exposed_values_per_air - .into_iter() - .map(|v| v.into_iter().collect_vec()) - .collect(), - vec![phase_data.challenges], - ) - } else { - assert_eq!(mpk.vk_view().num_phases(), 0); - (vec![None; num_air], vec![vec![]; num_air], vec![]) - }; - - #[cfg(debug_assertions)] - debug_constraints_and_interactions( - &airs, - &mpk, - &main_views_per_air, - &pvs_per_air, - &perm_trace_per_air, - &exposed_values_after_challenge, - &challenges, - SC::RapPhaseSeq::ID, - ); - - // Commit to permutation traces: this means only 1 challenge round right now - // One shared commit for all permutation traces - let perm_prover_data = tracing::info_span!("commit to permutation traces") - .in_scope(|| commit_perm_traces::(pcs, perm_trace_per_air, &domain_per_air)); - - // Challenger observes commitment if exists - if let Some(data) = &perm_prover_data { - challenger.observe(data.commit.clone()); - } - // 
Generate `alpha` challenge - let alpha: SC::Challenge = challenger.sample_ext_element(); - tracing::debug!("alpha: {alpha:?}"); - - let quotient_data = commit_quotient_traces( - pcs, - &mpk, - alpha, - &challenges, - airs, - &pvs_per_air, - domain_per_air.clone(), - &cached_mains_pdata_per_air, - &common_main_prover_data, - &perm_prover_data, - exposed_values_after_challenge.clone(), - ); - - let main_prover_data: Vec<_> = cached_mains_pdata_per_air - .into_iter() - .flatten() - .chain(iter::once(common_main_prover_data)) - .collect(); - prove_raps_with_committed_traces( - pcs, - challenger, - mpk, - &main_prover_data, - perm_prover_data, - exposed_values_after_challenge, - quotient_data, - domain_per_air, - pvs_per_air, - rap_phase_seq_proof, - ) - } -} - -/// Proves general RAPs after all traces have been committed. -/// Soundness depends on `challenger` having already observed -/// public values, exposed values after challenge, and all -/// trace commitments. -/// -/// - `challenges`: for each trace challenge phase, the challenges sampled -/// -/// ## Assumptions -/// - `raps, trace_views, public_values` have same length and same order -/// - per challenge round, shared commitment for -/// all trace matrices, with matrices in increasing order of air index -#[allow(clippy::too_many_arguments)] -#[instrument(level = "info", skip_all)] -fn prove_raps_with_committed_traces<'a, SC: StarkGenericConfig>( - pcs: &SC::Pcs, - challenger: &mut SC::Challenger, - mpk: MultiStarkProvingKeyView, - main_prover_data: &[ProverTraceData], - perm_prover_data: Option>, - exposed_values_after_challenge: Vec>>, - quotient_data: ProverQuotientData, - domain_per_air: Vec>, - public_values_per_air: Vec>>, - rap_phase_seq_proof: Option>, -) -> Proof { - // Observe quotient commitment - challenger.observe(quotient_data.commit.clone()); - - let after_challenge_commitments: Vec<_> = perm_prover_data - .iter() - .map(|data| data.commit.clone()) - .collect(); - // Collect the commitments - let commitments = Commitments { - main_trace: main_prover_data - .iter() - .map(|data| data.commit.clone()) - .collect(), - after_challenge: after_challenge_commitments, - quotient: quotient_data.commit.clone(), - }; - - // Draw `zeta` challenge - let zeta: SC::Challenge = challenger.sample_ext_element(); - tracing::debug!("zeta: {zeta:?}"); - - // Open all polynomials at random points using pcs - let opener = OpeningProver::new(pcs, zeta); - let preprocessed_data: Vec<_> = mpk - .per_air - .iter() - .zip_eq(&domain_per_air) - .flat_map(|(pk, domain)| { - pk.preprocessed_data - .as_ref() - .map(|prover_data| (prover_data.data.as_ref(), *domain)) - }) - .collect(); - - let mut main_prover_data_idx = 0; - let mut main_data = Vec::with_capacity(main_prover_data.len()); - let mut common_main_domains = Vec::with_capacity(mpk.per_air.len()); - for (air_id, pk) in mpk.per_air.iter().enumerate() { - for _ in 0..pk.vk.num_cached_mains() { - main_data.push(( - main_prover_data[main_prover_data_idx].data.as_ref(), - vec![domain_per_air[air_id]], - )); - main_prover_data_idx += 1; - } - if pk.vk.has_common_main() { - common_main_domains.push(domain_per_air[air_id]); - } - } - main_data.push(( - main_prover_data[main_prover_data_idx].data.as_ref(), - common_main_domains, - )); - - // ASSUMING: per challenge round, shared commitment for all trace matrices, with matrices in increasing order of air index - let after_challenge_data = if let Some(perm_prover_data) = &perm_prover_data { - let mut domains = Vec::new(); - for (air_id, pk) in 
mpk.per_air.iter().enumerate() { - if pk.vk.has_interaction() { - domains.push(domain_per_air[air_id]); - } - } - vec![(perm_prover_data.data.as_ref(), domains)] - } else { - vec![] - }; - - let quotient_degrees = mpk - .per_air - .iter() - .map(|pk| pk.vk.quotient_degree) - .collect_vec(); - let opening = opener.open( - challenger, - preprocessed_data, - main_data, - after_challenge_data, - "ient_data.data, - "ient_degrees, - ); - - let degrees = domain_per_air - .iter() - .map(|domain| domain.size()) - .collect_vec(); - - tracing::info!("{}", trace_metrics(&mpk.per_air, °rees)); - #[cfg(feature = "bench-metrics")] - trace_metrics(&mpk.per_air, °rees).emit(); - - Proof { - commitments, - opening, - per_air: izip!( - mpk.air_ids, - degrees, - exposed_values_after_challenge, - public_values_per_air - ) - .map( - |(air_id, degree, exposed_values, public_values)| AirProofData { - air_id, - degree, - public_values, - exposed_values_after_challenge: exposed_values, - }, - ) - .collect(), - rap_phase_seq_proof, - } -} - -fn commit_perm_traces( - pcs: &SC::Pcs, - perm_traces: Vec>>, - domain_per_air: &[Domain], -) -> Option> { - let flattened_traces_with_domains: Vec<_> = perm_traces - .into_iter() - .zip_eq(domain_per_air) - .flat_map(|(perm_trace, domain)| perm_trace.map(|trace| (*domain, trace.flatten_to_base()))) - .collect(); - // Only commit if there are permutation traces - if !flattened_traces_with_domains.is_empty() { - let (commit, data) = pcs.commit(flattened_traces_with_domains); - Some(ProverTraceData { - commit, - data: data.into(), - }) - } else { - None - } -} - -#[allow(dead_code)] -#[allow(clippy::too_many_arguments)] -fn debug_constraints_and_interactions( - raps: &[Arc>], - mpk: &MultiStarkProvingKeyView, - main_views_per_air: &[Vec>>], - public_values_per_air: &[Vec>], - perm_trace_per_air: &[Option>], - exposed_values_after_challenge: &[Vec>], - challenges: &[Vec], - rap_phase_seq_kind: RapPhaseSeqKind, -) { - USE_DEBUG_BUILDER.with(|debug| { - if *debug.lock().unwrap() { - let preprocessed = izip!( - raps, - &mpk.per_air, - main_views_per_air, - public_values_per_air, - perm_trace_per_air, - exposed_values_after_challenge - ) - .map( - |(rap, pk, main, public_values, perm_trace, exposed_values_after_challenge)| { - let preprocessed_trace = pk - .preprocessed_data - .as_ref() - .map(|data| data.trace.as_view()); - tracing::debug!("Checking constraints for {}", rap.name()); - check_constraints( - rap.as_ref(), - &rap.name(), - &preprocessed_trace, - main, - &perm_trace.iter().map(|m| m.as_view()).collect_vec(), - challenges, - public_values, - exposed_values_after_challenge, - rap_phase_seq_kind, - ); - preprocessed_trace - }, - ) - .collect_vec(); - - let (air_names, interactions): (Vec<_>, Vec<_>) = mpk - .per_air - .iter() - .map(|pk| { - ( - pk.air_name.clone(), - &pk.vk.symbolic_constraints.interactions[..], - ) - }) - .unzip(); - check_logup( - &air_names, - &interactions, - &preprocessed, - main_views_per_air, - public_values_per_air, - ); - } - }); -} diff --git a/crates/stark-backend/src/prover/opener.rs b/crates/stark-backend/src/prover/opener.rs deleted file mode 100644 index 0b14fe3440..0000000000 --- a/crates/stark-backend/src/prover/opener.rs +++ /dev/null @@ -1,174 +0,0 @@ -use std::fmt::Debug; - -use derivative::Derivative; -use itertools::Itertools; -use p3_commit::{Pcs, PolynomialSpace}; -use serde::{Deserialize, Serialize}; -use tracing::instrument; - -use crate::config::{Domain, PcsProof, PcsProverData, StarkGenericConfig}; - -pub struct 
OpeningProver<'pcs, SC: StarkGenericConfig> { - pcs: &'pcs SC::Pcs, - zeta: SC::Challenge, -} - -impl<'pcs, SC: StarkGenericConfig> OpeningProver<'pcs, SC> { - pub fn new(pcs: &'pcs SC::Pcs, zeta: SC::Challenge) -> Self { - Self { pcs, zeta } - } - - /// Opening proof for multiple RAP matrices, where - /// - (for now) each preprocessed trace matrix has a separate commitment - /// - main trace matrices can have multiple commitments - /// - for each after_challenge phase, all matrices in the phase share a commitment - /// - quotient poly chunks are all committed together - #[instrument(name = "FRI opening proofs", skip_all)] - pub fn open( - &self, - challenger: &mut SC::Challenger, - // For each preprocessed trace commitment, the prover data and - // the domain of the matrix, in order - preprocessed: Vec<(&PcsProverData, Domain)>, - // For each main trace commitment, the prover data and - // the domain of each matrix, in order - main: Vec<(&PcsProverData, Vec>)>, - // after_challenge[i] has shared commitment prover data for all matrices in that phase, and domains of those matrices, in order - after_challenge: Vec<(&PcsProverData, Vec>)>, - // Quotient poly commitment prover data - quotient_data: &PcsProverData, - // Quotient degree for each RAP committed in quotient_data, in order - quotient_degrees: &[usize], - ) -> OpeningProof { - let preprocessed: Vec<_> = preprocessed - .into_iter() - .map(|(data, domain)| (data, vec![domain])) - .collect(); - - let zeta = self.zeta; - let mut rounds = preprocessed - .iter() - .chain(main.iter()) - .chain(after_challenge.iter()) - .map(|(data, domains)| { - let points_per_mat = domains - .iter() - .map(|domain| vec![zeta, domain.next_point(zeta).unwrap()]) - .collect_vec(); - (*data, points_per_mat) - }) - .collect_vec(); - - // open every quotient chunk at zeta - let num_chunks: usize = quotient_degrees.iter().sum(); - let quotient_opening_points = vec![vec![zeta]; num_chunks]; - rounds.push((quotient_data, quotient_opening_points)); - - let (mut opening_values, opening_proof) = self.pcs.open(rounds, challenger); - - // Unflatten opening_values - let mut quotient_openings = opening_values.pop().expect("Should have quotient opening"); - - let num_after_challenge = after_challenge.len(); - let after_challenge_openings = opening_values - .split_off(opening_values.len() - num_after_challenge) - .into_iter() - .map(collect_trace_openings) - .collect_vec(); - assert_eq!( - after_challenge_openings.len(), - num_after_challenge, - "Incorrect number of after challenge trace openings" - ); - - let main_openings = opening_values - .split_off(preprocessed.len()) - .into_iter() - .map(collect_trace_openings) - .collect_vec(); - assert_eq!( - main_openings.len(), - main.len(), - "Incorrect number of main trace openings" - ); - - let preprocessed_openings = opening_values - .into_iter() - .map(|values| { - let mut openings = collect_trace_openings(values); - openings - .pop() - .expect("Preprocessed trace should be opened at 1 point") - }) - .collect_vec(); - assert_eq!( - preprocessed_openings.len(), - preprocessed.len(), - "Incorrect number of preprocessed trace openings" - ); - - // Unflatten quotient openings - let quotient_openings = quotient_degrees - .iter() - .map(|&chunk_size| { - quotient_openings - .drain(..chunk_size) - .map(|mut op| { - op.pop() - .expect("quotient chunk should be opened at 1 point") - }) - .collect_vec() - }) - .collect_vec(); - - OpeningProof { - proof: opening_proof, - values: OpenedValues { - preprocessed: preprocessed_openings, - 
main: main_openings, - after_challenge: after_challenge_openings, - quotient: quotient_openings, - }, - } - } -} - -fn collect_trace_openings( - ops: Vec>>, -) -> Vec> { - ops.into_iter() - .map(|op| { - let [local, next] = op.try_into().expect("Should have 2 openings"); - AdjacentOpenedValues { local, next } - }) - .collect() -} - -/// PCS opening proof with opened values for multi-matrix AIR. -#[derive(Serialize, Deserialize, Derivative)] -#[serde(bound = "")] -#[derivative(Clone(bound = "SC::Challenge: Clone"))] -pub struct OpeningProof { - pub proof: PcsProof, - pub values: OpenedValues, -} - -#[derive(Clone, Serialize, Deserialize)] -pub struct OpenedValues { - /// For each preprocessed trace commitment, the opened values - pub preprocessed: Vec>, - /// For each main trace commitment, for each matrix in commitment, the - /// opened values - pub main: Vec>>, - /// For each phase after challenge, there is shared commitment. - /// For each commitment, if any, for each matrix in the commitment, the opened values, - pub after_challenge: Vec>>, - /// For each RAP, for each quotient chunk in quotient poly, the opened values - pub quotient: Vec>>, -} - -#[derive(Clone, Serialize, Deserialize)] -pub struct AdjacentOpenedValues { - pub local: Vec, - pub next: Vec, -} diff --git a/crates/stark-backend/src/prover/quotient/helper.rs b/crates/stark-backend/src/prover/quotient/helper.rs deleted file mode 100644 index 10fa231935..0000000000 --- a/crates/stark-backend/src/prover/quotient/helper.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::{ - config::StarkGenericConfig, - interaction::HasInteractionChunkSize, - keygen::types::{MultiStarkProvingKey, StarkProvingKey}, - prover::quotient::QuotientVkData, -}; - -pub(crate) trait QuotientVkDataHelper { - fn get_quotient_vk_data(&self) -> QuotientVkData; -} - -impl QuotientVkDataHelper for StarkProvingKey { - fn get_quotient_vk_data(&self) -> QuotientVkData { - QuotientVkData { - quotient_degree: self.vk.quotient_degree, - rap_phase_seq_kind: self.vk.rap_phase_seq_kind, - interaction_chunk_size: self.rap_phase_seq_pk.interaction_chunk_size(), - symbolic_constraints: &self.vk.symbolic_constraints, - } - } -} - -impl MultiStarkProvingKey { - pub fn get_quotient_vk_data_per_air(&self) -> Vec> { - self.per_air - .iter() - .map(|pk| pk.get_quotient_vk_data()) - .collect() - } -} diff --git a/crates/stark-backend/src/prover/quotient/mod.rs b/crates/stark-backend/src/prover/quotient/mod.rs deleted file mode 100644 index b45367c4be..0000000000 --- a/crates/stark-backend/src/prover/quotient/mod.rs +++ /dev/null @@ -1,244 +0,0 @@ -use itertools::{izip, Itertools}; -use p3_commit::{Pcs, PolynomialSpace}; -use p3_field::AbstractField; -use p3_matrix::{dense::RowMajorMatrix, Matrix}; -use tracing::instrument; - -use self::single::compute_single_rap_quotient_values; -use super::trace::SingleRapCommittedTraceView; -use crate::{ - air_builders::{prover::ProverConstraintFolder, symbolic::SymbolicConstraints}, - config::{Com, Domain, PackedChallenge, PcsProverData, StarkGenericConfig, Val}, - interaction::RapPhaseSeqKind, - rap::{AnyRap, PartitionedBaseAir, Rap}, -}; - -pub(crate) mod helper; -pub mod single; - -pub struct QuotientCommitter<'pcs, SC: StarkGenericConfig> { - pcs: &'pcs SC::Pcs, - /// For each challenge round, the challenges drawn - challenges: Vec>>, - alpha: SC::Challenge, -} - -impl<'pcs, SC: StarkGenericConfig> QuotientCommitter<'pcs, SC> { - pub fn new( - pcs: &'pcs SC::Pcs, - challenges: &[Vec], - alpha: SC::Challenge, - ) -> Self { - let 
diff --git a/crates/stark-backend/src/prover/quotient/mod.rs b/crates/stark-backend/src/prover/quotient/mod.rs deleted file mode 100644 index b45367c4be..0000000000 --- a/crates/stark-backend/src/prover/quotient/mod.rs +++ /dev/null @@ -1,244 +0,0 @@ -use std::sync::Arc; - -use itertools::{izip, Itertools}; -use p3_commit::{Pcs, PolynomialSpace}; -use p3_field::AbstractField; -use p3_matrix::{dense::RowMajorMatrix, Matrix}; -use tracing::instrument; - -use self::single::compute_single_rap_quotient_values; -use super::trace::SingleRapCommittedTraceView; -use crate::{ - air_builders::{prover::ProverConstraintFolder, symbolic::SymbolicConstraints}, - config::{Com, Domain, PackedChallenge, PcsProverData, StarkGenericConfig, Val}, - interaction::RapPhaseSeqKind, - rap::{AnyRap, PartitionedBaseAir, Rap}, -}; - -pub(crate) mod helper; -pub mod single; - -pub struct QuotientCommitter<'pcs, SC: StarkGenericConfig> { - pcs: &'pcs SC::Pcs, - /// For each challenge round, the challenges drawn - challenges: Vec<Vec<PackedChallenge<SC>>>, - alpha: SC::Challenge, -} - -impl<'pcs, SC: StarkGenericConfig> QuotientCommitter<'pcs, SC> { - pub fn new( - pcs: &'pcs SC::Pcs, - challenges: &[Vec<SC::Challenge>], - alpha: SC::Challenge, - ) -> Self { - let packed_challenges = challenges - .iter() - .map(|challenges| { - challenges - .iter() - .map(|c| PackedChallenge::<SC>::from_f(*c)) - .collect_vec() - }) - .collect_vec(); - Self { - pcs, - challenges: packed_challenges, - alpha, - } - } - - /// Constructs quotient domains and computes the evaluation of the quotient polynomials - /// on the quotient domains of each RAP. - /// - /// ## Assumptions - /// - `raps`, `traces`, `quotient_degrees` are all the same length and in the same order. - /// - `quotient_degrees` is the factor to **multiply** the trace degree by to get the degree - /// of the quotient polynomial. This should be determined from the constraint degree - /// of the RAP. - #[instrument(name = "compute quotient values", skip_all)] - pub fn quotient_values<'a>( - &self, - raps: Vec<Arc<dyn AnyRap<SC>>>, - qvks: &[QuotientVkData<'a, SC>], - traces: &[SingleRapCommittedTraceView<'a, SC>], - public_values: &'a [Vec<Val<SC>>], - ) -> QuotientData<SC> { - let raps = raps.iter().map(|rap| rap.as_ref()).collect_vec(); - let inner = izip!(raps, qvks, traces, public_values) - .map(|(rap, qvk, trace, pis)| self.single_rap_quotient_values(rap, qvk, trace, pis)) - .collect(); - QuotientData { inner } - } - - pub(crate) fn single_rap_quotient_values<'a, R>( - &self, - rap: &'a R, - qvk: &QuotientVkData<'a, SC>, - trace: &SingleRapCommittedTraceView<'a, SC>, - public_values: &'a [Val<SC>], - ) -> SingleQuotientData<SC> - where - R: for<'b> Rap<ProverConstraintFolder<'b, SC>> - + PartitionedBaseAir<Val<SC>> - + Sync - + ?Sized, - { - let quotient_degree = qvk.quotient_degree; - let trace_domain = trace.domain; - let quotient_domain = - trace_domain.create_disjoint_domain(trace_domain.size() * quotient_degree); - // Empty matrix if no preprocessed trace - let preprocessed_lde_on_quotient_domain = if let Some(view) = trace.preprocessed.as_ref() { - self.pcs - .get_evaluations_on_domain(view.data, view.matrix_index, quotient_domain) - .to_row_major_matrix() - } else { - RowMajorMatrix::new(vec![], 0) - }; - let partitioned_main_lde_on_quotient_domain: Vec<_> = trace - .partitioned_main - .iter() - .map(|view| { - self.pcs - .get_evaluations_on_domain(view.data, view.matrix_index, quotient_domain) - .to_row_major_matrix() - }) - .collect(); - - let (after_challenge_lde_on_quotient_domain, exposed_values_after_challenge): ( - Vec<_>, - Vec<_>, - ) = trace - .after_challenge - .iter() - .map(|(view, exposed_values)| { - ( - self.pcs - .get_evaluations_on_domain(view.data, view.matrix_index, quotient_domain) - .to_row_major_matrix(), - exposed_values - .iter() - .map(|x| PackedChallenge::<SC>::from_f(*x)) - .collect_vec(), - ) - }) - .unzip(); - - let quotient_values = compute_single_rap_quotient_values( - rap, - qvk.symbolic_constraints, - trace_domain, - quotient_domain, - preprocessed_lde_on_quotient_domain, - partitioned_main_lde_on_quotient_domain, - after_challenge_lde_on_quotient_domain, - &self.challenges, - self.alpha, - public_values, - &exposed_values_after_challenge - .iter() - .map(|v| v.as_slice()) - .collect_vec(), - qvk.rap_phase_seq_kind, - qvk.interaction_chunk_size, - ); - SingleQuotientData { - quotient_degree, - quotient_domain, - quotient_values, - } - } - - #[instrument(name = "commit to quotient poly chunks", skip_all)] - pub fn commit(&self, data: QuotientData<SC>) -> ProverQuotientData<SC> { - let quotient_degrees = data.inner.iter().map(|d| d.quotient_degree).collect(); - let quotient_domains_and_chunks = data - .split() - .into_iter() - .map(|q| (q.domain, q.chunk)) - .collect(); - let (commit, data) = self.pcs.commit(quotient_domains_and_chunks); - ProverQuotientData { - quotient_degrees, - commit, - data, - } - } -} - -/// Prover data for multi-matrix quotient polynomial commitment. -/// Quotient polynomials for multiple RAP matrices are committed together into a single commitment. -/// The quotient polynomials can be committed together even if the corresponding trace matrices -/// are committed separately. -pub struct ProverQuotientData<SC: StarkGenericConfig> { - /// For each AIR, the number of quotient chunks that were committed. - pub quotient_degrees: Vec<usize>, - /// Quotient commitment - pub commit: Com<SC>, - /// Prover data for the quotient commitment - pub data: PcsProverData<SC>, -} - -/// The quotient polynomials from multiple RAP matrices. -pub struct QuotientData<SC: StarkGenericConfig> { - inner: Vec<SingleQuotientData<SC>>, -} - -impl<SC: StarkGenericConfig> QuotientData<SC> { - /// Splits the quotient polynomials from multiple AIRs into chunks of size equal to the trace domain size. - pub fn split(self) -> impl IntoIterator<Item = QuotientChunk<SC>> { - self.inner.into_iter().flat_map(|data| data.split()) - } -} - -/// The quotient polynomial from a single matrix RAP, evaluated on the quotient domain. -pub struct SingleQuotientData<SC: StarkGenericConfig> { - /// The factor by which the trace degree was multiplied to get the - /// quotient domain size. - quotient_degree: usize, - /// Quotient domain - quotient_domain: Domain<SC>, - /// Evaluations of the quotient polynomial on the quotient domain - quotient_values: Vec<SC::Challenge>, -} - -impl<SC: StarkGenericConfig> SingleQuotientData<SC> { - /// The vector of evaluations of the quotient polynomial on the quotient domain, - /// first flattened from vector of extension field elements to matrix of base field elements, - /// and then split into chunks of size equal to the trace domain size (quotient domain size - /// divided by `quotient_degree`). - pub fn split(self) -> impl IntoIterator<Item = QuotientChunk<SC>> { - let quotient_degree = self.quotient_degree; - let quotient_domain = self.quotient_domain; - // Flatten from extension field elements to base field elements - let quotient_flat = RowMajorMatrix::new_col(self.quotient_values).flatten_to_base(); - let quotient_chunks = quotient_domain.split_evals(quotient_degree, quotient_flat); - let qc_domains = quotient_domain.split_domains(quotient_degree); - qc_domains - .into_iter() - .zip_eq(quotient_chunks) - .map(|(domain, chunk)| QuotientChunk { domain, chunk }) - } -} - -/// The vector of evaluations of the quotient polynomial on the quotient domain, -/// split into chunks of size equal to the trace domain size (quotient domain size -/// divided by `quotient_degree`). -/// -/// This represents a single chunk, where the vector of extension field elements is -/// further flattened to a matrix of base field elements. -pub struct QuotientChunk<SC: StarkGenericConfig> { - /// Chunk of quotient domain, which is a coset of the trace domain - pub domain: Domain<SC>, - /// Matrix with number of rows equal to trace domain size, - /// and number of columns equal to extension field degree. - pub chunk: RowMajorMatrix<Val<SC>>, -} - -/// All necessary data from VK to compute ProverQuotientData -pub struct QuotientVkData<'a, SC: StarkGenericConfig> { - pub quotient_degree: usize, - pub rap_phase_seq_kind: RapPhaseSeqKind, - pub interaction_chunk_size: usize, - /// Symbolic constraints of the AIR in all challenge phases. This is - /// a serialization of the constraints in the AIR. - pub symbolic_constraints: &'a SymbolicConstraints<Val<SC>>, -}
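// Shape sketch for `QuotientData::split` above (hedged; `data: QuotientData<SC>` is an
// assumed binding): each `QuotientChunk` pairs one coset of the trace domain with a
// base-field matrix of trace-domain height and extension-degree width.
//
//     for QuotientChunk { domain, chunk } in data.split() {
//         debug_assert_eq!(chunk.height(), domain.size());
//         debug_assert_eq!(chunk.width(), SC::Challenge::D);
//     }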
diff --git a/crates/stark-backend/src/prover/quotient/single.rs b/crates/stark-backend/src/prover/quotient/single.rs deleted file mode 100644 index 07c3a03004..0000000000 --- a/crates/stark-backend/src/prover/quotient/single.rs +++ /dev/null @@ -1,195 +0,0 @@ -use std::cmp::min; - -use itertools::Itertools; -use p3_commit::PolynomialSpace; -use p3_field::{AbstractExtensionField, AbstractField, PackedValue}; -use p3_matrix::{dense::RowMajorMatrixView, stack::VerticalPair, Matrix}; -use p3_maybe_rayon::prelude::*; -use p3_util::log2_strict_usize; -use tracing::instrument; - -use crate::{ - air_builders::{prover::ProverConstraintFolder, symbolic::SymbolicConstraints}, - config::{Domain, PackedChallenge, PackedVal, StarkGenericConfig, Val}, - interaction::RapPhaseSeqKind, - rap::{PartitionedBaseAir, Rap}, -}; - -// Starting reference: p3_uni_stark::prover::quotient_values -// TODO: make this into a trait that is auto-implemented so we can dynamic dispatch the trait -/// Computes evaluation of DEEP quotient polynomial on the quotient domain for a single RAP (single trace matrix). -/// -/// Designed to be general enough to support RAP with multiple rounds of challenges. -#[allow(clippy::too_many_arguments)] -#[instrument( - name = "compute single RAP quotient polynomial", - level = "trace", - skip_all -)] -pub fn compute_single_rap_quotient_values<'a, SC, R, Mat>( - rap: &'a R, - symbolic_constraints: &SymbolicConstraints<Val<SC>>, - trace_domain: Domain<SC>, - quotient_domain: Domain<SC>, - preprocessed_trace_on_quotient_domain: Mat, - partitioned_main_lde_on_quotient_domain: Vec<Mat>, - after_challenge_lde_on_quotient_domain: Vec<Mat>, - // For each challenge round, the challenges drawn - challenges: &[Vec<PackedChallenge<SC>>], - alpha: SC::Challenge, - public_values: &'a [Val<SC>], - // Values exposed to verifier after challenge round i - exposed_values_after_challenge: &'a [&'a [PackedChallenge<SC>]], - rap_phase_seq_kind: RapPhaseSeqKind, - interaction_chunk_size: usize, -) -> Vec<SC::Challenge> -where - // TODO: avoid ?Sized to prevent dynamic dispatching because `eval` is called many many times - R: for<'b> Rap<ProverConstraintFolder<'b, SC>> + PartitionedBaseAir<Val<SC>> + Sync + ?Sized, - SC: StarkGenericConfig, - Mat: Matrix<Val<SC>> + Sync, -{ - let quotient_size = quotient_domain.size(); - let preprocessed_width = preprocessed_trace_on_quotient_domain.width(); - let mut sels = trace_domain.selectors_on_coset(quotient_domain); - - let qdb = log2_strict_usize(quotient_size) - log2_strict_usize(trace_domain.size()); - let next_step = 1 << qdb; - - let ext_degree = SC::Challenge::D; - - let mut alpha_powers = alpha - .powers() - .take(symbolic_constraints.constraints.len()) - .collect_vec(); - alpha_powers.reverse(); - - // assert!(quotient_size >= PackedVal::<SC>::WIDTH); - // We take PackedVal::<SC>::WIDTH worth of values at a time from a quotient_size slice, so we need to - // pad with default values in the case where quotient_size is smaller than PackedVal::<SC>::WIDTH. - for _ in quotient_size..PackedVal::<SC>::WIDTH { - sels.is_first_row.push(Val::<SC>::default()); - sels.is_last_row.push(Val::<SC>::default()); - sels.is_transition.push(Val::<SC>::default()); - sels.inv_zeroifier.push(Val::<SC>::default()); - } - - (0..quotient_size) - .into_par_iter() - .step_by(PackedVal::<SC>::WIDTH) - .flat_map_iter(|i_start| { - let wrap = |i| i % quotient_size; - let i_range = i_start..i_start + PackedVal::<SC>::WIDTH; - - let is_first_row = *PackedVal::<SC>::from_slice(&sels.is_first_row[i_range.clone()]); - let is_last_row = *PackedVal::<SC>::from_slice(&sels.is_last_row[i_range.clone()]); - let is_transition = *PackedVal::<SC>::from_slice(&sels.is_transition[i_range.clone()]); - let inv_zeroifier = *PackedVal::<SC>::from_slice(&sels.inv_zeroifier[i_range.clone()]); - - let [preprocessed_local, preprocessed_next] = [0, 1].map(|step_idx| { - (0..preprocessed_width) - .map(|col| { - PackedVal::<SC>::from_fn(|offset| { - Matrix::get( - &preprocessed_trace_on_quotient_domain, - wrap(i_start + offset + step_idx * next_step), - col, - ) - }) - }) - .collect_vec() - }); - - let partitioned_main_pairs = partitioned_main_lde_on_quotient_domain - .iter() - .map(|lde| { - let width = lde.width(); - [0, 1].map(|step_idx| { - (0..width) - .map(|col| { - PackedVal::<SC>::from_fn(|offset| { - lde.get(wrap(i_start + offset + step_idx * next_step), col) - }) - }) - .collect_vec() - }) - }) - .collect_vec(); - - let after_challenge_pairs = after_challenge_lde_on_quotient_domain - .iter() - .map(|lde| { - // Width in base field with extension field elements flattened - let base_width = lde.width(); - [0, 1].map(|step_idx| { - (0..base_width) - .step_by(ext_degree) - .map(|col| { - PackedChallenge::<SC>::from_base_fn(|i| { - PackedVal::<SC>::from_fn(|offset| { - lde.get( - wrap(i_start + offset + step_idx * next_step), - col + i, - ) - }) - }) - }) - .collect_vec() - }) - }) - .collect_vec(); - - let accumulator = PackedChallenge::<SC>::ZERO; - let mut folder = ProverConstraintFolder { - preprocessed: VerticalPair::new( - RowMajorMatrixView::new_row(&preprocessed_local), - RowMajorMatrixView::new_row(&preprocessed_next), - ), - partitioned_main: partitioned_main_pairs - .iter() - .map(|[local, next]| { - VerticalPair::new( - RowMajorMatrixView::new_row(local), - RowMajorMatrixView::new_row(next), - ) - }) - .collect(), - after_challenge: after_challenge_pairs - .iter() - .map(|[local, next]| { - VerticalPair::new( - RowMajorMatrixView::new_row(local), - RowMajorMatrixView::new_row(next), - ) - }) - .collect(), - challenges, - is_first_row, - is_last_row, - is_transition, - alpha_powers: &alpha_powers, - accumulator, - public_values, - exposed_values_after_challenge, - interactions: vec![], - interaction_chunk_size, - rap_phase_seq_kind, - has_common_main: rap.common_main_width() > 0, - constraint_index: 0, - }; - rap.eval(&mut folder); - - // quotient(x) = constraints(x) / Z_H(x) - let quotient = folder.accumulator * inv_zeroifier; - - // "Transpose" D packed base coefficients into WIDTH scalar extension coefficients. - let width = min(PackedVal::<SC>::WIDTH, quotient_size); - (0..width).map(move |idx_in_packing| { - let quotient_value = (0..<SC::Challenge as AbstractExtensionField<Val<SC>>>::D) - .map(|coeff_idx| quotient.as_base_slice()[coeff_idx].as_slice()[idx_in_packing]) - .collect::<Vec<_>>(); - SC::Challenge::from_base_slice(&quotient_value) - }) - }) - .collect() -} diff --git a/crates/stark-backend/src/prover/trace.rs b/crates/stark-backend/src/prover/trace.rs deleted file mode 100644 index c265ca9ec1..0000000000 --- a/crates/stark-backend/src/prover/trace.rs +++ /dev/null @@ -1,203 +0,0 @@ -use std::sync::Arc; - -use derivative::Derivative; -use itertools::{izip, Itertools}; -use p3_commit::Pcs; -use p3_matrix::{ - dense::{RowMajorMatrix, RowMajorMatrixView}, - Matrix, -}; -use serde::{Deserialize, Serialize}; -use tracing::info_span; - -use crate::{ - commit::CommittedSingleMatrixView, - config::{Com, Domain, PcsProverData, StarkGenericConfig, Val}, - keygen::view::MultiStarkProvingKeyView, - prover::quotient::{helper::QuotientVkDataHelper, ProverQuotientData, QuotientCommitter}, - rap::AnyRap, -}; - -#[allow(clippy::too_many_arguments)] -pub(super) fn commit_quotient_traces<'a, SC: StarkGenericConfig>( - pcs: &SC::Pcs, - mpk: &MultiStarkProvingKeyView<SC>, - alpha: SC::Challenge, - challenges: &[Vec<SC::Challenge>], - raps: Vec<Arc<dyn AnyRap<SC>>>, - public_values_per_air: &[Vec<Val<SC>>], - domain_per_air: Vec<Domain<SC>>, - cached_mains_pdata_per_air: &'a [Vec<ProverTraceData<SC>>], - common_main_prover_data: &'a ProverTraceData<SC>, - perm_prover_data: &'a Option<ProverTraceData<SC>>, - exposed_values_after_challenge: Vec<Vec<Vec<SC::Challenge>>>, -) -> ProverQuotientData<SC> { - let trace_views = create_trace_view_per_air( - domain_per_air, - cached_mains_pdata_per_air, - mpk, - exposed_values_after_challenge, - common_main_prover_data, - perm_prover_data, - ); - let quotient_committer = QuotientCommitter::new(pcs, challenges, alpha); - let qvks = mpk - .per_air - .iter() - .map(|pk| pk.get_quotient_vk_data()) - .collect_vec(); - let quotient_values = - quotient_committer.quotient_values(raps, &qvks, &trace_views, public_values_per_air); - // Commit to quotient polynomials. One shared commit for all quotient polynomials - quotient_committer.commit(quotient_values) -}
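// For orientation, the algebra implemented by `compute_single_rap_quotient_values`
// above and checked again in `verifier/constraints.rs` later in this patch: with the
// constraints folded by powers of alpha into C(x), the committed quotient is
//
//     q(x) = C(x) / Z_H(x)
//
// for Z_H the vanishing polynomial of the trace domain H, and the verifier later
// checks C(zeta) * Z_H(zeta)^{-1} == q(zeta) at the out-of-domain point zeta.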
 -fn create_trace_view_per_air<'a, SC: StarkGenericConfig>( - domain_per_air: Vec<Domain<SC>>, - cached_mains_pdata_per_air: &'a [Vec<ProverTraceData<SC>>], - mpk: &'a MultiStarkProvingKeyView<SC>, - exposed_values_after_challenge: Vec<Vec<Vec<SC::Challenge>>>, - common_main_prover_data: &'a ProverTraceData<SC>, - perm_prover_data: &'a Option<ProverTraceData<SC>>, -) -> Vec<SingleRapCommittedTraceView<'a, SC>> { - let mut common_main_idx = 0; - let mut after_challenge_idx = 0; - izip!( - domain_per_air, - cached_mains_pdata_per_air, - &mpk.per_air, - exposed_values_after_challenge, - ).map(|(domain, cached_mains_pdata, pk, exposed_values)| { - // The AIR will be treated as the full RAP with virtual columns after this - let preprocessed = pk.preprocessed_data.as_ref().map(|p| { - // TODO: currently assuming each chip has its own preprocessed commitment - CommittedSingleMatrixView::<SC>::new(p.data.as_ref(), 0) - }); - let mut partitioned_main: Vec<_> = cached_mains_pdata - .iter() - .map(|pdata| CommittedSingleMatrixView::new(pdata.data.as_ref(), 0)) - .collect(); - if pk.vk.has_common_main() { - partitioned_main.push(CommittedSingleMatrixView::new( - common_main_prover_data.data.as_ref(), - common_main_idx, - )); - common_main_idx += 1; - } - - let after_challenge = exposed_values - .into_iter() - .map(|exposed_values| { - let matrix = CommittedSingleMatrixView::new( - perm_prover_data - .as_ref() - .expect("AIR exposes after_challenge values but has no permutation trace commitment") - .data - .as_ref(), - after_challenge_idx, - ); - after_challenge_idx += 1; - (matrix, exposed_values) - }) - .collect(); - - SingleRapCommittedTraceView { - domain, - preprocessed, - partitioned_main, - after_challenge, - } - }).collect() -} - -/// Prover that commits to a batch of trace matrices, possibly of different heights. -pub struct TraceCommitter<'pcs, SC: StarkGenericConfig> { - pcs: &'pcs SC::Pcs, -} - -impl<SC: StarkGenericConfig> Clone for TraceCommitter<'_, SC> { - fn clone(&self) -> Self { - Self { pcs: self.pcs } - } -} - -impl<'pcs, SC: StarkGenericConfig> TraceCommitter<'pcs, SC> { - pub fn new(pcs: &'pcs SC::Pcs) -> Self { - Self { pcs } - } - - /// Uses the PCS to commit to a sequence of trace matrices. - /// The commitment will depend on the order of the matrices. - /// The matrices may be of different heights. - pub fn commit(&self, traces: Vec<RowMajorMatrix<Val<SC>>>) -> ProverTraceData<SC> { - info_span!("commit to trace data").in_scope(|| { - let traces_with_domains: Vec<_> = traces - .into_iter() - .map(|matrix| { - let height = matrix.height(); - // Recomputing the domain is lightweight - let domain = self.pcs.natural_domain_for_degree(height); - (domain, matrix) - }) - .collect(); - let (commit, data) = self.pcs.commit(traces_with_domains); - ProverTraceData { - commit, - data: Arc::new(data), - } - }) - } -} - -/// Prover data for multi-matrix trace commitments. -/// The data is for the traces committed into a single commitment. -#[derive(Derivative, Serialize, Deserialize)] -#[derivative(Clone(bound = "Com<SC>: Clone"))] -#[serde(bound( - serialize = "Com<SC>: Serialize, PcsProverData<SC>: Serialize", - deserialize = "Com<SC>: Deserialize<'de>, PcsProverData<SC>: Deserialize<'de>" -))] -pub struct ProverTraceData<SC: StarkGenericConfig> { - /// Commitment to the trace matrices. - pub commit: Com<SC>, - /// Prover data, such as a Merkle tree, for the trace commitment. - /// The data is stored as a thread-safe smart [Arc] pointer because [PcsProverData] does - /// not implement clone and should not be cloned. The prover only needs a reference to - /// this data, so we use a smart pointer to elide lifetime concerns. - pub data: Arc<PcsProverData<SC>>, -} - -/// A view of just the preprocessed AIR, without any after challenge columns. -pub struct PairTraceView<'a, F> { - pub preprocessed: &'a Option<RowMajorMatrixView<'a, F>>, - pub partitioned_main: &'a [RowMajorMatrixView<'a, F>], - pub public_values: &'a [F], -} - -/// The full RAP trace consists of horizontal concatenation of multiple matrices of the same height: -/// - preprocessed trace matrix -/// - the main trace matrix is horizontally partitioned into multiple matrices, -/// where each matrix can belong to a separate matrix commitment. -/// - after each round of challenges, a trace matrix for the part of the trace that is allowed to use those challenges -/// -/// Each of these matrices is allowed to be in a separate commitment. -/// -/// Only the main trace matrix is allowed to be partitioned, so that different parts may belong to -/// different commitments. We do not see any use cases where the `preprocessed` or `after_challenge` -/// matrices need to be partitioned. -#[derive(Derivative)] -#[derivative(Clone(bound = ""))] -pub struct SingleRapCommittedTraceView<'a, SC: StarkGenericConfig> { - /// Domain of the trace matrices - pub domain: Domain<SC>, - // Maybe public values should be included in this struct - /// Preprocessed trace data, if any - pub preprocessed: Option<CommittedSingleMatrixView<'a, SC>>, - /// Main trace data, horizontally partitioned into multiple matrices - pub partitioned_main: Vec<CommittedSingleMatrixView<'a, SC>>, - /// `after_challenge[i] = (matrix, exposed_values)` - /// where `matrix` is the trace matrix which uses challenges drawn - /// after observing commitments to `preprocessed`, `partitioned_main`, and `after_challenge[..i]`, - /// and `exposed_values` are certain values in this phase that are exposed to the verifier. - pub after_challenge: Vec<(CommittedSingleMatrixView<'a, SC>, Vec<SC::Challenge>)>, -} diff --git a/crates/stark-backend/src/prover/types.rs b/crates/stark-backend/src/prover/types.rs deleted file mode 100644 index 276db1de0f..0000000000 --- a/crates/stark-backend/src/prover/types.rs +++ /dev/null @@ -1,180 +0,0 @@ -use std::sync::Arc; - -use derivative::Derivative; -use itertools::Itertools; -use p3_field::Field; -use p3_matrix::{dense::RowMajorMatrix, Matrix}; -use serde::{Deserialize, Serialize}; - -pub use super::trace::{ProverTraceData, TraceCommitter}; -use crate::{ - config::{Com, RapPhaseSeqPartialProof, StarkGenericConfig, Val}, - keygen::types::{MultiStarkProvingKey, MultiStarkVerifyingKey}, - prover::opener::OpeningProof, - rap::AnyRap, -}; - -/// All commitments to a multi-matrix STARK that are not preprocessed. -#[derive(Serialize, Deserialize, Derivative)] -#[serde(bound( - serialize = "Com: Serialize", - deserialize = "Com: Deserialize<'de>" -))] -#[derivative(Clone(bound = "Com: Clone"))] -pub struct Commitments<Com> { - /// Multiple commitments for the main trace. - /// For each RAP, each part of a partitioned trace matrix - /// must belong to one of these commitments. - pub main_trace: Vec<Com>, - /// One shared commitment for all trace matrices across all RAPs - /// in a single challenge phase `i` after observing the commits to - /// `preprocessed`, `main_trace`, and `after_challenge[..i]` - pub after_challenge: Vec<Com>, - /// Shared commitment for all quotient polynomial evaluations - pub quotient: Com, -} - -/// The full proof for multiple RAPs where trace matrices are committed into -/// multiple commitments, where each commitment is multi-matrix.
 -/// -/// Includes the quotient commitments and FRI opening proofs for the constraints as well. -#[derive(Serialize, Deserialize, Derivative)] -#[serde(bound = "")] -#[derivative(Clone(bound = "Com<SC>: Clone"))] -pub struct Proof<SC: StarkGenericConfig> { - /// The PCS commitments - pub commitments: Commitments<Com<SC>>, - /// Opening proofs separated by partition, but this may change - pub opening: OpeningProof<SC>, - /// Proof data for each AIR - pub per_air: Vec<AirProofData<SC>>, - /// Partial proof for rap phase if it exists - pub rap_phase_seq_proof: Option<RapPhaseSeqPartialProof<SC>>, -} - -#[derive(Serialize, Deserialize, Derivative)] -#[serde(bound = "")] -#[derivative(Clone(bound = "SC::Challenge: Clone"))] -pub struct AirProofData<SC: StarkGenericConfig> { - pub air_id: usize, - /// height of trace matrix. - pub degree: usize, - /// For each challenge phase with trace, the values to expose to the verifier in that phase - pub exposed_values_after_challenge: Vec<Vec<SC::Challenge>>, - // The public values to expose to the verifier - pub public_values: Vec<Val<SC>>, -} - -/// Proof input -pub struct ProofInput<SC: StarkGenericConfig> { - /// (AIR id, AIR input) - pub per_air: Vec<(usize, AirProofInput<SC>)>, -} - -impl<SC: StarkGenericConfig> ProofInput<SC> { - pub fn new(per_air: Vec<(usize, AirProofInput<SC>)>) -> Self { - Self { per_air } - } - pub fn into_air_proof_input_vec(self) -> Vec<AirProofInput<SC>> { - self.per_air.into_iter().map(|(_, x)| x).collect() - } -} - -#[derive(Serialize, Deserialize, Derivative)] -#[serde(bound( - serialize = "ProverTraceData<SC>: Serialize", - deserialize = "ProverTraceData<SC>: Deserialize<'de>" -))] -#[derivative(Clone(bound = "Com<SC>: Clone"))] -pub struct CommittedTraceData<SC: StarkGenericConfig> { - pub raw_data: Arc<RowMajorMatrix<Val<SC>>>, - pub prover_data: ProverTraceData<SC>, -} - -/// Necessary input for proving a single AIR. -#[derive(Derivative)] -#[derivative(Clone(bound = "Com<SC>: Clone"))] -pub struct AirProofInput<SC: StarkGenericConfig> { - pub air: Arc<dyn AnyRap<SC>>, - /// Prover data for cached main traces - pub cached_mains_pdata: Vec<ProverTraceData<SC>>, - pub raw: AirProofRawInput<Val<SC>>, -} - -/// Raw input for proving a single AIR. -#[derive(Clone, Debug)] -pub struct AirProofRawInput<F> { - /// Cached main trace matrices - pub cached_mains: Vec<Arc<RowMajorMatrix<F>>>, - /// Common main trace matrix - pub common_main: Option<RowMajorMatrix<F>>, - /// Public values - pub public_values: Vec<F>, -} - -impl<SC: StarkGenericConfig> Proof<SC> { - pub fn get_air_ids(&self) -> Vec<usize> { - self.per_air.iter().map(|p| p.air_id).collect() - } - pub fn get_public_values(&self) -> Vec<Vec<Val<SC>>> { - self.per_air - .iter() - .map(|p| p.public_values.clone()) - .collect() - } -} - -impl<SC: StarkGenericConfig> ProofInput<SC> { - pub fn sort(&mut self) { - self.per_air.sort_by_key(|p| p.0); - } -} - -impl<SC: StarkGenericConfig> MultiStarkVerifyingKey<SC> { - pub fn validate(&self, proof_input: &ProofInput<SC>) -> bool { - if !proof_input - .per_air - .iter() - .all(|input| input.0 < self.per_air.len()) - { - return false; - } - if !proof_input - .per_air - .iter() - .tuple_windows() - .all(|(a, b)| a.0 < b.0) - { - return false; - } - true - } -} - -impl<SC: StarkGenericConfig> MultiStarkProvingKey<SC> { - pub fn validate(&self, proof_input: &ProofInput<SC>) -> bool { - self.get_vk().validate(proof_input) - } -} - -impl<F: Field> AirProofRawInput<F> { - pub fn height(&self) -> usize { - let mut height = None; - for m in self.cached_mains.iter() { - if let Some(h) = height { - assert_eq!(h, m.height()); - } else { - height = Some(m.height()); - } - } - let common_h = self.common_main.as_ref().map(|trace| trace.height()); - if let Some(h) = height { - if let Some(common_h) = common_h { - assert_eq!(h, common_h); - } - h - } else { - common_h.unwrap_or(0) - } - } -} diff --git a/crates/stark-backend/src/rap.rs b/crates/stark-backend/src/rap.rs deleted file mode 100644 index f81c4fd337..0000000000 --- a/crates/stark-backend/src/rap.rs +++ /dev/null @@ -1,124 +0,0 @@ -//! # RAP (Randomized Air with Preprocessing) -//! See for formal definition. - -use std::any::{type_name, Any}; - -use p3_air::{BaseAir, PermutationAirBuilder}; - -use crate::{ - air_builders::{ - debug::DebugConstraintBuilder, prover::ProverConstraintFolder, symbolic::SymbolicRapBuilder, - }, - config::{StarkGenericConfig, Val}, -}; - -/// An AIR with 0 or more public values. -/// This trait will be merged into Plonky3 in PR: https://github.com/Plonky3/Plonky3/pull/470 -pub trait BaseAirWithPublicValues<F>: BaseAir<F> { - fn num_public_values(&self) -> usize { - 0 - } -} - -/// An AIR with 1 or more main trace partitions. -pub trait PartitionedBaseAir<F>: BaseAir<F> { - /// By default, an AIR has no cached main trace. - fn cached_main_widths(&self) -> Vec<usize> { - vec![] - } - /// By default, an AIR has only one private main trace. - fn common_main_width(&self) -> usize { - self.width() - } -} - -/// An AIR that works with a particular `AirBuilder` which allows preprocessing -/// and injected randomness. -/// -/// Currently this is not a fully general RAP. Only the following phases are allowed: -/// - Preprocessing -/// - Main trace generation and commitment -/// - Permutation trace generation and commitment -/// -/// Randomness is drawn after the main trace commitment phase, and used in the permutation trace. -/// -/// Does not inherit [Air](p3_air::Air) trait to allow overrides for technical reasons -/// around dynamic dispatch. -pub trait Rap<AB>: Sync -where - AB: PermutationAirBuilder, -{ - fn eval(&self, builder: &mut AB); -} - -/// Permutation AIR builder that exposes certain values to both prover and verifier -/// _after_ the permutation challenges are drawn. These can be thought of as -/// "public values" known after the challenges are drawn. -/// -/// Exposed values are used internally by the prover and verifier -/// in cross-table permutation arguments.
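// Concrete instance (illustrative): in the permutation/log-up challenge phase each AIR
// exposes its cumulative sum after the challenges are drawn, and the verifier checks
// that the exposed sums across all AIRs add to zero; see `MultiTraceStarkVerifier::verify`
// later in this patch.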
 -pub trait PermutationAirBuilderWithExposedValues: PermutationAirBuilder { - fn permutation_exposed_values(&self) -> &[Self::VarEF]; -} - -/// RAP trait for all-purpose dynamic dispatch use. -/// This trait is auto-implemented if you implement `Air` and `BaseAirWithPublicValues` and `PartitionedBaseAir` traits. -pub trait AnyRap<SC: StarkGenericConfig>: -Rap<SymbolicRapBuilder<Val<SC>>> // for keygen to extract fixed data about the RAP - + for<'a> Rap<ProverConstraintFolder<'a, SC>> // for prover quotient polynomial calculation - + for<'a> Rap<DebugConstraintBuilder<'a, SC>> // for debugging - + BaseAirWithPublicValues<Val<SC>> - + PartitionedBaseAir<Val<SC>> - + Send + Sync -{ - fn as_any(&self) -> &dyn Any; - /// Name for display purposes - fn name(&self) -> String; -} - -impl<SC, T> AnyRap<SC> for T -where - SC: StarkGenericConfig, - T: Rap<SymbolicRapBuilder<Val<SC>>> - + for<'a> Rap<ProverConstraintFolder<'a, SC>> - + for<'a> Rap<DebugConstraintBuilder<'a, SC>> - + BaseAirWithPublicValues<Val<SC>> - + PartitionedBaseAir<Val<SC>> - + Send - + Sync - + 'static, -{ - fn as_any(&self) -> &dyn Any { - self - } - - fn name(&self) -> String { - get_air_name(self) - } -} - -/// Automatically derives the AIR name from the type name for pretty display purposes. -pub fn get_air_name<T>(_rap: &T) -> String { - let full_name = type_name::<T>().to_string(); - // Split the input by the first '<' to separate the main type from its generics - if let Some((main_part, generics_part)) = full_name.split_once('<') { - // Extract the last segment of the main type - let main_type = main_part.split("::").last().unwrap_or(""); - - // Remove the trailing '>' from the generics part and split by ", " to handle multiple generics - let generics: Vec<String> = generics_part - .trim_end_matches('>') - .split(", ") - .map(|generic| { - // For each generic type, extract the last segment after "::" - generic.split("::").last().unwrap_or("").to_string() - }) - .collect(); - - // Join the simplified generics back together with ", " and format the result - format!("{}<{}>", main_type, generics.join(", ")) - } else { - // If there's no generic part, just return the last segment after "::" - full_name.split("::").last().unwrap_or("").to_string() - } -}
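// Hedged example of the derivation above (`FibonacciAir` is a hypothetical type used
// only for illustration):
//
//     // type_name::<T>() == "my_crate::air::FibonacciAir<p3_baby_bear::BabyBear>"
//     // get_air_name(&air) == "FibonacciAir<BabyBear>"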
 diff --git a/crates/stark-backend/src/sumcheck.rs b/crates/stark-backend/src/sumcheck.rs deleted file mode 100644 index 169ec8601b..0000000000 --- a/crates/stark-backend/src/sumcheck.rs +++ /dev/null @@ -1,329 +0,0 @@ -//! Copied from starkware-libs/stwo under Apache-2.0 license. -//! -//! Sum-check protocol that proves and verifies claims about `sum_x g(x)` for all x in `{0, 1}^n`. -//! -//! [`MultivariatePolyOracle`] provides methods for evaluating sums and making transformations on -//! `g` in the context of the protocol. It is intended to be used in conjunction with -//! [`prove_batch()`] to generate proofs. - -use std::iter::zip; - -use itertools::Itertools; -use p3_challenger::FieldChallenger; -use p3_field::Field; -use thiserror::Error; - -use crate::poly::{multi::MultivariatePolyOracle, uni::UnivariatePolynomial}; - -pub struct SumcheckArtifacts<F, O> { - pub evaluation_point: Vec<F>, - pub constant_poly_oracles: Vec<O>, - pub claimed_evals: Vec<F>, -} - -/// Performs sum-check on a random linear combination of multiple multivariate polynomials. -/// -/// Let the multivariate polynomials be `g_0, ..., g_{n-1}`. A single sum-check is performed on -/// multivariate polynomial `h = g_0 + lambda * g_1 + ... + lambda^(n-1) * g_{n-1}`. The `g_i`s do -/// not need to have the same number of variables. `g_i`s with less variables are folded in the -/// latest possible round of the protocol. For instance with `g_0(x, y, z)` and `g_1(x, y)` -/// sum-check is performed on `h(x, y, z) = g_0(x, y, z) + lambda * g_1(y, z)`. Claim `c_i` should -/// equal the claimed sum of `g_i(x_0, ..., x_{j-1})` over all `(x_0, ..., x_{j-1})` in `{0, 1}^j`. -/// -/// The degree of each `g_i` should not exceed [`MAX_DEGREE`] in any variable. The sum-check proof -/// of `h`, list of challenges (variable assignment) and the constant oracles (i.e. the `g_i` with -/// all variables fixed to their corresponding challenges) are returned. -/// -/// Output is of the form: `(proof, artifacts)`. -/// -/// # Panics -/// -/// Panics if: -/// - No multivariate polynomials are provided. -/// - There aren't the same number of multivariate polynomials and claims. -/// - The degree of any multivariate polynomial exceeds [`MAX_DEGREE`] in any variable. -/// - The round polynomials are inconsistent with their corresponding claimed sum on `0` and `1`. -// TODO: Consider returning constant oracles as separate type. -pub fn prove_batch<F: Field, O: MultivariatePolyOracle<F>>( - mut claims: Vec<F>, - mut polys: Vec<O>, - lambda: F, - challenger: &mut impl FieldChallenger<F>, -) -> (SumcheckProof<F>, SumcheckArtifacts<F, O>) { - let n_variables = polys.iter().map(O::arity).max().unwrap(); - assert_eq!(claims.len(), polys.len()); - - let mut round_polys = vec![]; - let mut evaluation_point = vec![]; - - // Update the claims for the sum over `h`'s hypercube. - for (claim, multivariate_poly) in zip(&mut claims, &polys) { - let n_unused_variables = n_variables - multivariate_poly.arity(); - *claim *= F::from_canonical_u32(1 << n_unused_variables); - } - - // Prove sum-check rounds - for round in 0..n_variables { - let n_remaining_rounds = n_variables - round; - - let this_round_polys = zip(&polys, &claims) - .enumerate() - .map(|(i, (multivariate_poly, &claim))| { - let round_poly = if n_remaining_rounds == multivariate_poly.arity() { - multivariate_poly.marginalize_first(claim) - } else { - claim.halve().into() - }; - - let eval_at_0 = round_poly.evaluate(F::ZERO); - let eval_at_1 = round_poly.evaluate(F::ONE); - - assert_eq!( - eval_at_0 + eval_at_1, - claim, - "Round {round}, poly {i}: eval(0) + eval(1) != claim ({} != {claim})", - eval_at_0 + eval_at_1, - ); - assert!( - round_poly.degree() <= MAX_DEGREE, - "Round {round}, poly {i}: degree {} > max {MAX_DEGREE}", - round_poly.degree(), - ); - - round_poly - }) - .collect_vec(); - - let round_poly = random_linear_combination(&this_round_polys, lambda); - - challenger.observe_slice(&round_poly); - - let challenge = challenger.sample_ext_element(); - - claims = this_round_polys - .iter() - .map(|round_poly| round_poly.evaluate(challenge)) - .collect(); - - polys = polys - .into_iter() - .map(|multivariate_poly| { - if n_remaining_rounds != multivariate_poly.arity() { - multivariate_poly - } else { - multivariate_poly.partial_evaluation(challenge) - } - }) - .collect(); - - round_polys.push(round_poly); - evaluation_point.push(challenge); - } - - let proof = SumcheckProof { round_polys }; - let artifacts = SumcheckArtifacts { - evaluation_point, - constant_poly_oracles: polys, - claimed_evals: claims, - }; - - (proof, artifacts) -} - -/// Returns `p_0 + alpha * p_1 + ... + alpha^(n-1) * p_{n-1}`. -#[allow(dead_code)] -fn random_linear_combination<F: Field>( - polys: &[UnivariatePolynomial<F>], - alpha: F, -) -> UnivariatePolynomial<F> { - polys - .iter() - .rfold(UnivariatePolynomial::<F>::zero(), |acc, poly| { - acc * alpha + poly.clone() - }) -} - -/// Partially verifies a sum-check proof. -/// -/// Only "partial" since it does not fully verify the prover's claimed evaluation on the variable -/// assignment but checks if the sum of the round polynomials evaluated on `0` and `1` matches the -/// claim for each round. If the proof passes these checks, the variable assignment and the prover's -/// claimed evaluation are returned for the caller to validate; otherwise an [`Err`] is returned. -/// -/// Output is of the form `(variable_assignment, claimed_eval)`. -pub fn partially_verify<F: Field>( - mut claim: F, - proof: &SumcheckProof<F>, - challenger: &mut impl FieldChallenger<F>, -) -> Result<(Vec<F>, F), SumcheckError<F>> { - let mut assignment = Vec::new(); - - for (round, round_poly) in proof.round_polys.iter().enumerate() { - if round_poly.degree() > MAX_DEGREE { - return Err(SumcheckError::DegreeInvalid { round }); - } - - // TODO: optimize this by sending one less coefficient, and computing it from the - // claim, instead of checking the claim. (Can also be done by quotienting). - let sum = round_poly.evaluate(F::ZERO) + round_poly.evaluate(F::ONE); - - if claim != sum { - return Err(SumcheckError::SumInvalid { claim, sum, round }); - } - - challenger.observe_slice(round_poly); - let challenge = challenger.sample_ext_element(); - - claim = round_poly.evaluate(challenge); - assignment.push(challenge); - } - - Ok((assignment, claim)) -} - -#[derive(Debug, Clone)] -pub struct SumcheckProof<F> { - pub round_polys: Vec<UnivariatePolynomial<F>>, -} - -/// Max degree of polynomials the verifier accepts in each round of the protocol. -pub const MAX_DEGREE: usize = 3; - -/// Sum-check protocol verification error. -#[derive(Error, Debug)] -pub enum SumcheckError<F> { - #[error("degree of the polynomial in round {round} is too high")] - DegreeInvalid { round: RoundIndex }, - #[error("sum does not match the claim in round {round} (sum {sum}, claim {claim})")] - SumInvalid { claim: F, sum: F, round: RoundIndex }, -} - -/// Sum-check round index where 0 corresponds to the first round.
-pub type RoundIndex = usize; - -#[cfg(test)] -mod tests { - use openvm_stark_sdk::{ - config::baby_bear_blake3::default_engine, engine::StarkEngine, utils::create_seeded_rng, - }; - use p3_baby_bear::BabyBear; - use p3_field::AbstractField; - use rand::Rng; - - use super::*; - use crate::poly::multi::Mle; - - #[test] - fn sumcheck_works() { - type F = BabyBear; - - let engine = default_engine(); - - let mut rng = create_seeded_rng(); - let values: Vec = (0..32).map(|_| rng.gen()).collect(); - let claim = values.iter().copied().sum(); - - let mle = Mle::new(values); - - let lambda = F::ONE; - - let (proof, _) = prove_batch( - vec![claim], - vec![mle.clone()], - lambda, - &mut engine.new_challenger(), - ); - let (assignment, eval) = - partially_verify(claim, &proof, &mut engine.new_challenger()).unwrap(); - - assert_eq!(eval, mle.eval(&assignment)); - } - - #[test] - fn batch_sumcheck_works() { - type F = BabyBear; - - let engine = default_engine(); - let mut rng = create_seeded_rng(); - - let values0: Vec = (0..32).map(|_| rng.gen()).collect(); - let values1: Vec = (0..32).map(|_| rng.gen()).collect(); - let claim0 = values0.iter().copied().sum(); - let claim1 = values1.iter().copied().sum(); - - let mle0 = Mle::new(values0.clone()); - let mle1 = Mle::new(values1.clone()); - - let lambda: F = rng.gen(); - - let claims = vec![claim0, claim1]; - let mles = vec![mle0.clone(), mle1.clone()]; - let (proof, _) = prove_batch(claims, mles, lambda, &mut engine.new_challenger()); - - let claim = claim0 + lambda * claim1; - let (assignment, eval) = - partially_verify(claim, &proof, &mut engine.new_challenger()).unwrap(); - - let eval0 = mle0.eval(&assignment); - let eval1 = mle1.eval(&assignment); - assert_eq!(eval, eval0 + lambda * eval1); - } - - #[test] - fn batch_sumcheck_with_different_n_variables() { - type F = BabyBear; - - let engine = default_engine(); - let mut rng = create_seeded_rng(); - - let values0: Vec = (0..64).map(|_| rng.gen()).collect(); - let values1: Vec = (0..32).map(|_| rng.gen()).collect(); - - let claim0 = values0.iter().copied().sum(); - let claim1 = values1.iter().copied().sum(); - - let mle0 = Mle::new(values0.clone()); - let mle1 = Mle::new(values1.clone()); - - let lambda: F = rng.gen(); - - let claims = vec![claim0, claim1]; - let mles = vec![mle0.clone(), mle1.clone()]; - let (proof, _) = prove_batch(claims, mles, lambda, &mut engine.new_challenger()); - - let claim = claim0 + lambda * claim1.double(); - let (assignment, eval) = - partially_verify(claim, &proof, &mut engine.new_challenger()).unwrap(); - - let eval0 = mle0.eval(&assignment); - let eval1 = mle1.eval(&assignment[1..]); - assert_eq!(eval, eval0 + lambda * eval1); - } - - #[test] - fn invalid_sumcheck_proof_fails() { - type F = BabyBear; - - let engine = default_engine(); - let mut rng = create_seeded_rng(); - - let values: Vec = (0..8).map(|_| rng.gen()).collect(); - let claim = values.iter().copied().sum(); - - let lambda = F::ONE; - - // Compromise the first value. 
- let mut invalid_values = values; - invalid_values[0] += F::ONE; - let invalid_claim = claim + F::ONE; - let invalid_mle = Mle::new(invalid_values.clone()); - let (invalid_proof, _) = prove_batch( - vec![invalid_claim], - vec![invalid_mle], - lambda, - &mut engine.new_challenger(), - ); - - assert!(partially_verify(claim, &invalid_proof, &mut engine.new_challenger()).is_err()); - } -} diff --git a/crates/stark-backend/src/utils.rs b/crates/stark-backend/src/utils.rs deleted file mode 100644 index c1d2e5b4bc..0000000000 --- a/crates/stark-backend/src/utils.rs +++ /dev/null @@ -1,58 +0,0 @@ -use p3_field::Field; -use tracing::instrument; - -use crate::prover::USE_DEBUG_BUILDER; - -// Copied from valida-util -/// Calculates and returns the multiplicative inverses of each field element, with zero -/// values remaining unchanged. -#[instrument(name = "batch_multiplicative_inverse", level = "info", skip_all)] -pub fn batch_multiplicative_inverse_allowing_zero(values: Vec) -> Vec { - // Check if values are zero, and construct a new vector with only nonzero values - let mut nonzero_values = Vec::with_capacity(values.len()); - let mut indices = Vec::with_capacity(values.len()); - for (i, value) in values.iter().cloned().enumerate() { - if value.is_zero() { - continue; - } - nonzero_values.push(value); - indices.push(i); - } - - // Compute the multiplicative inverse of nonzero values - let inverse_nonzero_values = p3_field::batch_multiplicative_inverse(&nonzero_values); - - // Reconstruct the original vector - let mut result = values.clone(); - for (i, index) in indices.into_iter().enumerate() { - result[index] = inverse_nonzero_values[i]; - } - - result -} - -/// Disables the debug builder so there are not debug assert panics. -/// Commonly used in negative tests to prevent panics. -pub fn disable_debug_builder() { - USE_DEBUG_BUILDER.with(|debug| { - *debug.lock().unwrap() = false; - }); -} - -#[macro_export] -#[cfg(feature = "parallel")] -macro_rules! parizip { - ( $first:expr $( , $rest:expr )* $(,)* ) => { - { - use rayon::iter::*; - (( $first $( , $rest)* )).into_par_iter() - } - }; -} -#[macro_export] -#[cfg(not(feature = "parallel"))] -macro_rules! 
parizip { - ( $first:expr $( , $rest:expr )* $(,)* ) => { - itertools::izip!( $first $( , $rest)* ) - }; -} diff --git a/crates/stark-backend/src/verifier/constraints.rs b/crates/stark-backend/src/verifier/constraints.rs deleted file mode 100644 index 78e0e279eb..0000000000 --- a/crates/stark-backend/src/verifier/constraints.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::marker::PhantomData; - -use itertools::Itertools; -use p3_commit::PolynomialSpace; -use p3_field::{AbstractExtensionField, AbstractField, Field}; -use p3_matrix::{dense::RowMajorMatrixView, stack::VerticalPair}; -use tracing::instrument; - -use super::error::VerificationError; -use crate::{ - air_builders::{ - symbolic::symbolic_expression::SymbolicExpression, - verifier::{GenericVerifierConstraintFolder, VerifierConstraintFolder}, - }, - config::{Domain, StarkGenericConfig, Val}, - prover::opener::AdjacentOpenedValues, -}; - -#[allow(clippy::too_many_arguments)] -#[instrument(skip_all, level = "trace")] -pub fn verify_single_rap_constraints( - constraints: &[SymbolicExpression>], - preprocessed_values: Option<&AdjacentOpenedValues>, - partitioned_main_values: Vec<&AdjacentOpenedValues>, - after_challenge_values: Vec<&AdjacentOpenedValues>, - quotient_chunks: &[Vec], - domain: Domain, // trace domain - qc_domains: &[Domain], - zeta: SC::Challenge, - alpha: SC::Challenge, - challenges: &[Vec], - public_values: &[Val], - exposed_values_after_challenge: &[Vec], -) -> Result<(), VerificationError> -where - SC: StarkGenericConfig, -{ - let zps = qc_domains - .iter() - .enumerate() - .map(|(i, domain)| { - qc_domains - .iter() - .enumerate() - .filter(|(j, _)| *j != i) - .map(|(_, other_domain)| { - other_domain.zp_at_point(zeta) - * other_domain.zp_at_point(domain.first_point()).inverse() - }) - .product::() - }) - .collect_vec(); - - let quotient = quotient_chunks - .iter() - .enumerate() - .map(|(ch_i, ch)| { - ch.iter() - .enumerate() - .map(|(e_i, &c)| zps[ch_i] * SC::Challenge::monomial(e_i) * c) - .sum::() - }) - .sum::(); - - let unflatten = |v: &[SC::Challenge]| { - v.chunks_exact(SC::Challenge::D) - .map(|chunk| { - chunk - .iter() - .enumerate() - .map(|(e_i, &c)| SC::Challenge::monomial(e_i) * c) - .sum() - }) - .collect::>() - }; - - let sels = domain.selectors_at_point(zeta); - - let (preprocessed_local, preprocessed_next) = preprocessed_values - .as_ref() - .map(|values| (values.local.as_slice(), values.next.as_slice())) - .unwrap_or((&[], &[])); - let preprocessed = VerticalPair::new( - RowMajorMatrixView::new_row(preprocessed_local), - RowMajorMatrixView::new_row(preprocessed_next), - ); - - let partitioned_main: Vec<_> = partitioned_main_values - .into_iter() - .map(|values| { - VerticalPair::new( - RowMajorMatrixView::new_row(&values.local), - RowMajorMatrixView::new_row(&values.next), - ) - }) - .collect(); - - let after_challenge_ext_values: Vec<_> = after_challenge_values - .into_iter() - .map(|values| { - let [local, next] = [&values.local, &values.next] - .map(|flattened_ext_values| unflatten(flattened_ext_values)); - (local, next) - }) - .collect(); - let after_challenge = after_challenge_ext_values - .iter() - .map(|(local, next)| { - VerticalPair::new( - RowMajorMatrixView::new_row(local), - RowMajorMatrixView::new_row(next), - ) - }) - .collect(); - - let mut folder: VerifierConstraintFolder<'_, SC> = GenericVerifierConstraintFolder { - preprocessed, - partitioned_main, - after_challenge, - is_first_row: sels.is_first_row, - is_last_row: sels.is_last_row, - is_transition: sels.is_transition, - alpha, - 
accumulator: SC::Challenge::ZERO, - challenges, - public_values, - exposed_values_after_challenge, - _marker: PhantomData, - }; - folder.eval_constraints(constraints); - - let folded_constraints = folder.accumulator; - // Finally, check that - // folded_constraints(zeta) / Z_H(zeta) = quotient(zeta) - if folded_constraints * sels.inv_zeroifier != quotient { - return Err(VerificationError::OodEvaluationMismatch); - } - - Ok(()) -} diff --git a/crates/stark-backend/src/verifier/error.rs b/crates/stark-backend/src/verifier/error.rs deleted file mode 100644 index fb94fbe249..0000000000 --- a/crates/stark-backend/src/verifier/error.rs +++ /dev/null @@ -1,16 +0,0 @@ -use thiserror::Error; - -#[derive(Debug, Error, PartialEq, Eq)] -pub enum VerificationError { - #[error("invalid proof shape")] - InvalidProofShape, - /// An error occurred while verifying the claimed openings. - #[error("invalid opening argument: {0}")] - InvalidOpeningArgument(String), - /// Out-of-domain evaluation mismatch, i.e. `constraints(zeta)` did not match - /// `quotient(zeta) Z_H(zeta)`. - #[error("out-of-domain evaluation mismatch")] - OodEvaluationMismatch, - #[error("challenge phase error")] - ChallengePhaseError, -} diff --git a/crates/stark-backend/src/verifier/mod.rs b/crates/stark-backend/src/verifier/mod.rs deleted file mode 100644 index dd7e330341..0000000000 --- a/crates/stark-backend/src/verifier/mod.rs +++ /dev/null @@ -1,302 +0,0 @@ -use itertools::{izip, Itertools}; -use p3_challenger::{CanObserve, FieldChallenger}; -use p3_commit::{Pcs, PolynomialSpace}; -use p3_field::AbstractField; -use p3_util::log2_strict_usize; -use tracing::instrument; - -use crate::{ - config::{Domain, StarkGenericConfig, Val}, - interaction::RapPhaseSeq, - keygen::{types::MultiStarkVerifyingKey, view::MultiStarkVerifyingKeyView}, - prover::{opener::AdjacentOpenedValues, types::Proof}, - verifier::constraints::verify_single_rap_constraints, -}; - -pub mod constraints; -mod error; - -pub use error::*; - -/// Verifies a partitioned proof of multi-matrix AIRs. -pub struct MultiTraceStarkVerifier<'c, SC: StarkGenericConfig> { - config: &'c SC, -} - -impl<'c, SC: StarkGenericConfig> MultiTraceStarkVerifier<'c, SC> { - pub fn new(config: &'c SC) -> Self { - Self { config } - } - /// Verify collection of InteractiveAIRs and check the permutation - /// cumulative sum is equal to zero across all AIRs. - #[instrument(name = "MultiTraceStarkVerifier::verify", level = "debug", skip_all)] - pub fn verify( - &self, - challenger: &mut SC::Challenger, - mvk: &MultiStarkVerifyingKey, - proof: &Proof, - ) -> Result<(), VerificationError> { - let mvk = mvk.view(&proof.get_air_ids()); - self.verify_raps(challenger, &mvk, proof)?; - Ok(()) - } - - /// Verify general RAPs without checking any relations (e.g., cumulative sum) between exposed values of different RAPs. - /// - /// Public values is a global list shared across all AIRs. - /// - /// - `num_challenges_to_sample[i]` is the number of challenges to sample in the trace challenge phase corresponding to `proof.commitments.after_challenge[i]`. This must have length equal - /// to `proof.commitments.after_challenge`. 
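// Typical call shape for `verify` above (sketch; construction of `engine`, `mvk`, and
// `proof` elided; compare the instrumented test harness near the end of this patch):
//
//     let verifier = MultiTraceStarkVerifier::new(engine.config());
//     verifier.verify(&mut engine.new_challenger(), &mvk, &proof)?;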
- #[instrument(level = "debug", skip_all)] - pub fn verify_raps( - &self, - challenger: &mut SC::Challenger, - mvk: &MultiStarkVerifyingKeyView, - proof: &Proof, - ) -> Result<(), VerificationError> { - let public_values = proof.get_public_values(); - // Challenger must observe public values - for pis in &public_values { - challenger.observe_slice(pis); - } - - // TODO: valid shape check from verifying key - for preprocessed_commit in mvk.flattened_preprocessed_commits() { - challenger.observe(preprocessed_commit); - } - - // Observe main trace commitments - challenger.observe_slice(&proof.commitments.main_trace); - challenger.observe_slice( - &proof - .per_air - .iter() - .map(|ap| Val::::from_canonical_usize(log2_strict_usize(ap.degree))) - .collect_vec(), - ); - - // Verification of challenge phase (except openings, which are done next). - let rap_phase = self.config.rap_phase_seq(); - let exposed_values_per_air_per_phase = proof - .per_air - .iter() - .map(|proof| proof.exposed_values_after_challenge.clone()) - .collect_vec(); - let permutation_opened_values = proof - .opening - .values - .after_challenge - .iter() - .map(|after_challenge_per_matrix| { - after_challenge_per_matrix - .iter() - .map(|after_challenge| { - vec![after_challenge.local.clone(), after_challenge.next.clone()] - }) - .collect_vec() - }) - .collect_vec(); - - assert!( - proof.commitments.after_challenge.len() <= 1, - "at most one challenge phase currently supported" - ); - - let (after_challenge_data, rap_phase_seq_result) = rap_phase.partially_verify( - challenger, - proof.rap_phase_seq_proof.as_ref(), - &exposed_values_per_air_per_phase, - &proof.commitments.after_challenge, - &permutation_opened_values, - ); - // We don't want to bail on error yet; `OodEvaluationMismatch` should take precedence over - // `ChallengePhaseError`, but we won't know if the former happens until later. - let rap_phase_seq_result = - rap_phase_seq_result.map_err(|_| VerificationError::ChallengePhaseError); - - // Draw `alpha` challenge - let alpha: SC::Challenge = challenger.sample_ext_element(); - tracing::debug!("alpha: {alpha:?}"); - - // Observe quotient commitments - challenger.observe(proof.commitments.quotient.clone()); - - // Draw `zeta` challenge - let zeta: SC::Challenge = challenger.sample_ext_element(); - tracing::debug!("zeta: {zeta:?}"); - - let pcs = self.config.pcs(); - // Build domains - let (domains, quotient_chunks_domains): (Vec<_>, Vec>) = mvk - .per_air - .iter() - .zip_eq(&proof.per_air) - .map(|(vk, air_proof)| { - let degree = air_proof.degree; - let quotient_degree = vk.quotient_degree; - let domain = pcs.natural_domain_for_degree(degree); - let quotient_domain = domain.create_disjoint_domain(degree * quotient_degree); - let qc_domains = quotient_domain.split_domains(quotient_degree); - (domain, qc_domains) - }) - .unzip(); - // Verify all opening proofs - let opened_values = &proof.opening.values; - let trace_domain_and_openings = - |domain: Domain, - zeta: SC::Challenge, - values: &AdjacentOpenedValues| { - ( - domain, - vec![ - (zeta, values.local.clone()), - (domain.next_point(zeta).unwrap(), values.next.clone()), - ], - ) - }; - // Build the opening rounds - // 1. 
First the preprocessed trace openings - // Assumption: each AIR with preprocessed trace has its own commitment and opening values - let mut rounds: Vec<_> = mvk - .preprocessed_commits() - .into_iter() - .zip_eq(&domains) - .flat_map(|(commit, domain)| commit.map(|commit| (commit, *domain))) - .zip_eq(&opened_values.preprocessed) - .map(|((commit, domain), values)| { - let domain_and_openings = trace_domain_and_openings(domain, zeta, values); - (commit, vec![domain_and_openings]) - }) - .collect(); - - // 2. Then the main trace openings - - let num_main_commits = opened_values.main.len(); - assert_eq!(num_main_commits, proof.commitments.main_trace.len()); - let mut main_commit_idx = 0; - // All commits except the last one are cached main traces. - izip!(&mvk.per_air, &domains).for_each(|(vk, domain)| { - for _ in 0..vk.num_cached_mains() { - let commit = proof.commitments.main_trace[main_commit_idx].clone(); - let value = &opened_values.main[main_commit_idx][0]; - let domains_and_openings = vec![trace_domain_and_openings(*domain, zeta, value)]; - rounds.push((commit.clone(), domains_and_openings)); - main_commit_idx += 1; - } - }); - // In the last commit, each matrix corresponds to an AIR with a common main trace. - { - let values_per_mat = &opened_values.main[main_commit_idx]; - let commit = proof.commitments.main_trace[main_commit_idx].clone(); - let domains_and_openings = mvk - .per_air - .iter() - .zip_eq(&domains) - .filter_map(|(vk, domain)| vk.has_common_main().then_some(*domain)) - .zip_eq(values_per_mat) - .map(|(domain, values)| trace_domain_and_openings(domain, zeta, values)) - .collect_vec(); - rounds.push((commit.clone(), domains_and_openings)); - } - - // 3. Then after_challenge trace openings, at most 1 phase for now. - // All AIRs with interactions should have an after challenge trace. - let after_challenge_domain_per_air = mvk - .per_air - .iter() - .zip_eq(&domains) - .filter_map(|(vk, domain)| vk.has_interaction().then_some(*domain)) - .collect_vec(); - if after_challenge_domain_per_air.is_empty() { - assert_eq!(proof.commitments.after_challenge.len(), 0); - assert_eq!(opened_values.after_challenge.len(), 0); - } else { - let after_challenge_commit = proof.commitments.after_challenge[0].clone(); - let domains_and_openings = after_challenge_domain_per_air - .into_iter() - .zip_eq(&opened_values.after_challenge[0]) - .map(|(domain, values)| trace_domain_and_openings(domain, zeta, values)) - .collect_vec(); - rounds.push((after_challenge_commit, domains_and_openings)); - } - - let quotient_domains_and_openings = opened_values - .quotient - .iter() - .zip_eq(&quotient_chunks_domains) - .flat_map(|(chunk, quotient_chunks_domains_per_air)| { - chunk - .iter() - .zip_eq(quotient_chunks_domains_per_air) - .map(|(values, &domain)| (domain, vec![(zeta, values.clone())])) - }) - .collect_vec(); - rounds.push(( - proof.commitments.quotient.clone(), - quotient_domains_and_openings, - )); - - pcs.verify(rounds, &proof.opening.proof, challenger) - .map_err(|e| VerificationError::InvalidOpeningArgument(format!("{:?}", e)))?; - - let mut preprocessed_idx = 0usize; // preprocessed commit idx - let num_phases = mvk.num_phases(); - let mut after_challenge_idx = vec![0usize; num_phases]; - let mut cached_main_commit_idx = 0; - let mut common_main_matrix_idx = 0; - - // Verify each RAP's constraints - for (domain, qc_domains, quotient_chunks, vk, air_proof) in izip!( - domains, - quotient_chunks_domains, - &opened_values.quotient, - &mvk.per_air, - &proof.per_air - ) { - let preprocessed_values = vk.preprocessed_data.as_ref().map(|_| { - let values = &opened_values.preprocessed[preprocessed_idx]; - preprocessed_idx += 1; - values - }); - let mut partitioned_main_values = Vec::with_capacity(vk.num_cached_mains()); - for _ in 0..vk.num_cached_mains() { - partitioned_main_values.push(&opened_values.main[cached_main_commit_idx][0]); - cached_main_commit_idx += 1; - } - if vk.has_common_main() { - partitioned_main_values - .push(&opened_values.main.last().unwrap()[common_main_matrix_idx]); - common_main_matrix_idx += 1; - } - // loop through challenge phases of this single RAP - let after_challenge_values = if vk.has_interaction() { - (0..num_phases) - .map(|phase_idx| { - let matrix_idx = after_challenge_idx[phase_idx]; - after_challenge_idx[phase_idx] += 1; - &opened_values.after_challenge[phase_idx][matrix_idx] - }) - .collect_vec() - } else { - vec![] - }; - verify_single_rap_constraints::<SC>( - &vk.symbolic_constraints.constraints, - preprocessed_values, - partitioned_main_values, - after_challenge_values, - quotient_chunks, - domain, - &qc_domains, - zeta, - alpha, - &after_challenge_data.challenges_per_phase, - &air_proof.public_values, - &air_proof.exposed_values_after_challenge, - )?; - } - - // If we made it this far, use the `rap_phase_seq_result` as the final result.
- rap_phase_seq_result - } -} diff --git a/crates/stark-backend/tests/cached_lookup/instrumented.rs b/crates/stark-backend/tests/cached_lookup/instrumented.rs deleted file mode 100644 index 1ee3bf3d03..0000000000 --- a/crates/stark-backend/tests/cached_lookup/instrumented.rs +++ /dev/null @@ -1,148 +0,0 @@ -use std::fs::{self, File}; - -use openvm_stark_backend::{ - config::StarkGenericConfig, keygen::types::MultiStarkVerifyingKey, prover::types::Proof, - verifier::VerificationError, -}; -use openvm_stark_sdk::{ - config::{ - baby_bear_poseidon2::{self, engine_from_perm}, - fri_params::standard_fri_params_with_100_bits_conjectured_security, - }, - dummy_airs::interaction::dummy_interaction_air::DummyInteractionAir, - engine::StarkEngineWithHashInstrumentation, -}; -use p3_util::log2_ceil_usize; -use rand::{rngs::StdRng, SeedableRng}; -use serde::{Deserialize, Serialize}; - -use super::prove::{get_data_sizes, prove, BenchParams}; -use crate::{ - cached_lookup::prove::generate_random_trace, - config::{ - instrument::{HashStatistics, StarkHashStatistics}, - FriParameters, - }, -}; - -fn instrumented_verify>( - engine: &mut E, - vk: MultiStarkVerifyingKey, - air: &DummyInteractionAir, - proof: Proof, -) -> StarkHashStatistics { - let degree = proof.per_air[0].degree; - let log_degree = log2_ceil_usize(degree); - - engine.clear_instruments(); - let mut challenger = engine.new_challenger(); - let verifier = engine.verifier(); - // Do not check cumulative sum - let res = verifier.verify(&mut challenger, &vk, &proof); - if matches!(res, Err(ref err) if err != &VerificationError::ChallengePhaseError) { - panic!("{res:?}"); - }; - - let bench_params = BenchParams { - field_width: air.field_width(), - log_degree, - }; - engine.stark_hash_statistics(bench_params) -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct VerifierStatistics { - /// Identifier for the hash permutation - pub name: String, - pub fri_params: FriParameters, - pub bench_params: BenchParams, - pub without_ct: HashStatistics, - pub with_ct: HashStatistics, -} - -fn instrumented_prove_and_verify( - fri_params: FriParameters, - trace: Vec<(u32, Vec)>, - partition: bool, -) -> StarkHashStatistics { - let instr_perm = baby_bear_poseidon2::random_instrumented_perm(); - let mut engine = engine_from_perm(instr_perm, fri_params); - engine.perm.is_on = false; - - let (vk, air, proof, _) = prove(&engine, trace, partition); - engine.perm.is_on = true; - instrumented_verify(&mut engine, vk, &air, proof) -} - -fn instrumented_verifier_comparison( - fri_params: FriParameters, - field_width: usize, - log_degree: usize, -) -> VerifierStatistics { - let rng = StdRng::seed_from_u64(0); - let trace = generate_random_trace(rng, field_width, 1 << log_degree); - println!("Without cached trace:"); - let without_ct = instrumented_prove_and_verify(fri_params, trace.clone(), false); - - println!("With cached trace:"); - let with_ct = instrumented_prove_and_verify(fri_params, trace, true); - - VerifierStatistics { - name: without_ct.name, - fri_params: without_ct.fri_params, - bench_params: without_ct.custom, - without_ct: without_ct.stats, - with_ct: with_ct.stats, - } -} - -// Run with `RUSTFLAGS="-Ctarget-cpu=native" cargo t --release -- --ignored --nocapture instrument_cached_trace_verifier` -#[test] -#[ignore = "bench"] -fn instrument_cached_trace_verifier() -> eyre::Result<()> { - let fri_params = [1, 2, 3, 4] - .map(standard_fri_params_with_100_bits_conjectured_security) - .to_vec(); - let data_sizes = get_data_sizes(); - - // Write to csv 
-    let cargo_manifest_dir = env!("CARGO_MANIFEST_DIR");
-    let _ = fs::create_dir_all(format!("{}/data", cargo_manifest_dir));
-    let csv_path = format!(
-        "{}/data/cached_trace_instrumented_verifier.csv",
-        cargo_manifest_dir
-    );
-    let mut wtr = csv::WriterBuilder::new()
-        .has_headers(false)
-        .from_path(csv_path)?;
-    // Manually write record because header cannot handle nested struct well
-    wtr.write_record([
-        "permutation_name",
-        "log_blowup",
-        "num_queries",
-        "proof_of_work_bits",
-        "page_width",
-        "log_degree",
-        "without_ct.permutations",
-        "with_ct.permutations",
-    ])?;
-
-    let mut all_stats = vec![];
-    for fri_param in fri_params {
-        for (field_width, log_degree) in &data_sizes {
-            let stats = instrumented_verifier_comparison(fri_param, *field_width, *log_degree);
-            wtr.serialize(&stats)?;
-            wtr.flush()?;
-            all_stats.push(stats);
-        }
-    }
-
-    let json_path = format!(
-        "{}/data/cached_trace_instrumented_verifier.json",
-        cargo_manifest_dir
-    );
-    let file = File::create(json_path)?;
-    serde_json::to_writer(file, &all_stats)?;
-
-    Ok(())
-}
diff --git a/crates/stark-backend/tests/cached_lookup/mod.rs b/crates/stark-backend/tests/cached_lookup/mod.rs
deleted file mode 100644
index 2dec1ae6aa..0000000000
--- a/crates/stark-backend/tests/cached_lookup/mod.rs
+++ /dev/null
@@ -1,124 +0,0 @@
-use openvm_stark_backend::{
-    config::StarkGenericConfig, engine::StarkEngine, prover::USE_DEBUG_BUILDER,
-    verifier::VerificationError, Chip,
-};
-use openvm_stark_sdk::{
-    config::{baby_bear_poseidon2::BabyBearPoseidon2Engine, FriParameters},
-    dummy_airs::interaction::dummy_interaction_air::{DummyInteractionChip, DummyInteractionData},
-    engine::StarkFriEngine,
-};
-
-mod instrumented;
-pub mod prove;
-
-// Lookup table is cached, everything else (including counts) is committed together
-pub fn prove_and_verify_indexless_lookups(
-    sender: Vec<(u32, Vec<u32>)>,
-    receiver: Vec<(u32, Vec<u32>)>,
-) -> Result<(), VerificationError> {
-    let engine = BabyBearPoseidon2Engine::new(FriParameters::standard_fast());
-
-    let mut sender_chip = DummyInteractionChip::new_without_partition(sender[0].1.len(), true, 0);
-    let mut receiver_chip = DummyInteractionChip::new_with_partition(
-        engine.config().pcs(),
-        receiver[0].1.len(),
-        false,
-        0,
-    );
-    {
-        let (count, fields): (Vec<_>, Vec<_>) = sender.into_iter().unzip();
-        sender_chip.load_data(DummyInteractionData { count, fields });
-    }
-    {
-        let (count, fields): (Vec<_>, Vec<_>) = receiver.into_iter().unzip();
-        receiver_chip.load_data(DummyInteractionData { count, fields });
-    }
-    engine
-        .run_test(vec![
-            receiver_chip.generate_air_proof_input(),
-            sender_chip.generate_air_proof_input(),
-        ])
-        .map(|_| ())
-}
-
-/// tests for cached_lookup
-#[test]
-fn test_interaction_cached_trace_happy_path() {
-    // count fields
-    //   0   1 1
-    //   7   4 2
-    //   3   5 1
-    // 546   889 4
-    let sender = vec![
-        (0, vec![1, 1]),
-        (7, vec![4, 2]),
-        (3, vec![5, 1]),
-        (546, vec![889, 4]),
-    ];
-
-    // count fields
-    //   1   5 1
-    //   3   4 2
-    //   4   4 2
-    //   2   5 1
-    //   0   123 3
-    // 545   889 4
-    //   1   889 4
-    //   0   456 5
-    let receiver = vec![
-        (1, vec![5, 1]),
-        (3, vec![4, 2]),
-        (4, vec![4, 2]),
-        (2, vec![5, 1]),
-        (0, vec![123, 3]),
-        (545, vec![889, 4]),
-        (1, vec![889, 4]),
-        (0, vec![456, 5]),
-    ];
-
-    prove_and_verify_indexless_lookups(sender, receiver).expect("Verification failed");
-}
-
-#[test]
-fn test_interaction_cached_trace_neg() {
-    // count fields
-    //   0   1 1
-    //   7   4 2
-    //   3   5 1
-    // 546   889 4
-    let sender = vec![
-        (0, vec![1, 1]),
-        (7, vec![4, 2]),
-        (3, vec![5, 1]),
-        (546, vec![889, 4]),
-    ];
-
-    // field [889, 4] has count 545 != 546 in sender
-    // count fields
-    //   1   5 1
-    //   3   4 2
-    //   4   4 2
-    //   2   5 1
-    //   0   123 3
-    // 545   889 4
-    //   1   889 10
-    //   0   456 5
-    let receiver = vec![
-        (1, vec![5, 1]),
-        (3, vec![4, 2]),
-        (4, vec![4, 2]),
-        (2, vec![5, 1]),
-        (0, vec![123, 3]),
-        (545, vec![889, 4]),
-        (1, vec![889, 10]),
-        (0, vec![456, 5]),
-    ];
-
-    USE_DEBUG_BUILDER.with(|debug| {
-        *debug.lock().unwrap() = false;
-    });
-    assert_eq!(
-        prove_and_verify_indexless_lookups(sender, receiver).err(),
-        Some(VerificationError::ChallengePhaseError)
-    );
-}
diff --git a/crates/stark-backend/tests/cached_lookup/prove.rs b/crates/stark-backend/tests/cached_lookup/prove.rs
deleted file mode 100644
index 04e53a3fb1..0000000000
--- a/crates/stark-backend/tests/cached_lookup/prove.rs
+++ /dev/null
@@ -1,206 +0,0 @@
-use std::{
-    fs::{self, File},
-    sync::Arc,
-    time::Instant,
-};
-
-use openvm_stark_backend::{
-    config::StarkGenericConfig,
-    keygen::types::MultiStarkVerifyingKey,
-    prover::types::{Proof, ProofInput},
-    utils::disable_debug_builder,
-    Chip,
-};
-use openvm_stark_sdk::{
-    config::{
-        baby_bear_poseidon2::{engine_from_perm, random_perm},
-        fri_params::standard_fri_params_with_100_bits_conjectured_security,
-        FriParameters,
-    },
-    dummy_airs::interaction::dummy_interaction_air::{
-        DummyInteractionAir, DummyInteractionChip, DummyInteractionData,
-    },
-    engine::StarkEngine,
-};
-use rand::{rngs::StdRng, Rng, SeedableRng};
-use serde::{Deserialize, Serialize};
-
-// Lookup table is cached, everything else (including counts) is committed together
-#[allow(clippy::type_complexity)]
-pub fn prove<SC: StarkGenericConfig, E: StarkEngine<SC>>(
-    engine: &E,
-    trace: Vec<(u32, Vec<u32>)>,
-    partition: bool,
-) -> (
-    MultiStarkVerifyingKey<SC>,
-    Arc<DummyInteractionAir>,
-    Proof<SC>,
-    ProverBenchmarks,
-) {
-    let mut chip =
-        DummyInteractionChip::new_with_partition(engine.config().pcs(), trace[0].1.len(), false, 0);
-    let (count, fields): (Vec<_>, Vec<_>) = trace.into_iter().unzip();
-    let data = DummyInteractionData { count, fields };
-    chip.load_data(data);
-
-    let mut keygen_builder = engine.keygen_builder();
-    let air_id = keygen_builder.add_air(chip.air());
-    let pk = keygen_builder.generate_pk();
-    let vk = pk.get_vk();
-
-    let mut benchmarks = ProverBenchmarks::default();
-    let prover = engine.prover();
-    let air = Arc::new(chip.air);
-    // Must add trace matrices in the same order as above
-    let mut start;
-    let air_proof_input = if partition {
-        start = Instant::now();
-        // Receiver fields table is cached
-        let ret = chip.generate_air_proof_input_with_id(air_id);
-        benchmarks.cached_commit_time = start.elapsed().as_micros();
-        ret
-    } else {
-        chip.generate_air_proof_input_with_id(air_id)
-    };
-    let proof_input = ProofInput {
-        per_air: vec![air_proof_input],
-    };
-    start = Instant::now();
-
-    // Disable debug prover since we don't balance the buses
-    disable_debug_builder();
-    let mut challenger = engine.new_challenger();
-    let proof = prover.prove(&mut challenger, &pk, proof_input);
-    benchmarks.prove_time_without_trace_gen = start.elapsed().as_micros();
-
-    (vk, air, proof, benchmarks)
-}
-
-#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize)]
-pub struct ProverBenchmarks {
-    pub cached_commit_time: u128,
-    /// Includes common main trace commitment time.
-    pub prove_time_without_trace_gen: u128,
-}
-
-#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
-pub struct BenchParams {
-    pub field_width: usize,
-    pub log_degree: usize,
-}
-
-pub fn generate_random_trace(
-    mut rng: impl Rng,
-    field_width: usize,
-    height: usize,
-) -> Vec<(u32, Vec<u32>)> {
-    (0..height)
-        .map(|_| {
-            (
-                rng.gen_range(0..1000),
-                (0..field_width).map(|_| rng.gen()).collect(),
-            )
-        })
-        .collect()
-}
-
-pub fn get_data_sizes() -> Vec<(usize, usize)> {
-    let format_data_sizes =
-        |field_widths: &[usize], log_degrees: &[usize]| -> Vec<(usize, usize)> {
-            field_widths
-                .iter()
-                .flat_map(|field_width| {
-                    log_degrees
-                        .iter()
-                        .map(|log_degree| (*field_width, *log_degree))
-                })
-                .collect::<Vec<_>>()
-        };
-    let mut data_sizes: Vec<(usize, usize)> =
-        format_data_sizes(&[1, 2, 5, 10, 50, 100], &[3, 5, 10, 13, 15, 16, 18, 20]);
-    data_sizes.extend(format_data_sizes(&[200, 500, 1000], &[1, 2, 3, 5, 10]));
-    data_sizes
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct ProverStatistics {
-    /// Identifier for the hash permutation
-    pub name: String,
-    pub fri_params: FriParameters,
-    pub bench_params: BenchParams,
-    pub without_ct: ProverBenchmarks,
-    pub with_ct: ProverBenchmarks,
-}
-
-fn compare_provers(
-    fri_params: FriParameters,
-    field_width: usize,
-    log_degree: usize,
-) -> ProverStatistics {
-    let rng = StdRng::seed_from_u64(0);
-    let trace = generate_random_trace(rng, field_width, 1 << log_degree);
-    let engine = engine_from_perm(random_perm(), fri_params);
-    let (_, _, _, without_ct) = prove(&engine, trace.clone(), false);
-
-    let (_, _, _, with_ct) = prove(&engine, trace, true);
-
-    ProverStatistics {
-        name: "Poseidon2Perm16".to_string(),
-        fri_params,
-        bench_params: BenchParams {
-            field_width,
-            log_degree,
-        },
-        without_ct,
-        with_ct,
-    }
-}
-
-// Run with `RUSTFLAGS="-Ctarget-cpu=native" cargo t --release -- --ignored --nocapture bench_cached_trace_prover`
-#[test]
-#[ignore = "bench"]
-fn bench_cached_trace_prover() -> eyre::Result<()> {
-    let fri_params = [1, 2, 3, 4]
-        .map(standard_fri_params_with_100_bits_conjectured_security)
-        .to_vec();
-    let data_sizes = get_data_sizes();
-
-    // Write to csv as we go
-    let cargo_manifest_dir = env!("CARGO_MANIFEST_DIR");
-    let _ = fs::create_dir_all(format!("{}/data", cargo_manifest_dir));
-    let csv_path = format!("{}/data/cached_trace_prover.csv", cargo_manifest_dir);
-    let mut wtr = csv::WriterBuilder::new()
-        .has_headers(false)
-        .from_path(csv_path)?;
-    // Manually write record because header cannot handle nested struct well
-    wtr.write_record([
-        "permutation_name",
-        "log_blowup",
-        "num_queries",
-        "proof_of_work_bits",
-        "page_width",
-        "log_degree",
-        "",
-        "without_ct.main_commit_time(µs)",
-        "without_ct.prove_time(µs)",
-        "with_ct.cache_commit_time(µs)",
-        "with_ct.main_commit_time(µs)",
-        "with_ct.prove_time(µs)",
-    ])?;
-
-    let mut all_stats = vec![];
-    for fri_param in fri_params {
-        for (field_width, log_degree) in &data_sizes {
-            let stats = compare_provers(fri_param, *field_width, *log_degree);
-            wtr.serialize(&stats)?;
-            wtr.flush()?;
-            all_stats.push(stats);
-        }
-    }
-
-    let json_path = format!("{}/data/cached_trace_prover.json", cargo_manifest_dir);
-    let file = File::create(json_path)?;
-    serde_json::to_writer(file, &all_stats)?;
-
-    Ok(())
-}
diff --git a/crates/stark-backend/tests/fib_selector_air/air.rs b/crates/stark-backend/tests/fib_selector_air/air.rs
deleted file mode 100644
index 7e796f6da1..0000000000
--- a/crates/stark-backend/tests/fib_selector_air/air.rs
+++ /dev/null
@@ -1,100 +0,0 @@
-use std::borrow::Borrow;
-
-use openvm_stark_backend::{
-    interaction::InteractionBuilder,
-    p3_field::{AbstractField, Field},
-    rap::{BaseAirWithPublicValues, PartitionedBaseAir},
-};
-use openvm_stark_sdk::dummy_airs::fib_air::columns::{FibonacciCols, NUM_FIBONACCI_COLS};
-use p3_air::{Air, AirBuilder, AirBuilderWithPublicValues, BaseAir, PairBuilder};
-use p3_matrix::{dense::RowMajorMatrix, Matrix};
-
-use super::columns::FibonacciSelectorCols;
-
-pub struct FibonacciSelectorAir {
-    sels: Vec<bool>,
-    enable_interactions: bool,
-}
-
-impl FibonacciSelectorAir {
-    pub fn new(sels: Vec<bool>, enable_interactions: bool) -> Self {
-        Self {
-            sels,
-            enable_interactions,
-        }
-    }
-
-    pub fn sels(&self) -> &[bool] {
-        &self.sels
-    }
-}
-
-impl<F: Field> PartitionedBaseAir<F> for FibonacciSelectorAir {}
-impl<F: Field> BaseAir<F> for FibonacciSelectorAir {
-    fn width(&self) -> usize {
-        NUM_FIBONACCI_COLS
-    }
-
-    fn preprocessed_trace(&self) -> Option<RowMajorMatrix<F>> {
-        let sels = self.sels.iter().map(|&s| F::from_bool(s)).collect();
-        Some(RowMajorMatrix::new_col(sels))
-    }
-}
-
-impl<F: Field> BaseAirWithPublicValues<F> for FibonacciSelectorAir {
-    fn num_public_values(&self) -> usize {
-        3
-    }
-}
-
-impl<AB: InteractionBuilder + PairBuilder + AirBuilderWithPublicValues> Air<AB>
-    for FibonacciSelectorAir
-{
-    fn eval(&self, builder: &mut AB) {
-        let pis = builder.public_values();
-        let preprocessed = builder.preprocessed();
-        let main = builder.main();
-
-        let a = pis[0];
-        let b = pis[1];
-        let x = pis[2];
-
-        let preprocessed_local = preprocessed.row_slice(0);
-        let preprocessed_local: &FibonacciSelectorCols<AB::Var> = (*preprocessed_local).borrow();
-
-        let (local, next) = (main.row_slice(0), main.row_slice(1));
-        let local: &FibonacciCols<AB::Var> = (*local).borrow();
-        let next: &FibonacciCols<AB::Var> = (*next).borrow();
-
-        let mut when_first_row = builder.when_first_row();
-
-        when_first_row.assert_eq(local.left, a);
-        when_first_row.assert_eq(local.right, b);
-
-        // a' <- sel*b + (1 - sel)*a
-        builder
-            .when_transition()
-            .when(preprocessed_local.sel)
-            .assert_eq(local.right, next.left);
-        builder
-            .when_transition()
-            .when_ne(preprocessed_local.sel, AB::Expr::ONE)
-            .assert_eq(local.left, next.left);
-
-        // b' <- sel*(a + b) + (1 - sel)*b
-        builder
-            .when_transition()
-            .when(preprocessed_local.sel)
-            .assert_eq(local.left + local.right, next.right);
-        builder
-            .when_transition()
-            .when_ne(preprocessed_local.sel, AB::Expr::ONE)
-            .assert_eq(local.right, next.right);
-
-        builder.when_last_row().assert_eq(local.right, x);
-
-        if self.enable_interactions {
-            builder.push_receive(0, vec![local.left + local.right], preprocessed_local.sel);
-        }
-    }
-}
diff --git a/crates/stark-backend/tests/fib_selector_air/columns.rs b/crates/stark-backend/tests/fib_selector_air/columns.rs
deleted file mode 100644
index 0076440bed..0000000000
--- a/crates/stark-backend/tests/fib_selector_air/columns.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-use openvm_circuit_primitives_derive::AlignedBorrow;
-
-#[repr(C)]
-#[derive(AlignedBorrow)]
-pub struct FibonacciSelectorCols<F> {
-    pub sel: F,
-}
diff --git a/crates/stark-backend/tests/fib_selector_air/mod.rs b/crates/stark-backend/tests/fib_selector_air/mod.rs
deleted file mode 100644
index 47ab3e66b4..0000000000
--- a/crates/stark-backend/tests/fib_selector_air/mod.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-pub mod air;
-pub mod columns;
-pub mod trace;
diff --git a/crates/stark-backend/tests/fib_selector_air/trace.rs b/crates/stark-backend/tests/fib_selector_air/trace.rs
deleted file mode 100644
index 18fbcfc699..0000000000
--- a/crates/stark-backend/tests/fib_selector_air/trace.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use openvm_stark_backend::p3_field::PrimeField32;
-use openvm_stark_sdk::dummy_airs::fib_air::columns::NUM_FIBONACCI_COLS;
-use p3_matrix::dense::RowMajorMatrix;
-
-/// sels contain boolean selectors to enable the fibonacci gate
-pub fn generate_trace_rows<F: PrimeField32>(a: u32, b: u32, sels: &[bool]) -> RowMajorMatrix<F> {
-    let n = sels.len();
-    assert!(n.is_power_of_two());
-
-    let mut rows = vec![vec![F::from_canonical_u32(a), F::from_canonical_u32(b)]];
-
-    for i in 1..n {
-        if sels[i - 1] {
-            rows.push(vec![rows[i - 1][1], rows[i - 1][0] + rows[i - 1][1]]);
-        } else {
-            rows.push(vec![rows[i - 1][0], rows[i - 1][1]]);
-        }
-    }
-
-    RowMajorMatrix::new(rows.concat(), NUM_FIBONACCI_COLS)
-}
diff --git a/crates/stark-backend/tests/fib_triples_air/air.rs b/crates/stark-backend/tests/fib_triples_air/air.rs
deleted file mode 100644
index ae82144f99..0000000000
--- a/crates/stark-backend/tests/fib_triples_air/air.rs
+++ /dev/null
@@ -1,56 +0,0 @@
-use std::borrow::Borrow;
-
-use openvm_stark_backend::rap::{BaseAirWithPublicValues, PartitionedBaseAir};
-use p3_air::{Air, AirBuilder, AirBuilderWithPublicValues, BaseAir};
-use p3_matrix::Matrix;
-
-use super::columns::{FibonacciCols, NUM_FIBONACCI_COLS};
-
-pub struct FibonacciAir;
-
-impl<F> PartitionedBaseAir<F> for FibonacciAir {}
-impl<F> BaseAir<F> for FibonacciAir {
-    fn width(&self) -> usize {
-        NUM_FIBONACCI_COLS
-    }
-}
-
-impl<F> BaseAirWithPublicValues<F> for FibonacciAir {
-    fn num_public_values(&self) -> usize {
-        3
-    }
-}
-
-impl<AB: AirBuilderWithPublicValues> Air<AB> for FibonacciAir {
-    fn eval(&self, builder: &mut AB) {
-        let main = builder.main();
-        let pis = builder.public_values();
-
-        let a = pis[0];
-        let b = pis[1];
-        let x = pis[2];
-
-        let (local, next) = (main.row_slice(0), main.row_slice(1));
-        let local: &FibonacciCols<AB::Var> = (*local).borrow();
-        let next: &FibonacciCols<AB::Var> = (*next).borrow();
-
-        let mut when_first_row = builder.when_first_row();
-
-        when_first_row.assert_eq(local.left, a);
-        when_first_row.assert_eq(local.middle, b);
-        when_first_row.assert_eq(local.right, local.left + local.middle);
-
-        let mut when_transition = builder.when_transition();
-
-        // a' <- b
-        when_transition.assert_eq(local.middle, next.left);
-
-        // b' <- c
-        when_transition.assert_eq(local.right, next.middle);
-
-        // c' <- b + c
-        when_transition.assert_eq(local.middle + local.right, next.right);
-
-        builder.when_last_row().assert_eq(local.right, x);
-    }
-}
diff --git a/crates/stark-backend/tests/fib_triples_air/columns.rs b/crates/stark-backend/tests/fib_triples_air/columns.rs
deleted file mode 100644
index 9d3bc6f08e..0000000000
--- a/crates/stark-backend/tests/fib_triples_air/columns.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use openvm_circuit_primitives_derive::AlignedBorrow;
-
-pub const NUM_FIBONACCI_COLS: usize = 3;
-
-#[repr(C)]
-#[derive(AlignedBorrow)]
-pub struct FibonacciCols<F> {
-    pub left: F,
-    pub middle: F,
-    pub right: F,
-}
-
-impl<F> FibonacciCols<F> {
-    pub const fn new(left: F, middle: F, right: F) -> FibonacciCols<F> {
-        FibonacciCols {
-            left,
-            middle,
-            right,
-        }
-    }
-}
diff --git a/crates/stark-backend/tests/fib_triples_air/mod.rs b/crates/stark-backend/tests/fib_triples_air/mod.rs
deleted file mode 100644
index 47ab3e66b4..0000000000
--- a/crates/stark-backend/tests/fib_triples_air/mod.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-pub mod air;
-pub mod columns;
-pub mod trace;
diff --git a/crates/stark-backend/tests/fib_triples_air/trace.rs b/crates/stark-backend/tests/fib_triples_air/trace.rs
deleted file mode 100644
index ff1ee75775..0000000000
--- a/crates/stark-backend/tests/fib_triples_air/trace.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use openvm_stark_backend::p3_field::PrimeField32;
-use p3_matrix::dense::RowMajorMatrix;
-
-use crate::fib_triples_air::columns::NUM_FIBONACCI_COLS;
-
-/// n is number of rows in the trace
-pub fn generate_trace_rows<F: PrimeField32>(a: u32, b: u32, n: usize) -> RowMajorMatrix<F> {
-    assert!(n.is_power_of_two());
-
-    let c = a + b;
-
-    let mut rows = vec![vec![
-        F::from_canonical_u32(a),
-        F::from_canonical_u32(b),
-        F::from_canonical_u32(c),
-    ]];
-
-    for i in 1..n {
-        rows.push(vec![
-            rows[i - 1][1],
-            rows[i - 1][2],
-            rows[i - 1][1] + rows[i - 1][2],
-        ]);
-    }
-
-    RowMajorMatrix::new(rows.concat(), NUM_FIBONACCI_COLS)
-}
diff --git a/crates/stark-backend/tests/integration_test.rs b/crates/stark-backend/tests/integration_test.rs
deleted file mode 100644
index 717a238453..0000000000
--- a/crates/stark-backend/tests/integration_test.rs
+++ /dev/null
@@ -1,262 +0,0 @@
-#![feature(trait_upcasting)]
-#![allow(incomplete_features)]
-
-use openvm_stark_backend::{
-    config::StarkGenericConfig, p3_field::AbstractField, utils::disable_debug_builder, Chip,
-};
-/// Test utils
-use openvm_stark_sdk::{
-    any_rap_arc_vec, config,
-    config::{baby_bear_poseidon2::BabyBearPoseidon2Engine, FriParameters},
-    dummy_airs::{
-        fib_air::chip::FibonacciChip,
-        interaction::dummy_interaction_air::{DummyInteractionChip, DummyInteractionData},
-    },
-    engine::StarkFriEngine,
-    utils,
-};
-use p3_baby_bear::BabyBear;
-
-mod cached_lookup;
-mod fib_selector_air;
-mod fib_triples_air;
-pub mod interaction;
-mod partitioned_sum_air;
-
-#[test]
-fn test_single_fib_stark() {
-    use openvm_stark_sdk::dummy_airs::fib_air::{air::FibonacciAir, trace::generate_trace_rows};
-
-    let log_trace_degree = 3;
-
-    // Public inputs:
-    let a = 0u32;
-    let b = 1u32;
-    let n = 1usize << log_trace_degree;
-
-    type Val = BabyBear;
-    let pis = [a, b, get_fib_number(n)]
-        .map(BabyBear::from_canonical_u32)
-        .to_vec();
-    let air = FibonacciAir;
-
-    let trace = generate_trace_rows::<Val>(a, b, n);
-
-    BabyBearPoseidon2Engine::run_simple_test_fast(any_rap_arc_vec![air], vec![trace], vec![pis])
-        .expect("Verification failed");
-}
-
-#[test]
-fn test_single_fib_triples_stark() {
-    use fib_triples_air::{air::FibonacciAir, trace::generate_trace_rows};
-
-    let log_trace_degree = 3;
-
-    // Public inputs:
-    let a = 0u32;
-    let b = 1u32;
-    let n = 1usize << log_trace_degree;
-
-    type Val = BabyBear;
-    let pis = [a, b, get_fib_number(n + 1)]
-        .map(BabyBear::from_canonical_u32)
-        .to_vec();
-
-    let air = FibonacciAir;
-
-    let trace = generate_trace_rows::<Val>(a, b, n);
-
-    BabyBearPoseidon2Engine::run_simple_test_fast(any_rap_arc_vec![air], vec![trace], vec![pis])
-        .expect("Verification failed");
-}
-
-#[test]
-fn test_single_fib_selector_stark() {
-    use fib_selector_air::{air::FibonacciSelectorAir, trace::generate_trace_rows};
-
-    let log_trace_degree = 3;
-
-    // Public inputs:
-    let a = 0u32;
-    let b = 1u32;
-    let n = 1usize << log_trace_degree;
-
-    type Val = BabyBear;
-    let sels: Vec<bool> = (0..n).map(|i| i % 2 == 0).collect();
-    let pis = [a, b, get_conditional_fib_number(&sels)]
-        .map(BabyBear::from_canonical_u32)
-        .to_vec();
-
-    let air = FibonacciSelectorAir::new(sels, false);
-
-    let trace = generate_trace_rows::<Val>(a, b, air.sels());
-
-    BabyBearPoseidon2Engine::run_simple_test_fast(any_rap_arc_vec![air], vec![trace], vec![pis])
-        .expect("Verification failed");
-}
-
-#[test]
-fn test_double_fib_starks() {
-    use fib_selector_air::air::FibonacciSelectorAir;
-    use openvm_stark_sdk::dummy_airs::{fib_air, fib_air::air::FibonacciAir};
-
-    let log_n1 = 3;
-    let log_n2 = 5;
-
-    // Public inputs:
-    let a = 0u32;
-    let b = 1u32;
-    let n1 = 1usize << log_n1;
-    let n2 = 1usize << log_n2;
-
-    type Val = BabyBear;
-    let sels: Vec<bool> = (0..n2).map(|i| i % 2 == 0).collect(); // Evens
-    let pis1 = [a, b, get_fib_number(n1)]
-        .map(BabyBear::from_canonical_u32)
-        .to_vec();
-    let pis2 = [a, b, get_conditional_fib_number(&sels)]
-        .map(BabyBear::from_canonical_u32)
-        .to_vec();
-
-    let air1 = FibonacciAir;
-    let air2 = FibonacciSelectorAir::new(sels, false);
-
-    let trace1 = fib_air::trace::generate_trace_rows::<Val>(a, b, n1);
-    let trace2 = fib_selector_air::trace::generate_trace_rows::<Val>(a, b, air2.sels());
-
-    BabyBearPoseidon2Engine::run_simple_test_fast(
-        any_rap_arc_vec![air1, air2],
-        vec![trace1, trace2],
-        vec![pis1, pis2],
-    )
-    .expect("Verification failed");
-}
-
-#[test]
-fn test_optional_air() {
-    use openvm_stark_backend::{engine::StarkEngine, prover::types::ProofInput};
-
-    let engine = BabyBearPoseidon2Engine::new(FriParameters::standard_fast());
-    let fib_chip = FibonacciChip::new(0, 1, 8);
-    let send_chip1 = DummyInteractionChip::new_without_partition(1, true, 0);
-    let send_chip2 = DummyInteractionChip::new_with_partition(engine.config().pcs(), 1, true, 0);
-    let recv_chip1 = DummyInteractionChip::new_without_partition(1, false, 0);
-    let mut keygen_builder = engine.keygen_builder();
-    let fib_chip_id = keygen_builder.add_air(fib_chip.air());
-    let send_chip1_id = keygen_builder.add_air(send_chip1.air());
-    let send_chip2_id = keygen_builder.add_air(send_chip2.air());
-    let recv_chip1_id = keygen_builder.add_air(recv_chip1.air());
-    let pk = keygen_builder.generate_pk();
-    let prover = engine.prover();
-    let verifier = engine.verifier();
-
-    // Case 1: All AIRs are present.
-    {
-        let fib_chip = fib_chip.clone();
-        let mut send_chip1 = send_chip1.clone();
-        let mut send_chip2 = send_chip2.clone();
-        let mut recv_chip1 = recv_chip1.clone();
-        let mut challenger = engine.new_challenger();
-        send_chip1.load_data(DummyInteractionData {
-            count: vec![1, 2, 4],
-            fields: vec![vec![1], vec![2], vec![3]],
-        });
-        send_chip2.load_data(DummyInteractionData {
-            count: vec![1, 2, 8],
-            fields: vec![vec![1], vec![2], vec![3]],
-        });
-        recv_chip1.load_data(DummyInteractionData {
-            count: vec![2, 4, 12],
-            fields: vec![vec![1], vec![2], vec![3]],
-        });
-        let proof = prover.prove(
-            &mut challenger,
-            &pk,
-            ProofInput {
-                per_air: vec![
-                    fib_chip.generate_air_proof_input_with_id(fib_chip_id),
-                    send_chip1.generate_air_proof_input_with_id(send_chip1_id),
-                    send_chip2.generate_air_proof_input_with_id(send_chip2_id),
-                    recv_chip1.generate_air_proof_input_with_id(recv_chip1_id),
-                ],
-            },
-        );
-        let mut challenger = engine.new_challenger();
-        verifier
-            .verify(&mut challenger, &pk.get_vk(), &proof)
-            .expect("Verification failed");
-    }
-    // Case 2: The second AIR is not presented.
-    {
-        let mut send_chip1 = send_chip1.clone();
-        let mut recv_chip1 = recv_chip1.clone();
-        let mut challenger = engine.new_challenger();
-        send_chip1.load_data(DummyInteractionData {
-            count: vec![1, 2, 4],
-            fields: vec![vec![1], vec![2], vec![3]],
-        });
-        recv_chip1.load_data(DummyInteractionData {
-            count: vec![1, 2, 4],
-            fields: vec![vec![1], vec![2], vec![3]],
-        });
-        let proof = prover.prove(
-            &mut challenger,
-            &pk,
-            ProofInput {
-                per_air: vec![
-                    send_chip1.generate_air_proof_input_with_id(send_chip1_id),
-                    recv_chip1.generate_air_proof_input_with_id(recv_chip1_id),
-                ],
-            },
-        );
-        let mut challenger = engine.new_challenger();
-        verifier
-            .verify(&mut challenger, &pk.get_vk(), &proof)
-            .expect("Verification failed");
-    }
-    // Case 3: Negative - unbalanced interactions.
-    {
-        disable_debug_builder();
-        let mut recv_chip1 = recv_chip1.clone();
-        let mut challenger = engine.new_challenger();
-        recv_chip1.load_data(DummyInteractionData {
-            count: vec![1, 2, 4],
-            fields: vec![vec![1], vec![2], vec![3]],
-        });
-        let proof = prover.prove(
-            &mut challenger,
-            &pk,
-            ProofInput {
-                per_air: vec![recv_chip1.generate_air_proof_input_with_id(recv_chip1_id)],
-            },
-        );
-        let mut challenger = engine.new_challenger();
-        assert!(verifier
-            .verify(&mut challenger, &pk.get_vk(), &proof)
-            .is_err());
-    }
-}
-
-fn get_fib_number(n: usize) -> u32 {
-    let mut a = 0;
-    let mut b = 1;
-    for _ in 0..n - 1 {
-        let c = a + b;
-        a = b;
-        b = c;
-    }
-    b
-}
-
-fn get_conditional_fib_number(sels: &[bool]) -> u32 {
-    let mut a = 0;
-    let mut b = 1;
-    for &s in sels[0..sels.len() - 1].iter() {
-        if s {
-            let c = a + b;
-            a = b;
-            b = c;
-        }
-    }
-    b
-}
diff --git a/crates/stark-backend/tests/interaction/mod.rs b/crates/stark-backend/tests/interaction/mod.rs
deleted file mode 100644
index 631a0eaeec..0000000000
--- a/crates/stark-backend/tests/interaction/mod.rs
+++ /dev/null
@@ -1,276 +0,0 @@
-use itertools::Itertools;
-use openvm_stark_backend::{
-    p3_field::AbstractField, prover::USE_DEBUG_BUILDER, verifier::VerificationError,
-};
-use openvm_stark_sdk::{
-    any_rap_arc_vec,
-    dummy_airs::interaction::{dummy_interaction_air::DummyInteractionAir, verify_interactions},
-};
-use p3_baby_bear::BabyBear;
-use p3_matrix::dense::RowMajorMatrix;
-
-use crate::{
-    fib_selector_air::{air::FibonacciSelectorAir, trace::generate_trace_rows},
-    get_conditional_fib_number,
-    utils::to_field_vec,
-};
-
-type Val = BabyBear;
-
-#[test]
-fn test_interaction_fib_selector_happy_path() {
-    let log_trace_degree = 3;
-
-    // Public inputs:
-    let a = 0u32;
-    let b = 1u32;
-    let n = 1usize << log_trace_degree;
-
-    let sels: Vec<bool> = (0..n).map(|i| i % 2 == 0).collect();
-    let fib_res = get_conditional_fib_number(&sels);
-    let pis = vec![a, b, fib_res]
-        .into_iter()
-        .map(Val::from_canonical_u32)
-        .collect_vec();
-
-    let air = FibonacciSelectorAir::new(sels.clone(), true);
-    let trace = generate_trace_rows::<Val>(a, b, &sels);
-
-    let mut curr_a = a;
-    let mut curr_b = b;
-    let mut vals = vec![];
-    for sel in sels {
-        vals.push(Val::from_bool(sel));
-        if sel {
-            let c = curr_a + curr_b;
-            curr_a = curr_b;
-            curr_b = c;
-        }
-        vals.push(Val::from_canonical_u32(curr_b));
-    }
-    let sender_trace = RowMajorMatrix::new(vals, 2);
-    let sender_air = DummyInteractionAir::new(1, true, 0);
-    verify_interactions(
-        vec![trace, sender_trace],
-        any_rap_arc_vec![air, sender_air],
-        vec![pis, vec![]],
-    )
-    .expect("Verification failed");
-}
-
-#[test]
-fn test_interaction_stark_multi_rows_happy_path() {
-    // Mul Val
-    //   0   1
-    //   7   4
-    //   3   5
-    // 546 889
-    let sender_trace =
-        RowMajorMatrix::new(to_field_vec::<Val>(vec![0, 1, 3, 5, 7, 4, 546, 889]), 2);
-    let sender_air = DummyInteractionAir::new(1, true, 0);
-
-    // Mul Val
-    //   1   5
-    //   3   4
-    //   4   4
-    //   2   5
-    //   0 123
-    // 545 889
-    //   1 889
-    //   0 456
-    let receiver_trace = RowMajorMatrix::new(
-        to_field_vec(vec![
-            1, 5, 3, 4, 4, 4, 2, 5, 0, 123, 545, 889, 1, 889, 0, 456,
-        ]),
-        2,
-    );
-    let receiver_air = DummyInteractionAir::new(1, false, 0);
-    verify_interactions(
-        vec![sender_trace, receiver_trace],
-        any_rap_arc_vec![sender_air, receiver_air],
-        vec![vec![], vec![]],
-    )
-    .expect("Verification failed");
-}
-
-#[test]
-fn test_interaction_stark_multi_rows_neg() {
-    // Mul Val
-    //   0   1
-    //   3   5
-    //   7   4
-    // 546   0
-    let sender_trace = RowMajorMatrix::new(to_field_vec(vec![0, 1, 3, 5, 7, 4, 546, 0]), 2);
-    let sender_air = DummyInteractionAir::new(1, true, 0);
-
-    // count of 0 is 545 != 546 in send.
-    // Mul Val
-    //   1   5
-    //   3   4
-    //   4   4
-    //   2   5
-    //   0 123
-    // 545   0
-    //   0   0
-    //   0 456
-    let receiver_trace = RowMajorMatrix::new(
-        to_field_vec(vec![1, 5, 3, 4, 4, 4, 2, 5, 0, 123, 545, 0, 0, 0, 0, 456]),
-        2,
-    );
-    let receiver_air = DummyInteractionAir::new(1, false, 0);
-    USE_DEBUG_BUILDER.with(|debug| {
-        *debug.lock().unwrap() = false;
-    });
-    let res = verify_interactions(
-        vec![sender_trace, receiver_trace],
-        any_rap_arc_vec![sender_air, receiver_air],
-        vec![vec![], vec![]],
-    );
-    assert_eq!(res, Err(VerificationError::ChallengePhaseError));
-}
-
-#[test]
-fn test_interaction_stark_all_0_sender_happy_path() {
-    // Mul Val
-    //   0   1
-    //   0 646
-    //   0   0
-    //   0 589
-    let sender_trace = RowMajorMatrix::new(to_field_vec(vec![0, 1, 0, 5, 0, 4, 0, 889]), 2);
-    let sender_air = DummyInteractionAir::new(1, true, 0);
-    verify_interactions(
-        vec![sender_trace],
-        any_rap_arc_vec![sender_air],
-        vec![vec![]],
-    )
-    .expect("Verification failed");
-}
-
-#[test]
-fn test_interaction_stark_multi_senders_happy_path() {
-    // Mul Val
-    //   0   1
-    //   6   4
-    //   3   5
-    // 333 889
-    let sender_trace1 = RowMajorMatrix::new(to_field_vec(vec![0, 1, 3, 5, 6, 4, 333, 889]), 2);
-    // Mul Val
-    //   1   4
-    // 213 889
-    let sender_trace2 = RowMajorMatrix::new(to_field_vec(vec![1, 4, 213, 889]), 2);
-
-    let sender_air = DummyInteractionAir::new(1, true, 0);
-
-    // Mul Val
-    //   1   5
-    //   3   4
-    //   4   4
-    //   2   5
-    //   0 123
-    // 545 889
-    //   1 889
-    //   0 456
-    let receiver_trace = RowMajorMatrix::new(
-        to_field_vec(vec![
-            1, 5, 3, 4, 4, 4, 2, 5, 0, 123, 545, 889, 1, 889, 0, 456,
-        ]),
-        2,
-    );
-    let receiver_air = DummyInteractionAir::new(1, false, 0);
-    verify_interactions(
-        vec![sender_trace1, sender_trace2, receiver_trace],
-        any_rap_arc_vec![sender_air, sender_air, receiver_air],
-        vec![vec![]; 3],
-    )
-    .expect("Verification failed");
-}
-
-#[test]
-fn test_interaction_stark_multi_senders_neg() {
-    // Mul Val
-    //   0   1
-    //   5   4
-    //   3   5
-    // 333 889
-    let sender_trace1 = RowMajorMatrix::new(to_field_vec(vec![0, 1, 3, 5, 5, 4, 333, 889]), 2);
-    // Mul Val
-    //   1   4
-    // 213 889
-    let sender_trace2 = RowMajorMatrix::new(to_field_vec(vec![1, 4, 213, 889]), 2);
-
-    let sender_air = DummyInteractionAir::new(1, true, 0);
-
-    // Mul Val
-    //   1   5
-    //   3   4
-    //   4   4
-    //   2   5
-    //   0 123
-    // 545 889
-    //   1 889
-    //   0 456
-    let receiver_trace = RowMajorMatrix::new(
-        to_field_vec(vec![
-            1, 5, 3, 4, 4, 4, 2, 5, 0, 123, 545, 889, 1, 889, 0, 456,
-        ]),
-        2,
-    );
-    let receiver_air = DummyInteractionAir::new(1, false, 0);
-    USE_DEBUG_BUILDER.with(|debug| {
-        *debug.lock().unwrap() = false;
-    });
-    let res = verify_interactions(
-        vec![sender_trace1, sender_trace2, receiver_trace],
-        any_rap_arc_vec![sender_air, sender_air, receiver_air],
-        vec![vec![]; 3],
-    );
-    assert_eq!(res, Err(VerificationError::ChallengePhaseError));
-}
-
-#[test]
-fn test_interaction_stark_multi_sender_receiver_happy_path() {
-    // Mul Val
-    //   0   1
-    //   6   4
-    //   3   5
-    // 333 889
-    let sender_trace1 = RowMajorMatrix::new(to_field_vec(vec![0, 1, 3, 5, 6, 4, 333, 889]), 2);
-    // Mul Val
-    //   1   4
-    // 213 889
-    let sender_trace2 = RowMajorMatrix::new(to_field_vec(vec![1, 4, 213, 889]), 2);
-
-    let sender_air = DummyInteractionAir::new(1, true, 0);
-
-    // Mul Val
-    //   1   5
-    //   3   4
-    //   4   4
-    //   2   5
-    //   0 123
-    // 545 889
-    //   0 289
-    //   0 456
-    let receiver_trace1 = RowMajorMatrix::new(
-        to_field_vec(vec![
-            1, 5, 3, 4, 4, 4, 2, 5, 0, 123, 545, 889, 0, 289, 0, 456,
-        ]),
-        2,
-    );
-
-    // Mul Val
-    //   1 889
-    let receiver_trace2 = RowMajorMatrix::new(to_field_vec(vec![1, 889]), 2);
-    let receiver_air = DummyInteractionAir::new(1, false, 0);
-    verify_interactions(
-        vec![
-            sender_trace1,
-            sender_trace2,
-            receiver_trace1,
-            receiver_trace2,
-        ],
-        any_rap_arc_vec![sender_air, sender_air, receiver_air, receiver_air],
-        vec![vec![]; 4],
-    )
-    .expect("Verification failed");
-}
diff --git a/crates/stark-backend/tests/partitioned_sum_air/air.rs b/crates/stark-backend/tests/partitioned_sum_air/air.rs
deleted file mode 100644
index b4f638b62f..0000000000
--- a/crates/stark-backend/tests/partitioned_sum_air/air.rs
+++ /dev/null
@@ -1,47 +0,0 @@
-//! AIR with partitioned main trace
-//! | x | y_0 | ... | y_w |
-//!
-//! Constrains x == a_0 + ... + a_w
-
-use openvm_stark_backend::{
-    air_builders::PartitionedAirBuilder,
-    p3_field::AbstractField,
-    rap::{BaseAirWithPublicValues, PartitionedBaseAir},
-};
-use p3_air::{Air, BaseAir};
-use p3_matrix::Matrix;
-
-/// Inner value is width of y-submatrix
-pub struct SumAir(pub usize);
-
-impl<F> BaseAirWithPublicValues<F> for SumAir {}
-impl<F> PartitionedBaseAir<F> for SumAir {
-    fn cached_main_widths(&self) -> Vec<usize> {
-        vec![self.0]
-    }
-    fn common_main_width(&self) -> usize {
-        1
-    }
-}
-impl<F> BaseAir<F> for SumAir {
-    fn width(&self) -> usize {
-        self.0 + 1
-    }
-}
-
-impl<AB: PartitionedAirBuilder> Air<AB> for SumAir {
-    fn eval(&self, builder: &mut AB) {
-        assert_eq!(builder.cached_mains().len(), 1);
-
-        let x = builder.common_main().row_slice(0)[0];
-        let ys = builder.cached_mains()[0].row_slice(0);
-
-        let mut y_sum = AB::Expr::ZERO;
-        for &y in &*ys {
-            y_sum = y_sum + y;
-        }
-        drop(ys);
-
-        builder.assert_eq(x, y_sum);
-    }
-}
diff --git a/crates/stark-backend/tests/partitioned_sum_air/mod.rs b/crates/stark-backend/tests/partitioned_sum_air/mod.rs
deleted file mode 100644
index c3efebc1d6..0000000000
--- a/crates/stark-backend/tests/partitioned_sum_air/mod.rs
+++ /dev/null
@@ -1,96 +0,0 @@
-use std::sync::Arc;
-
-use itertools::Itertools;
-use openvm_stark_backend::{
-    p3_field::AbstractField,
-    prover::{
-        types::{AirProofInput, AirProofRawInput, ProofInput},
-        USE_DEBUG_BUILDER,
-    },
-    verifier::VerificationError,
-};
-use openvm_stark_sdk::{config::baby_bear_poseidon2::default_engine, engine::StarkEngine};
-use p3_baby_bear::BabyBear;
-use p3_matrix::dense::RowMajorMatrix;
-use rand::{rngs::StdRng, SeedableRng};
-
-use crate::utils::generate_random_matrix;
-
-pub mod air;
-
-use self::air::SumAir;
-
-type Val = BabyBear;
-
-// See air.rs for description of SumAir
-fn prove_and_verify_sum_air(x: Vec<Val>, ys: Vec<Vec<Val>>) -> Result<(), VerificationError> {
-    assert_eq!(x.len(), ys.len());
-
-    let engine = default_engine();
-
-    let x_trace = RowMajorMatrix::new(x, 1);
-    let y_width = ys[0].len();
-    let y_trace = RowMajorMatrix::new(ys.into_iter().flatten().collect_vec(), y_width);
-
-    let air = Arc::new(SumAir(y_width));
-
-    let mut keygen_builder = engine.keygen_builder();
-    let air_id = keygen_builder.add_air(air.clone());
-    let pk = keygen_builder.generate_pk();
-    let vk = pk.get_vk();
-
-    let prover = engine.prover();
-    // Demonstrate y is cached
-    let y_data = prover.committer().commit(vec![y_trace.clone()]);
-    // Load x normally
-    let air_proof_input = AirProofInput {
-        air,
-        cached_mains_pdata: vec![y_data],
-        raw: AirProofRawInput {
-            cached_mains: vec![Arc::new(y_trace)],
-            common_main: Some(x_trace),
-            public_values: vec![],
-        },
-    };
-    let proof_input = ProofInput::new(vec![(air_id, air_proof_input)]);
-
-    let mut challenger = engine.new_challenger();
-    let proof = prover.prove(&mut challenger, &pk, proof_input);
-
-    // Verify the proof:
-    // Start from clean challenger
-    let mut challenger = engine.new_challenger();
-    let verifier = engine.verifier();
-    verifier.verify(&mut challenger, &vk, &proof)
-}
-
-#[test]
-fn test_partitioned_sum_air_happy_path() {
-    let rng = StdRng::seed_from_u64(0);
-    let n = 1 << 3;
-    let ys = generate_random_matrix::<Val>(rng, n, 5);
-    let x: Vec<Val> = ys
-        .iter()
-        .map(|row| row.iter().fold(Val::ZERO, |sum, x| sum + *x))
-        .collect();
-    prove_and_verify_sum_air(x, ys).expect("Verification failed");
-}
-
-#[test]
-fn test_partitioned_sum_air_happy_neg() {
-    let rng = StdRng::seed_from_u64(0);
-    let n = 1 << 3;
-    let ys = generate_random_matrix(rng, n, 5);
-    let mut x: Vec<Val> = ys
-        .iter()
-        .map(|row| row.iter().fold(Val::ZERO, |sum, x| sum + *x))
-        .collect();
-    x[0] = Val::ZERO;
-    USE_DEBUG_BUILDER.with(|debug| {
-        *debug.lock().unwrap() = false;
-    });
-    assert_eq!(
-        prove_and_verify_sum_air(x, ys),
-        Err(VerificationError::OodEvaluationMismatch)
-    );
-}
diff --git a/crates/stark-sdk/Cargo.toml b/crates/stark-sdk/Cargo.toml
deleted file mode 100644
index df1c21a536..0000000000
--- a/crates/stark-sdk/Cargo.toml
+++ /dev/null
@@ -1,50 +0,0 @@
-[package]
-name = "openvm-stark-sdk"
-version.workspace = true
-authors.workspace = true
-edition.workspace = true
-description = "SDK for production and testing usage of STARKs."
-
-[dependencies]
-p3-dft = { workspace = true }
-p3-merkle-tree = { workspace = true }
-p3-fri = { workspace = true }
-p3-baby-bear = { workspace = true }
-p3-bn254-fr = { workspace = true }
-p3-goldilocks = { workspace = true }
-p3-poseidon2 = { workspace = true }
-p3-poseidon = { workspace = true }
-p3-symmetric = { workspace = true }
-p3-blake3 = { workspace = true }
-p3-keccak = { workspace = true }
-zkhash = { workspace = true }
-itertools.workspace = true
-tracing.workspace = true
-
-openvm-stark-backend = { workspace = true }
-openvm-circuit-primitives-derive = { workspace = true }
-
-serde = { workspace = true, features = ["alloc"] }
-rand.workspace = true
-metrics.workspace = true
-serde_json.workspace = true
-static_assertions.workspace = true
-toml = "0.8.14"
-derive_more = "0.99.18"
-ff = { workspace = true, features = ["derive", "derive_bits"] }
-tracing-subscriber = { version = "0.3.17", features = ["std", "env-filter"] }
-tracing-forest = { version = "0.1.6", features = ["ansi", "smallvec"] }
-metrics-tracing-context = "0.16.0"
-metrics-util = "0.17.0"
-
-[dev-dependencies]
-openvm-instructions.workspace = true
-
-[features]
-default = ["parallel"]
-parallel = ["openvm-stark-backend/parallel"]
-nightly-features = [
-    "p3-dft/nightly-features",
-    "p3-keccak/nightly-features",
-    "p3-poseidon2/nightly-features",
-]
diff --git a/crates/stark-sdk/src/bench/mod.rs b/crates/stark-sdk/src/bench/mod.rs
deleted file mode 100644
index 9c429f844d..0000000000
--- a/crates/stark-sdk/src/bench/mod.rs
+++ /dev/null
@@ -1,114 +0,0 @@
-use std::{collections::BTreeMap, ffi::OsStr};
-
-use metrics_tracing_context::{MetricsLayer, TracingContextLayer};
-use metrics_util::{
-    debugging::{DebugValue, DebuggingRecorder, Snapshot},
-    layers::Layer,
-    CompositeKey, MetricKind,
-};
-use serde_json::json;
-use tracing::Level;
-use tracing_forest::ForestLayer;
-use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
-
-/// Run a function with metric collection enabled. The metrics will be written to a file specified
-/// by an environment variable which name is `output_path_envar`.
-pub fn run_with_metric_collection<R>(
-    output_path_envar: impl AsRef<OsStr>,
-    f: impl FnOnce() -> R,
-) -> R {
-    let file = std::env::var(output_path_envar).map(|path| std::fs::File::create(path).unwrap());
-    // Set up tracing:
-    let env_filter = EnvFilter::builder()
-        .with_default_directive(Level::INFO.into())
-        .from_env_lossy();
-    let subscriber = Registry::default()
-        .with(env_filter)
-        .with(ForestLayer::default())
-        .with(MetricsLayer::new());
-    // Prepare tracing.
-    tracing::subscriber::set_global_default(subscriber).unwrap();
-
-    // Prepare metrics.
-    let recorder = DebuggingRecorder::new();
-    let snapshotter = recorder.snapshotter();
-    let recorder = TracingContextLayer::all().layer(recorder);
-    // Install the registry as the global recorder
-    metrics::set_global_recorder(recorder).unwrap();
-    let res = f();
-
-    if let Ok(file) = file {
-        serde_json::to_writer_pretty(&file, &serialize_metric_snapshot(snapshotter.snapshot()))
-            .unwrap();
-    }
-    res
-}
-
-/// Serialize a gauge/counter metric into a JSON object. The object has the following structure:
-/// {
-///    "metric": <metric_name>,
-///    "labels": [
-///        (<label_key>, <label_value>),
-///        (<label_key>, <label_value>),
-///    ],
-///    "value": <value>
-/// }
-///
-fn serialize_metric(ckey: CompositeKey, value: DebugValue) -> serde_json::Value {
-    let (_kind, key) = ckey.into_parts();
-    let (key_name, labels) = key.into_parts();
-    let value = match value {
-        DebugValue::Gauge(v) => v.into_inner().to_string(),
-        DebugValue::Counter(v) => v.to_string(),
-        DebugValue::Histogram(_) => todo!("Histograms not supported yet."),
-    };
-    let labels = labels
-        .into_iter()
-        .map(|label| {
-            let (k, v) = label.into_parts();
-            (k.as_ref().to_owned(), v.as_ref().to_owned())
-        })
-        .collect::<Vec<_>>();
-
-    json!({
-        "metric": key_name.as_str(),
-        "labels": labels,
-        "value": value,
-    })
-}
-
-/// Serialize a metric snapshot into a JSON object. The object has the following structure:
-/// {
-///    "gauge": [
-///        {
-///            "metric": <metric_name>,
-///            "labels": [
-///                (<label_key>, <label_value>),
-///                (<label_key>, <label_value>),
-///            ],
-///            "value": <value>
-///        },
-///        ...
-///    ],
-///    ...
-/// }
-///
-fn serialize_metric_snapshot(snapshot: Snapshot) -> serde_json::Value {
-    let mut ret = BTreeMap::<_, Vec<serde_json::Value>>::new();
-    for (ckey, _, _, value) in snapshot.into_vec() {
-        match ckey.kind() {
-            MetricKind::Gauge => {
-                ret.entry("gauge")
-                    .or_default()
-                    .push(serialize_metric(ckey, value));
-            }
-            MetricKind::Counter => {
-                ret.entry("counter")
-                    .or_default()
-                    .push(serialize_metric(ckey, value));
-            }
-            MetricKind::Histogram => todo!(),
-        }
-    }
-    json!(ret)
-}
diff --git a/crates/stark-sdk/src/config/baby_bear_blake3.rs b/crates/stark-sdk/src/config/baby_bear_blake3.rs
deleted file mode 100644
index 9234cc6d7d..0000000000
--- a/crates/stark-sdk/src/config/baby_bear_blake3.rs
+++ /dev/null
@@ -1,34 +0,0 @@
-use p3_blake3::Blake3;
-
-use super::{
-    baby_bear_bytehash::{
-        self, config_from_byte_hash, BabyBearByteHashConfig, BabyBearByteHashEngine,
-    },
-    FriParameters,
-};
-use crate::{
-    assert_sc_compatible_with_serde,
-    config::baby_bear_bytehash::BabyBearByteHashEngineWithDefaultHash,
-};
-
-pub type BabyBearBlake3Config = BabyBearByteHashConfig<Blake3>;
-pub type BabyBearBlake3Engine = BabyBearByteHashEngine<Blake3>;
-
-assert_sc_compatible_with_serde!(BabyBearBlake3Config);
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_engine() -> BabyBearBlake3Engine {
-    baby_bear_bytehash::default_engine(Blake3)
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_config() -> BabyBearBlake3Config {
-    let fri_params = FriParameters::standard_fast();
-    config_from_byte_hash(Blake3, fri_params)
-}
-
-impl BabyBearByteHashEngineWithDefaultHash<Blake3> for BabyBearBlake3Engine {
-    fn default_hash() -> Blake3 {
-        Blake3
-    }
-}
diff --git a/crates/stark-sdk/src/config/baby_bear_bytehash.rs b/crates/stark-sdk/src/config/baby_bear_bytehash.rs
deleted file mode 100644
index ed6223ff77..0000000000
--- a/crates/stark-sdk/src/config/baby_bear_bytehash.rs
+++ /dev/null
@@ -1,123 +0,0 @@
-use openvm_stark_backend::{
-    config::StarkConfig,
-    interaction::stark_log_up::StarkLogUpPhase,
-    p3_challenger::{HashChallenger, SerializingChallenger32},
-    p3_commit::ExtensionMmcs,
-    p3_field::extension::BinomialExtensionField,
-};
-use p3_baby_bear::BabyBear;
-use p3_dft::Radix2DitParallel;
-use p3_fri::{FriConfig, TwoAdicFriPcs};
-use p3_merkle_tree::MerkleTreeMmcs;
-use p3_symmetric::{CompressionFunctionFromHasher, CryptographicHasher, SerializingHasher32};
-
-use super::FriParameters;
-use crate::engine::{StarkEngine, StarkFriEngine};
-
-type Val = BabyBear;
-type Challenge = BinomialExtensionField<Val, 4>;
-
-// Generic over H: CryptographicHasher<u8, [u8; 32]>
-type FieldHash<H> = SerializingHasher32<H>;
-type Compress<H> = CompressionFunctionFromHasher<H, 2, 32>;
-// type InstrCompress<H> = Instrumented<Compress<H>>;
-
-type ValMmcs<H> = MerkleTreeMmcs<Val, u8, FieldHash<H>, Compress<H>, 32>;
-type ChallengeMmcs<H> = ExtensionMmcs<Val, Challenge, ValMmcs<H>>;
-type Dft = Radix2DitParallel<Val>;
-type Challenger<H> = SerializingChallenger32<Val, HashChallenger<u8, H, 32>>;
-
-type Pcs<H> = TwoAdicFriPcs<Val, Dft, ValMmcs<H>, ChallengeMmcs<H>>;
-
-type RapPhase<H> = StarkLogUpPhase<Val, Challenge, Challenger<H>>;
-
-pub type BabyBearByteHashConfig<H> = StarkConfig<Pcs<H>, RapPhase<H>, Challenge, Challenger<H>>;
-
-pub struct BabyBearByteHashEngine<H>
-where
-    H: CryptographicHasher<u8, [u8; 32]> + Clone,
-{
-    pub fri_params: FriParameters,
-    pub config: BabyBearByteHashConfig<H>,
-    pub byte_hash: H,
-}
-
-impl<H> StarkEngine<BabyBearByteHashConfig<H>> for BabyBearByteHashEngine<H>
-where
-    H: CryptographicHasher<u8, [u8; 32]> + Clone + Send + Sync,
-{
-    fn config(&self) -> &BabyBearByteHashConfig<H> {
-        &self.config
-    }
-
-    fn new_challenger(&self) -> Challenger<H> {
-        Challenger::from_hasher(vec![], self.byte_hash.clone())
-    }
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_engine<H>(byte_hash: H) -> BabyBearByteHashEngine<H>
-where
-    H: CryptographicHasher<u8, [u8; 32]> + Clone,
-{
-    let fri_params = FriParameters::standard_fast();
-    engine_from_byte_hash(byte_hash, fri_params)
-}
-
-pub fn engine_from_byte_hash<H>(
-    byte_hash: H,
-    fri_params: FriParameters,
-) -> BabyBearByteHashEngine<H>
-where
-    H: CryptographicHasher<u8, [u8; 32]> + Clone,
-{
-    let config = config_from_byte_hash(byte_hash.clone(), fri_params);
-    BabyBearByteHashEngine {
-        config,
-        byte_hash,
-        fri_params,
-    }
-}
-
-pub fn config_from_byte_hash<H>(
-    byte_hash: H,
-    fri_params: FriParameters,
-) -> BabyBearByteHashConfig<H>
-where
-    H: CryptographicHasher<u8, [u8; 32]> + Clone,
-{
-    let field_hash = FieldHash::new(byte_hash.clone());
-    let compress = Compress::new(byte_hash);
-    let val_mmcs = ValMmcs::new(field_hash, compress);
-    let challenge_mmcs = ChallengeMmcs::new(val_mmcs.clone());
-    let dft = Dft::default();
-    let fri_config = FriConfig {
-        log_blowup: fri_params.log_blowup,
-        num_queries: fri_params.num_queries,
-        proof_of_work_bits: fri_params.proof_of_work_bits,
-        mmcs: challenge_mmcs,
-    };
-    let pcs = Pcs::new(dft, val_mmcs, fri_config);
-    let rap_phase = StarkLogUpPhase::new();
-    BabyBearByteHashConfig::new(pcs, rap_phase)
-}
-
-pub trait BabyBearByteHashEngineWithDefaultHash<H>
-where
-    H: CryptographicHasher<u8, [u8; 32]> + Clone,
-{
-    fn default_hash() -> H;
-}
-
-impl<H: CryptographicHasher<u8, [u8; 32]> + Clone + Send + Sync>
-    StarkFriEngine<BabyBearByteHashConfig<H>> for BabyBearByteHashEngine<H>
-where
-    BabyBearByteHashEngine<H>: BabyBearByteHashEngineWithDefaultHash<H>,
-{
-    fn new(fri_params: FriParameters) -> Self {
-        engine_from_byte_hash(Self::default_hash(), fri_params)
-    }
-    fn fri_params(&self) -> FriParameters {
-        self.fri_params
-    }
-}
diff --git a/crates/stark-sdk/src/config/baby_bear_keccak.rs b/crates/stark-sdk/src/config/baby_bear_keccak.rs
deleted file mode 100644
index 80122f5a83..0000000000
--- a/crates/stark-sdk/src/config/baby_bear_keccak.rs
+++ /dev/null
@@ -1,34 +0,0 @@
-use p3_keccak::Keccak256Hash;
-
-use super::{
-    baby_bear_bytehash::{
-        self, config_from_byte_hash, BabyBearByteHashConfig, BabyBearByteHashEngine,
-    },
-    FriParameters,
-};
-use crate::{
-    assert_sc_compatible_with_serde,
-    config::baby_bear_bytehash::BabyBearByteHashEngineWithDefaultHash,
-};
-
-pub type BabyBearKeccakConfig = BabyBearByteHashConfig<Keccak256Hash>;
-pub type BabyBearKeccakEngine = BabyBearByteHashEngine<Keccak256Hash>;
-
-assert_sc_compatible_with_serde!(BabyBearKeccakConfig);
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_engine() -> BabyBearKeccakEngine {
-    baby_bear_bytehash::default_engine(Keccak256Hash)
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_config() -> BabyBearKeccakConfig {
-    let fri_params = FriParameters::standard_fast();
-    config_from_byte_hash(Keccak256Hash, fri_params)
-}
-
-impl BabyBearByteHashEngineWithDefaultHash<Keccak256Hash> for BabyBearKeccakEngine {
-    fn default_hash() -> Keccak256Hash {
-        Keccak256Hash
-    }
-}
diff --git a/crates/stark-sdk/src/config/baby_bear_poseidon2.rs b/crates/stark-sdk/src/config/baby_bear_poseidon2.rs
deleted file mode 100644
index 2023733124..0000000000
--- a/crates/stark-sdk/src/config/baby_bear_poseidon2.rs
+++ /dev/null
@@ -1,267 +0,0 @@
-use std::any::type_name;
-
-use openvm_stark_backend::{
-    config::StarkConfig,
-    interaction::stark_log_up::StarkLogUpPhase,
-    p3_challenger::DuplexChallenger,
-    p3_commit::ExtensionMmcs,
-    p3_field::{extension::BinomialExtensionField, AbstractField, Field},
-};
-use p3_baby_bear::{BabyBear, Poseidon2BabyBear};
-use p3_dft::Radix2DitParallel;
-use p3_fri::{FriConfig, TwoAdicFriPcs};
-use p3_merkle_tree::MerkleTreeMmcs;
-use p3_poseidon2::ExternalLayerConstants;
-use p3_symmetric::{CryptographicPermutation, PaddingFreeSponge, TruncatedPermutation};
-use rand::{rngs::StdRng, SeedableRng};
-use zkhash::{
-    ark_ff::PrimeField as _, fields::babybear::FpBabyBear as HorizenBabyBear,
-    poseidon2::poseidon2_instance_babybear::RC16,
-};
-
-use super::{
-    instrument::{HashStatistics, InstrumentCounter, Instrumented, StarkHashStatistics},
-    FriParameters,
-};
-use crate::{
-    assert_sc_compatible_with_serde,
-    engine::{StarkEngine, StarkEngineWithHashInstrumentation, StarkFriEngine},
-};
-
-const RATE: usize = 8;
-// permutation width
-const WIDTH: usize = 16; // rate + capacity
-const DIGEST_WIDTH: usize = 8;
-
-type Val = BabyBear;
-type PackedVal = <Val as Field>::Packing;
-type Challenge = BinomialExtensionField<Val, 4>;
-type Perm = Poseidon2BabyBear<WIDTH>;
-type InstrPerm = Instrumented<Perm>;
-
-// Generic over P: CryptographicPermutation<[F; WIDTH]>
-type Hash<P> = PaddingFreeSponge<P, WIDTH, RATE, DIGEST_WIDTH>;
-type Compress<P> = TruncatedPermutation<P, 2, DIGEST_WIDTH, WIDTH>;
-type ValMmcs<P> =
-    MerkleTreeMmcs<PackedVal, <Val as Field>::Packing, Hash<P>, Compress<P>, DIGEST_WIDTH>;
-type ChallengeMmcs<P> = ExtensionMmcs<Val, Challenge, ValMmcs<P>>;
-pub type Challenger<P> = DuplexChallenger<Val, P, WIDTH, RATE>;
-type Dft = Radix2DitParallel<Val>;
-type Pcs<P> = TwoAdicFriPcs<Val, Dft, ValMmcs<P>, ChallengeMmcs<P>>;
-type RapPhase<P> = StarkLogUpPhase<Val, Challenge, Challenger<P>>;
-
-pub type BabyBearPermutationConfig<P> = StarkConfig<Pcs<P>, RapPhase<P>, Challenge, Challenger<P>>;
-pub type BabyBearPoseidon2Config = BabyBearPermutationConfig<Perm>;
-pub type BabyBearPoseidon2Engine = BabyBearPermutationEngine<Perm>;
-
-assert_sc_compatible_with_serde!(BabyBearPoseidon2Config);
-
-pub struct BabyBearPermutationEngine<P>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    pub fri_params: FriParameters,
-    pub config: BabyBearPermutationConfig<P>,
-    pub perm: P,
-}
-
-impl<P> StarkEngine<BabyBearPermutationConfig<P>> for BabyBearPermutationEngine<P>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    fn config(&self) -> &BabyBearPermutationConfig<P> {
-        &self.config
-    }
-
-    fn new_challenger(&self) -> Challenger<P> {
-        Challenger::new(self.perm.clone())
-    }
-}
-
-impl<P> StarkEngineWithHashInstrumentation<BabyBearPermutationConfig<Instrumented<P>>>
-    for BabyBearPermutationEngine<Instrumented<P>>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    fn clear_instruments(&mut self) {
-        self.perm.input_lens_by_type.lock().unwrap().clear();
-    }
-    fn stark_hash_statistics<T>(&self, custom: T) -> StarkHashStatistics<T> {
-        let counter = self.perm.input_lens_by_type.lock().unwrap();
-        let permutations = counter.iter().fold(0, |total, (name, lens)| {
-            if name == type_name::<[Val; WIDTH]>() {
-                let count: usize = lens.iter().sum();
-                println!("Permutation: {name}, Count: {count}");
-                total + count
-            } else {
-                panic!("Permutation type not yet supported: {}", name);
-            }
-        });
-
-        StarkHashStatistics {
-            name: type_name::<P>().to_string(),
-            stats: HashStatistics { permutations },
-            fri_params: self.fri_params,
-            custom,
-        }
-    }
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_engine() -> BabyBearPoseidon2Engine {
-    default_engine_impl(FriParameters::standard_fast())
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-fn default_engine_impl(fri_params: FriParameters) -> BabyBearPoseidon2Engine {
-    let perm = default_perm();
-    engine_from_perm(perm, fri_params)
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_config(perm: &Perm) -> BabyBearPoseidon2Config {
-    let fri_params = FriParameters::standard_fast();
-    config_from_perm(perm, fri_params)
-}
-
-pub fn engine_from_perm<P>(perm: P, fri_params: FriParameters) -> BabyBearPermutationEngine<P>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    let config = config_from_perm(&perm, fri_params);
-    BabyBearPermutationEngine {
-        config,
-        perm,
-        fri_params,
-    }
-}
-
-pub fn config_from_perm<P>(perm: &P, fri_params: FriParameters) -> BabyBearPermutationConfig<P>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    let hash = Hash::new(perm.clone());
-    let compress = Compress::new(perm.clone());
-    let val_mmcs = ValMmcs::new(hash, compress);
-    let challenge_mmcs = ChallengeMmcs::new(val_mmcs.clone());
-    let dft = Dft::default();
-    let fri_config = FriConfig {
-        log_blowup: fri_params.log_blowup,
-        num_queries: fri_params.num_queries,
-        proof_of_work_bits: fri_params.proof_of_work_bits,
-        mmcs: challenge_mmcs,
-    };
-    let pcs = Pcs::new(dft, val_mmcs, fri_config);
-    let rap_phase = StarkLogUpPhase::new();
-    BabyBearPermutationConfig::new(pcs, rap_phase)
-}
-
-/// Uses HorizenLabs Poseidon2 round constants, but plonky3 Mat4 and also
-/// with a p3 Monty reduction factor.
-pub fn default_perm() -> Perm {
-    let (external_constants, internal_constants) = horizen_round_consts_16();
-    Perm::new(external_constants, internal_constants)
-}
-
-pub fn random_perm() -> Perm {
-    let seed = [42; 32];
-    let mut rng = StdRng::from_seed(seed);
-    Perm::new_from_rng_128(&mut rng)
-}
-
-pub fn random_instrumented_perm() -> InstrPerm {
-    let perm = random_perm();
-    Instrumented::new(perm)
-}
-
-fn horizen_to_p3(horizen_babybear: HorizenBabyBear) -> BabyBear {
-    BabyBear::from_canonical_u64(horizen_babybear.into_bigint().0[0])
-}
-
-pub fn horizen_round_consts_16() -> (ExternalLayerConstants<BabyBear, 16>, Vec<BabyBear>) {
-    let p3_rc16: Vec<Vec<BabyBear>> = RC16
-        .iter()
-        .map(|round| {
-            round
-                .iter()
-                .map(|babybear| horizen_to_p3(*babybear))
-                .collect()
-        })
-        .collect();
-
-    let rounds_f = 8;
-    let rounds_p = 13;
-    let rounds_f_beginning = rounds_f / 2;
-    let p_end = rounds_f_beginning + rounds_p;
-    let initial: Vec<[BabyBear; 16]> = p3_rc16[..rounds_f_beginning]
-        .iter()
-        .cloned()
-        .map(|round| round.try_into().unwrap())
-        .collect();
-    let terminal: Vec<[BabyBear; 16]> = p3_rc16[p_end..]
-        .iter()
-        .cloned()
-        .map(|round| round.try_into().unwrap())
-        .collect();
-    let internal_round_constants: Vec<BabyBear> = p3_rc16[rounds_f_beginning..p_end]
-        .iter()
-        .map(|round| round[0])
-        .collect();
-    (
-        ExternalLayerConstants::new(initial, terminal),
-        internal_round_constants,
-    )
-}
-
-/// Logs hash count statistics to stdout and returns as struct.
-/// Count of 1 corresponds to a Poseidon2 permutation with rate RATE that outputs OUT field elements -#[allow(dead_code)] -pub fn print_hash_counts(hash_counter: &InstrumentCounter, compress_counter: &InstrumentCounter) { - let hash_counter = hash_counter.lock().unwrap(); - let mut hash_count = 0; - hash_counter.iter().for_each(|(name, lens)| { - if name == type_name::<(Val, [Val; DIGEST_WIDTH])>() { - let count = lens.iter().fold(0, |count, len| count + len.div_ceil(RATE)); - println!("Hash: {name}, Count: {count}"); - hash_count += count; - } else { - panic!("Hash type not yet supported: {}", name); - } - }); - drop(hash_counter); - let compress_counter = compress_counter.lock().unwrap(); - let mut compress_count = 0; - compress_counter.iter().for_each(|(name, lens)| { - if name == type_name::<[Val; DIGEST_WIDTH]>() { - let count = lens.iter().fold(0, |count, len| { - // len should always be N=2 for TruncatedPermutation - count + (DIGEST_WIDTH * len).div_ceil(WIDTH) - }); - println!("Compress: {name}, Count: {count}"); - compress_count += count; - } else { - panic!("Compress type not yet supported: {}", name); - } - }); - let total_count = hash_count + compress_count; - println!("Total Count: {total_count}"); -} - -impl StarkFriEngine for BabyBearPoseidon2Engine { - fn new(fri_params: FriParameters) -> Self { - default_engine_impl(fri_params) - } - fn fri_params(&self) -> FriParameters { - self.fri_params - } -} diff --git a/crates/stark-sdk/src/config/baby_bear_poseidon2_root.rs b/crates/stark-sdk/src/config/baby_bear_poseidon2_root.rs deleted file mode 100644 index 40493ff369..0000000000 --- a/crates/stark-sdk/src/config/baby_bear_poseidon2_root.rs +++ /dev/null @@ -1,248 +0,0 @@ -use std::any::type_name; - -use ff::PrimeField; -use openvm_stark_backend::{ - config::StarkConfig, interaction::stark_log_up::StarkLogUpPhase, - p3_challenger::MultiField32Challenger, p3_commit::ExtensionMmcs, - p3_field::extension::BinomialExtensionField, -}; -use p3_baby_bear::BabyBear; -use p3_bn254_fr::{Bn254Fr, FFBn254Fr, Poseidon2Bn254}; -use p3_dft::Radix2DitParallel; -use p3_fri::{FriConfig, TwoAdicFriPcs}; -use p3_merkle_tree::MerkleTreeMmcs; -use p3_poseidon2::ExternalLayerConstants; -use p3_symmetric::{CryptographicPermutation, MultiField32PaddingFreeSponge, TruncatedPermutation}; -use zkhash::{ - ark_ff::{BigInteger, PrimeField as _}, - fields::bn256::FpBN256 as ark_FpBN256, - poseidon2::poseidon2_instance_bn256::RC3, -}; - -use super::{ - instrument::{HashStatistics, InstrumentCounter, Instrumented, StarkHashStatistics}, - FriParameters, -}; -use crate::{ - assert_sc_compatible_with_serde, - engine::{StarkEngine, StarkEngineWithHashInstrumentation, StarkFriEngine}, -}; - -const WIDTH: usize = 3; -/// Poseidon rate in F. (2) * <# of F in a N>(8) = 16 -const RATE: usize = 16; -const DIGEST_WIDTH: usize = 1; - -/// A configuration for recursion. -type Val = BabyBear; -type Challenge = BinomialExtensionField; -type Perm = Poseidon2Bn254; -type Hash
<P> = MultiField32PaddingFreeSponge<Val, Bn254Fr, P, WIDTH, RATE, DIGEST_WIDTH>;
-type Compress<P> = TruncatedPermutation<P, 2, DIGEST_WIDTH, WIDTH>;
-type ValMmcs<P> = MerkleTreeMmcs<BabyBear, Bn254Fr, Hash<P>, Compress<P>, 1>;
-type ChallengeMmcs<P> = ExtensionMmcs<Val, Challenge, ValMmcs<P>>;
-type Dft = Radix2DitParallel<Val>;
-type Challenger<P> = MultiField32Challenger<Val, Bn254Fr, P, WIDTH, RATE>;
-type Pcs<P> = TwoAdicFriPcs<Val, Dft, ValMmcs<P>, ChallengeMmcs<P>>;
-type RapPhase<P> = StarkLogUpPhase<Val, Challenge, Challenger<P>>;
-
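The RATE arithmetic documented above can be made concrete. A hypothetical compile-time check, not part of the original file (the constant name BABYBEAR_PER_BN254FR is assumed), under the assumption that one Bn254Fr limb is reserved as capacity and each remaining limb packs eight 31-bit BabyBear elements:

fn _rate_sanity_check() {
    // Hypothetical sanity check: the sponge absorbs over WIDTH - 1 = 2 of the
    // three Bn254Fr limbs, and each limb packs 8 BabyBear elements, giving
    // RATE = 2 * 8 = 16 BabyBear elements per permutation call.
    const BABYBEAR_PER_BN254FR: usize = 8; // assumption: 8 * 31 bits < 254 bits
    const _: () = assert!((WIDTH - 1) * BABYBEAR_PER_BN254FR == RATE);
}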
-pub type BabyBearPermutationRootConfig<P> =
-    StarkConfig<Pcs<P>, RapPhase<P>, Challenge, Challenger<P>>;
-pub type BabyBearPoseidon2RootConfig = BabyBearPermutationRootConfig<Perm>;
-pub type BabyBearPoseidon2RootEngine = BabyBearPermutationRootEngine<Perm>;
-
-assert_sc_compatible_with_serde!(BabyBearPoseidon2RootConfig);
-
-pub struct BabyBearPermutationRootEngine<P>
-where
-    P: CryptographicPermutation<[Bn254Fr; WIDTH]> + Clone,
-{
-    pub fri_params: FriParameters,
-    pub config: BabyBearPermutationRootConfig<P>,
-    pub perm: P,
-}
-
-impl<P> StarkEngine<BabyBearPermutationRootConfig<P>> for BabyBearPermutationRootEngine<P>
-where
-    P: CryptographicPermutation<[Bn254Fr; WIDTH]> + Clone,
-{
-    fn config(&self) -> &BabyBearPermutationRootConfig<P> {
-        &self.config
-    }
-
-    fn new_challenger(&self) -> Challenger<P> {
-        Challenger::new(self.perm.clone()).unwrap()
-    }
-}
-
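A minimal usage sketch of the engine above, assuming only the default_engine constructor defined later in this file:

fn _root_engine_smoke_test() {
    // Hedged sketch: construct the root engine with standard fast FRI
    // parameters and draw a fresh Fiat-Shamir challenger from it.
    let engine = default_engine();
    let _challenger = engine.new_challenger();
}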
-impl<P: 'static> StarkEngineWithHashInstrumentation<BabyBearPermutationRootConfig<Instrumented<P>>>
-    for BabyBearPermutationRootEngine<Instrumented<P>>
-where
-    P: CryptographicPermutation<[Bn254Fr; WIDTH]> + Clone,
-{
-    fn clear_instruments(&mut self) {
-        self.perm.input_lens_by_type.lock().unwrap().clear();
-    }
-    fn stark_hash_statistics<T>(&self, custom: T) -> StarkHashStatistics<T> {
-        let counter = self.perm.input_lens_by_type.lock().unwrap();
-        let permutations = counter.iter().fold(0, |total, (name, lens)| {
-            if name == type_name::<[Val; WIDTH]>() {
-                let count: usize = lens.iter().sum();
-                println!("Permutation: {name}, Count: {count}");
-                total + count
-            } else {
-                panic!("Permutation type not yet supported: {}", name);
-            }
-        });
-
-        StarkHashStatistics {
-            name: type_name::<P>().to_string(),
-            stats: HashStatistics { permutations },
-            fri_params: self.fri_params,
-            custom,
-        }
-    }
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_engine() -> BabyBearPoseidon2RootEngine {
-    default_engine_impl(FriParameters::standard_fast())
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-fn default_engine_impl(fri_params: FriParameters) -> BabyBearPoseidon2RootEngine {
-    let perm = root_perm();
-    engine_from_perm(perm, fri_params)
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_config(perm: &Perm) -> BabyBearPoseidon2RootConfig {
-    let fri_params = FriParameters::standard_fast();
-    config_from_perm(perm, fri_params)
-}
-
-pub fn engine_from_perm<P>(perm: P, fri_params: FriParameters) -> BabyBearPermutationRootEngine<P>
-where
-    P: CryptographicPermutation<[Bn254Fr; WIDTH]> + Clone,
-{
-    let config = config_from_perm(&perm, fri_params);
-    BabyBearPermutationRootEngine {
-        config,
-        perm,
-        fri_params,
-    }
-}
-
-pub fn config_from_perm<P>(perm: &P, fri_params: FriParameters) -> BabyBearPermutationRootConfig<P>
-where - P: CryptographicPermutation<[Bn254Fr; WIDTH]> + Clone, -{ - let hash = Hash::new(perm.clone()).unwrap(); - let compress = Compress::new(perm.clone()); - let val_mmcs = ValMmcs::new(hash, compress); - let challenge_mmcs = ChallengeMmcs::new(val_mmcs.clone()); - let dft = Dft::default(); - let fri_config = FriConfig { - log_blowup: fri_params.log_blowup, - num_queries: fri_params.num_queries, - proof_of_work_bits: fri_params.proof_of_work_bits, - mmcs: challenge_mmcs, - }; - let pcs = Pcs::new(dft, val_mmcs, fri_config); - let rap_phase = StarkLogUpPhase::new(); - BabyBearPermutationRootConfig::new(pcs, rap_phase) -} - -/// The permutation for outer recursion. -pub fn root_perm() -> Perm { - const ROUNDS_F: usize = 8; - const ROUNDS_P: usize = 56; - let mut round_constants = bn254_poseidon2_rc3(); - let internal_end = (ROUNDS_F / 2) + ROUNDS_P; - let terminal = round_constants.split_off(internal_end); - let internal_round_constants = round_constants.split_off(ROUNDS_F / 2); - let internal_round_constants = internal_round_constants - .into_iter() - .map(|vec| vec[0]) - .collect::>(); - let initial = round_constants; - - let external_round_constants = ExternalLayerConstants::new(initial, terminal); - Perm::new(external_round_constants, internal_round_constants) -} - -fn bn254_from_ark_ff(input: ark_FpBN256) -> Bn254Fr { - let bytes = input.into_bigint().to_bytes_le(); - - let mut res = ::Repr::default(); - - for (i, digit) in res.as_mut().iter_mut().enumerate() { - *digit = bytes[i]; - } - - let value = FFBn254Fr::from_repr(res); - - if value.is_some().into() { - Bn254Fr { - value: value.unwrap(), - } - } else { - panic!("Invalid field element") - } -} - -fn bn254_poseidon2_rc3() -> Vec<[Bn254Fr; 3]> { - RC3.iter() - .map(|vec| { - vec.iter() - .cloned() - .map(bn254_from_ark_ff) - .collect::>() - .try_into() - .unwrap() - }) - .collect() -} - -/// Logs hash count statistics to stdout and returns as struct. 
-/// Count of 1 corresponds to a Poseidon2 permutation with rate RATE that outputs OUT field elements -#[allow(dead_code)] -pub fn print_hash_counts(hash_counter: &InstrumentCounter, compress_counter: &InstrumentCounter) { - let hash_counter = hash_counter.lock().unwrap(); - let mut hash_count = 0; - hash_counter.iter().for_each(|(name, lens)| { - if name == type_name::<(Val, [Val; DIGEST_WIDTH])>() { - let count = lens.iter().fold(0, |count, len| count + len.div_ceil(RATE)); - println!("Hash: {name}, Count: {count}"); - hash_count += count; - } else { - panic!("Hash type not yet supported: {}", name); - } - }); - drop(hash_counter); - let compress_counter = compress_counter.lock().unwrap(); - let mut compress_count = 0; - compress_counter.iter().for_each(|(name, lens)| { - if name == type_name::<[Val; DIGEST_WIDTH]>() { - let count = lens.iter().fold(0, |count, len| { - // len should always be N=2 for TruncatedPermutation - count + (DIGEST_WIDTH * len).div_ceil(WIDTH) - }); - println!("Compress: {name}, Count: {count}"); - compress_count += count; - } else { - panic!("Compress type not yet supported: {}", name); - } - }); - let total_count = hash_count + compress_count; - println!("Total Count: {total_count}"); -} - -impl StarkFriEngine for BabyBearPoseidon2RootEngine { - fn new(fri_params: FriParameters) -> Self { - default_engine_impl(fri_params) - } - fn fri_params(&self) -> FriParameters { - self.fri_params - } -} diff --git a/crates/stark-sdk/src/config/fri_params.rs b/crates/stark-sdk/src/config/fri_params.rs deleted file mode 100644 index 62326bc637..0000000000 --- a/crates/stark-sdk/src/config/fri_params.rs +++ /dev/null @@ -1,75 +0,0 @@ -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] -pub struct FriParameters { - pub log_blowup: usize, - pub num_queries: usize, - pub proof_of_work_bits: usize, -} - -impl FriParameters { - /// Conjectured bits of security. - /// See ethSTARK paper (https://eprint.iacr.org/2021/582.pdf) section 5.10.1 equation (19) - /// - /// `challenge_field_bits` is the number of bits in the challenge field (extension field) of the STARK config. - pub fn get_conjectured_security_bits(&self, challenge_field_bits: usize) -> usize { - let fri_query_security_bits = self.num_queries * self.log_blowup + self.proof_of_work_bits; - // The paper says min(fri_field_bits, fri_query_security_bits) - 1 but plonky2 (https://github.com/0xPolygonZero/plonky2/blob/41dc325e61ab8d4c0491e68e667c35a4e8173ffa/starky/src/config.rs#L86C1-L87C1) omits the -1 - challenge_field_bits.min(fri_query_security_bits) - } - - pub fn standard_fast() -> FriParameters { - standard_fri_params_with_100_bits_conjectured_security(1) - } - - pub fn standard_with_100_bits_conjectured_security(log_blowup: usize) -> FriParameters { - standard_fri_params_with_100_bits_conjectured_security(log_blowup) - } - - pub fn max_constraint_degree(&self) -> usize { - (1 << self.log_blowup) + 1 - } -} - -/// Pre-defined FRI parameters with 100 bits of conjectured security. -/// Security bits calculated following ethSTARK (https://eprint.iacr.org/2021/582.pdf) 5.10.1 eq (19) -/// -/// Assumes that the challenge field used as more than 100 bits. 
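A worked instance of the formula above, checked against the log_blowup = 2 preset defined just below (42 queries and 16 proof-of-work bits):

fn _conjectured_security_example() {
    // fri_query_security_bits = num_queries * log_blowup + proof_of_work_bits
    //                         = 42 * 2 + 16 = 100
    let params = FriParameters {
        log_blowup: 2,
        num_queries: 42,
        proof_of_work_bits: 16,
    };
    // With a challenge field of at least 100 bits, the conjectured security
    // is min(100, 100) = 100.
    assert_eq!(params.get_conjectured_security_bits(100), 100);
    // Max supported constraint degree for this blowup: (1 << 2) + 1 = 5.
    assert_eq!(params.max_constraint_degree(), 5);
}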
-pub fn standard_fri_params_with_100_bits_conjectured_security(log_blowup: usize) -> FriParameters { - if let Ok("1") = std::env::var("OPENVM_FAST_TEST").as_deref() { - return FriParameters { - log_blowup, - num_queries: 2, - proof_of_work_bits: 0, - }; - } - let fri_params = match log_blowup { - // plonky2 standard fast config uses num_queries=84: https://github.com/0xPolygonZero/plonky2/blob/41dc325e61ab8d4c0491e68e667c35a4e8173ffa/starky/src/config.rs#L49 - // plonky3's default is num_queries=100, so we will use that. See https://github.com/Plonky3/Plonky3/issues/380 for related security discussion. - 1 => FriParameters { - log_blowup, - num_queries: 100, - proof_of_work_bits: 16, - }, - 2 => FriParameters { - log_blowup, - num_queries: 42, - proof_of_work_bits: 16, - }, - // plonky2 standard recursion config: https://github.com/0xPolygonZero/plonky2/blob/41dc325e61ab8d4c0491e68e667c35a4e8173ffa/plonky2/src/plonk/circuit_data.rs#L101 - 3 => FriParameters { - log_blowup, - num_queries: 28, - proof_of_work_bits: 16, - }, - 4 => FriParameters { - log_blowup, - num_queries: 21, - proof_of_work_bits: 16, - }, - _ => todo!("No standard FRI params defined for log blowup {log_blowup}",), - }; - assert!(fri_params.get_conjectured_security_bits(100) >= 100); - tracing::info!("FRI parameters | log_blowup: {log_blowup:<2} | num_queries: {:<2} | proof_of_work_bits: {:<2}", fri_params.num_queries, fri_params.proof_of_work_bits); - fri_params -} diff --git a/crates/stark-sdk/src/config/goldilocks_poseidon.rs b/crates/stark-sdk/src/config/goldilocks_poseidon.rs deleted file mode 100644 index ce93cc2438..0000000000 --- a/crates/stark-sdk/src/config/goldilocks_poseidon.rs +++ /dev/null @@ -1,171 +0,0 @@ -use std::any::type_name; - -use openvm_stark_backend::{ - config::StarkConfig, - interaction::stark_log_up::StarkLogUpPhase, - p3_challenger::DuplexChallenger, - p3_commit::ExtensionMmcs, - p3_field::{extension::BinomialExtensionField, Field}, -}; -use p3_dft::Radix2DitParallel; -use p3_fri::{FriConfig, TwoAdicFriPcs}; -use p3_goldilocks::{Goldilocks, MdsMatrixGoldilocks}; -use p3_merkle_tree::MerkleTreeMmcs; -use p3_poseidon::Poseidon; -use p3_symmetric::{CryptographicPermutation, PaddingFreeSponge, TruncatedPermutation}; -use rand::{rngs::StdRng, SeedableRng}; - -use super::{ - instrument::{HashStatistics, Instrumented, StarkHashStatistics}, - FriParameters, -}; -use crate::{ - assert_sc_compatible_with_serde, - engine::{StarkEngine, StarkEngineWithHashInstrumentation}, -}; - -const RATE: usize = 4; -// permutation width -const WIDTH: usize = 8; // rate + capacity -const DIGEST_WIDTH: usize = 4; - -type Val = Goldilocks; -type PackedVal = ::Packing; -type Challenge = BinomialExtensionField; -type Perm = Poseidon; -type InstrPerm = Instrumented; - -// Generic over P: CryptographicPermutation<[F; WIDTH]> -type Hash
<P> = PaddingFreeSponge<P, WIDTH, RATE, DIGEST_WIDTH>;
-type Compress<P> = TruncatedPermutation<P, 2, DIGEST_WIDTH, WIDTH>;
-type ValMmcs<P> =
-    MerkleTreeMmcs<<Val as Field>::Packing, <Val as Field>::Packing, Hash<P>, Compress<P>, DIGEST_WIDTH>;
-type ChallengeMmcs<P> = ExtensionMmcs<Val, Challenge, ValMmcs<P>>;
-pub type Challenger<P> = DuplexChallenger<Val, P, WIDTH, RATE>;
-type Dft = Radix2DitParallel<Val>;
-type Pcs<P> = TwoAdicFriPcs<Val, Dft, ValMmcs<P>, ChallengeMmcs<P>>;
-type RapPhase<P> = StarkLogUpPhase<Val, Challenge, Challenger<P>>;
-
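For orientation, a sketch of how this Goldilocks stack is assembled, equivalent to the default_engine defined later in this file:

fn _goldilocks_engine_sketch() {
    // Hedged sketch: seed the Poseidon permutation deterministically and
    // build the engine with the standard fast FRI parameters.
    let perm = random_perm();
    let engine = engine_from_perm(perm, FriParameters::standard_fast());
    let _challenger = engine.new_challenger();
}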
-pub type GoldilocksPermutationConfig<P> =
-    StarkConfig<Pcs<P>, RapPhase<P>, Challenge, Challenger<P>>;
-pub type GoldilocksPoseidonConfig = GoldilocksPermutationConfig<Perm>;
-pub type GoldilocksPoseidonEngine = GoldilocksPermutationEngine<Perm>;
-
-assert_sc_compatible_with_serde!(GoldilocksPoseidonConfig);
-
-pub struct GoldilocksPermutationEngine<P>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    fri_params: FriParameters,
-    pub config: GoldilocksPermutationConfig<P>,
-    pub perm: P,
-}
-
-impl<P> StarkEngine<GoldilocksPermutationConfig<P>> for GoldilocksPermutationEngine<P>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    fn config(&self) -> &GoldilocksPermutationConfig<P> {
-        &self.config
-    }
-
-    fn new_challenger(&self) -> Challenger<P> {
-        Challenger::new(self.perm.clone())
-    }
-}
-
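The instrumentation impl that follows is what makes hash counting work. A hedged usage sketch, relying only on names from this file and config::instrument:

fn _count_permutations_sketch() {
    // Wrap the permutation so every call is tallied, then read the tally
    // back via stark_hash_statistics (implemented just below).
    let engine = engine_from_perm(random_instrumented_perm(), FriParameters::standard_fast());
    // ... run a proof with `engine` here ...
    let stats = engine.stark_hash_statistics("smoke test");
    println!("Poseidon permutations: {}", stats.stats.permutations);
}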
-impl<P: 'static> StarkEngineWithHashInstrumentation<GoldilocksPermutationConfig<Instrumented<P>>>
-    for GoldilocksPermutationEngine<Instrumented<P>>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    fn clear_instruments(&mut self) {
-        self.perm.input_lens_by_type.lock().unwrap().clear();
-    }
-    fn stark_hash_statistics<T>(&self, custom: T) -> StarkHashStatistics<T> {
-        let counter = self.perm.input_lens_by_type.lock().unwrap();
-        let permutations = counter.iter().fold(0, |total, (name, lens)| {
-            if name == type_name::<[Val; WIDTH]>() {
-                let count: usize = lens.iter().sum();
-                println!("Permutation: {name}, Count: {count}");
-                total + count
-            } else {
-                panic!("Permutation type not yet supported: {}", name);
-            }
-        });
-
-        StarkHashStatistics {
-            name: type_name::<P>().to_string(),
-            stats: HashStatistics { permutations },
-            fri_params: self.fri_params,
-            custom,
-        }
-    }
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_engine() -> GoldilocksPoseidonEngine {
-    let perm = random_perm();
-    let fri_params = FriParameters::standard_fast();
-    engine_from_perm(perm, fri_params)
-}
-
-/// `pcs_log_degree` is the upper bound on the log_2(PCS polynomial degree).
-pub fn default_config(perm: &Perm) -> GoldilocksPoseidonConfig {
-    let fri_params = FriParameters::standard_fast();
-    config_from_perm(perm, fri_params)
-}
-
-pub fn engine_from_perm<P>(perm: P, fri_params: FriParameters) -> GoldilocksPermutationEngine<P>
-where
-    P: CryptographicPermutation<[Val; WIDTH]>
-        + CryptographicPermutation<[PackedVal; WIDTH]>
-        + Clone,
-{
-    let config = config_from_perm(&perm, fri_params);
-    GoldilocksPermutationEngine {
-        config,
-        perm,
-        fri_params,
-    }
-}
-
-pub fn config_from_perm<P>(perm: &P, fri_params: FriParameters) -> GoldilocksPermutationConfig<P>
-where - P: CryptographicPermutation<[Val; WIDTH]> - + CryptographicPermutation<[PackedVal; WIDTH]> - + Clone, -{ - let hash = Hash::new(perm.clone()); - let compress = Compress::new(perm.clone()); - let val_mmcs = ValMmcs::new(hash, compress); - let challenge_mmcs = ChallengeMmcs::new(val_mmcs.clone()); - let dft = Dft::default(); - let fri_config = FriConfig { - log_blowup: fri_params.log_blowup, - num_queries: fri_params.num_queries, - proof_of_work_bits: fri_params.proof_of_work_bits, - mmcs: challenge_mmcs, - }; - let pcs = Pcs::new(dft, val_mmcs, fri_config); - let rap_phase = RapPhase::new(); - GoldilocksPermutationConfig::new(pcs, rap_phase) -} - -pub fn random_perm() -> Perm { - let seed = [42; 32]; - let mut rng = StdRng::from_seed(seed); - Perm::new_from_rng(4, 22, MdsMatrixGoldilocks, &mut rng) -} - -pub fn random_instrumented_perm() -> InstrPerm { - let perm = random_perm(); - Instrumented::new(perm) -} diff --git a/crates/stark-sdk/src/config/instrument.rs b/crates/stark-sdk/src/config/instrument.rs deleted file mode 100644 index b8cf59da0f..0000000000 --- a/crates/stark-sdk/src/config/instrument.rs +++ /dev/null @@ -1,102 +0,0 @@ -use std::{ - any::type_name, - collections::HashMap, - sync::{Arc, Mutex}, -}; - -use p3_symmetric::{ - CryptographicHasher, CryptographicPermutation, Permutation, PseudoCompressionFunction, -}; -use serde::{Deserialize, Serialize}; - -use super::FriParameters; - -pub type InstrumentCounter = Arc>>>; - -/// Wrapper to instrument a type to count function calls. -/// CAUTION: Performance may be impacted. -#[derive(Clone, Debug)] -pub struct Instrumented { - pub is_on: bool, - pub inner: T, - pub input_lens_by_type: InstrumentCounter, -} - -impl Instrumented { - pub fn new(inner: T) -> Self { - Self { - is_on: true, - inner, - input_lens_by_type: Arc::new(Mutex::new(HashMap::new())), - } - } - - fn add_len_for_type(&self, len: usize) { - if !self.is_on { - return; - } - self.input_lens_by_type - .lock() - .unwrap() - .entry(type_name::().to_string()) - .and_modify(|lens| lens.push(len)) - .or_insert(vec![len]); - } -} - -impl> Permutation for Instrumented
<P> {
-    fn permute_mut(&self, input: &mut T) {
-        self.add_len_for_type::<T>(1);
-        self.inner.permute_mut(input);
-    }
-    fn permute(&self, input: T) -> T {
-        self.add_len_for_type::<T>(1);
-        self.inner.permute(input)
-    }
-}
-
-impl<T: Clone, P: CryptographicPermutation<T>> CryptographicPermutation<T> for Instrumented<P>
{} - -// Note: this does not currently need to be used if the implemeation is derived from a CryptographicPermutation: -// we can instrument the permutation itself -impl> PseudoCompressionFunction - for Instrumented -{ - fn compress(&self, input: [T; N]) -> T { - self.add_len_for_type::(N); - self.inner.compress(input) - } -} - -impl> CryptographicHasher - for Instrumented -{ - fn hash_iter(&self, input: I) -> Out - where - I: IntoIterator, - { - if self.is_on { - let input = input.into_iter().collect::>(); - self.add_len_for_type::<(Item, Out)>(input.len()); - self.inner.hash_iter(input) - } else { - self.inner.hash_iter(input) - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct HashStatistics { - // pub cryptographic_hasher: usize, - // pub pseudo_compression_function: usize, - pub permutations: usize, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct StarkHashStatistics { - /// Identifier for the hash permutation - pub name: String, - pub stats: HashStatistics, - pub fri_params: FriParameters, - pub custom: T, -} diff --git a/crates/stark-sdk/src/config/mod.rs b/crates/stark-sdk/src/config/mod.rs deleted file mode 100644 index 06c34ef5a9..0000000000 --- a/crates/stark-sdk/src/config/mod.rs +++ /dev/null @@ -1,42 +0,0 @@ -use derive_more::Display; -use serde::{Deserialize, Serialize}; -use tracing::Level; -use tracing_forest::ForestLayer; -use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry}; - -pub mod baby_bear_blake3; -pub mod baby_bear_bytehash; -pub mod baby_bear_keccak; -pub mod baby_bear_poseidon2; -/// Stark Config for root stark, which field is BabyBear but polynomials are committed in Bn254. -pub mod baby_bear_poseidon2_root; -pub mod fri_params; -pub mod goldilocks_poseidon; -pub mod instrument; - -pub use fri_params::FriParameters; - -pub fn setup_tracing() { - setup_tracing_with_log_level(Level::INFO); -} - -pub fn setup_tracing_with_log_level(level: Level) { - // Set up tracing: - let env_filter = EnvFilter::builder() - .with_default_directive(level.into()) - .from_env_lossy(); - let _ = Registry::default() - .with(env_filter) - .with(ForestLayer::default()) - .try_init(); -} - -#[derive(Clone, Copy, Default, Display, Debug, Serialize, Deserialize)] -#[serde(rename_all = "PascalCase")] -pub enum EngineType { - #[default] - BabyBearPoseidon2, - BabyBearBlake3, - BabyBearKeccak, - GoldilocksPoseidon, -} diff --git a/crates/stark-sdk/src/cost_estimate.rs b/crates/stark-sdk/src/cost_estimate.rs deleted file mode 100644 index 900709c1a3..0000000000 --- a/crates/stark-sdk/src/cost_estimate.rs +++ /dev/null @@ -1,222 +0,0 @@ -use std::{marker::PhantomData, ops::Add}; - -use openvm_stark_backend::{ - config::{StarkGenericConfig, Val}, - keygen::types::StarkVerifyingKey, - p3_field::AbstractExtensionField, -}; - -use crate::config::FriParameters; - -/// Properties of a multi-trace circuit necessary to estimate verifier cost. -#[derive(Clone, Copy, Debug)] -pub struct VerifierCostParameters { - /// Total number of base field columns across all AIR traces before challenge. - pub num_main_columns: usize, - /// Total number of base field columns across all AIR traces for logup permutation. - pub num_perm_columns: usize, - /// log_2 Maximum height of an AIR trace. - pub log_max_height: usize, - /// Degree of quotient polynomial. This is `max_constraint_degree - 1`. - pub quotient_degree: usize, -} - -/// Mmcs batch verification consist of hashing the leaf and then a normal Merkle proof. 
-/// We separate the cost of hashing (which requires proper padding to be a crytographic hash) from the cost of -/// 2-to-1 compression function on the hash digest because in tree proofs the internal layers do not need to use -/// a compression function with padding. -/// -/// Currently the estimate ignores the additional details of hashing in matrices of different heights. -#[derive(Clone, Copy, Debug)] -pub struct MmcsVerifyBatchCostEstimate { - /// Hash cost in terms of number of field elments to hash. To convert to true hash cost, it depends on the rate - /// of the cryptographic hash. - pub num_f_to_hash: usize, - /// Number of calls of 2-to-1 compression function. - pub num_compress: usize, -} - -impl MmcsVerifyBatchCostEstimate { - /// `width` is number of base field columns. - /// `max_log_height_lde` is the height of the MMCS (which includes blowup) - pub fn from_dim(width: usize, max_log_height_lde: usize) -> Self { - Self { - num_f_to_hash: width, - num_compress: max_log_height_lde, - } - } -} - -impl Add for MmcsVerifyBatchCostEstimate { - type Output = Self; - - fn add(self, rhs: Self) -> Self::Output { - Self { - num_f_to_hash: self.num_f_to_hash + rhs.num_f_to_hash, - num_compress: self.num_compress + rhs.num_compress, - } - } -} - -#[derive(Clone, Copy, Debug)] -pub struct FriOpenInputCostEstimate { - /// Cost from MMCS batch verification. - pub mmcs: MmcsVerifyBatchCostEstimate, - /// Number of operations of the form $+ \alpha^? \frac{M_j(\zeta) - y_{ij}}{\zeta - z_i}$ in the reduced opening evaluation. - pub num_ro_eval: usize, -} - -impl FriOpenInputCostEstimate { - /// `width` is number of base field columns. - /// `max_log_height` is the trace height, before blowup. - /// `num_points` is number of points to open. - pub fn new( - width: usize, - max_log_height: usize, - num_points: usize, - fri_params: FriParameters, - ) -> Self { - let mut mmcs = - MmcsVerifyBatchCostEstimate::from_dim(width, max_log_height + fri_params.log_blowup); - mmcs.num_compress *= fri_params.num_queries; - mmcs.num_f_to_hash *= fri_params.num_queries; - let num_ro_eval = width * num_points * fri_params.num_queries; - Self { - mmcs: MmcsVerifyBatchCostEstimate::from_dim(width, max_log_height), - num_ro_eval, - } - } -} - -impl Add for FriOpenInputCostEstimate { - type Output = Self; - - fn add(self, rhs: Self) -> Self::Output { - Self { - mmcs: self.mmcs + rhs.mmcs, - num_ro_eval: self.num_ro_eval + rhs.num_ro_eval, - } - } -} - -pub struct FriQueryCostEstimate { - /// Cost from MMCS batch verification. - pub mmcs: MmcsVerifyBatchCostEstimate, - /// Number of single FRI fold evaluations: `e0 + (beta - xs[0]) * (e1 - e0) / (xs[1] - xs[0])`. - pub num_fri_folds: usize, -} - -impl FriQueryCostEstimate { - /// `max_log_height` is the trace height, before blowup. 
- pub fn new(max_log_height: usize, fri_params: FriParameters) -> Self { - let mut mmcs = MmcsVerifyBatchCostEstimate { - num_f_to_hash: 2 * max_log_height, - num_compress: max_log_height * (max_log_height + fri_params.log_blowup - 1) / 2, - }; - mmcs.num_compress *= fri_params.num_queries; - mmcs.num_f_to_hash *= fri_params.num_queries; - let num_fri_folds = max_log_height * fri_params.num_queries; - Self { - mmcs, - num_fri_folds, - } - } -} - -impl Add for FriQueryCostEstimate { - type Output = Self; - - fn add(self, rhs: Self) -> Self::Output { - Self { - mmcs: self.mmcs + rhs.mmcs, - num_fri_folds: self.num_fri_folds + rhs.num_fri_folds, - } - } -} - -pub struct FriVerifierCostEstimate { - pub open_input: FriOpenInputCostEstimate, - pub query: FriQueryCostEstimate, - /// We currently ignore the constraint evaluation cost because it does not scale with number of FRI queries. - pub constraint_eval: PhantomData, -} - -impl FriVerifierCostEstimate { - pub fn new( - params: VerifierCostParameters, - fri_params: FriParameters, - ext_degree: usize, - ) -> Self { - // Go through different rounds: preprocessed, main, permutation, quotient - - // TODO: ignoring preprocessed trace opening for now - - // Main - // Currently assumes opening at just zeta, omega * zeta - let mut open_input = FriOpenInputCostEstimate::new( - params.num_main_columns, - params.log_max_height, - 2, - fri_params, - ); - let mut query = FriQueryCostEstimate::new(params.log_max_height, fri_params); - - // Permutation - // Currently assumes opening at just zeta, omega * zeta - open_input = open_input - + FriOpenInputCostEstimate::new( - params.num_perm_columns, - params.log_max_height, - 2, - fri_params, - ); - query = query + FriQueryCostEstimate::new(params.log_max_height, fri_params); - - // Add quotient polynomial opening contribution - // Quotient only opens at single point zeta - open_input = open_input - + FriOpenInputCostEstimate::new( - params.quotient_degree * ext_degree, - params.log_max_height, - 1, - fri_params, - ); - query = query + FriQueryCostEstimate::new(params.log_max_height, fri_params); - - Self { - open_input, - query, - constraint_eval: PhantomData, - } - } - - pub fn from_vk( - vks: &[&StarkVerifyingKey], - fri_params: FriParameters, - log_max_height: usize, - ) -> Self { - let num_main_columns: usize = vks - .iter() - .map(|vk| { - vk.params.width.common_main + vk.params.width.cached_mains.iter().sum::() - }) - .sum(); - let ext_degree = >>::D; - let num_perm_columns: usize = vks - .iter() - .map(|vk| vk.params.width.after_challenge.iter().sum::()) - .sum::() - * ext_degree; - let quotient_degree = vks.iter().map(|vk| vk.quotient_degree).max().unwrap_or(0); - Self::new( - VerifierCostParameters { - num_main_columns, - num_perm_columns, - log_max_height, - quotient_degree, - }, - fri_params, - ext_degree, - ) - } -} diff --git a/crates/stark-sdk/src/dummy_airs/fib_air/air.rs b/crates/stark-sdk/src/dummy_airs/fib_air/air.rs deleted file mode 100644 index 330408be33..0000000000 --- a/crates/stark-sdk/src/dummy_airs/fib_air/air.rs +++ /dev/null @@ -1,55 +0,0 @@ -use std::borrow::Borrow; - -use openvm_stark_backend::{ - p3_air::{Air, AirBuilder, AirBuilderWithPublicValues, BaseAir}, - p3_matrix::Matrix, - rap::{BaseAirWithPublicValues, PartitionedBaseAir}, -}; - -use super::columns::{FibonacciCols, NUM_FIBONACCI_COLS}; - -#[derive(Clone, Copy)] -pub struct FibonacciAir; - -impl PartitionedBaseAir for FibonacciAir {} -impl BaseAir for FibonacciAir { - fn width(&self) -> usize { - NUM_FIBONACCI_COLS - } 
-} - -impl BaseAirWithPublicValues for FibonacciAir { - fn num_public_values(&self) -> usize { - 3 - } -} - -impl Air for FibonacciAir { - fn eval(&self, builder: &mut AB) { - let main = builder.main(); - let pis = builder.public_values(); - - let a = pis[0]; - let b = pis[1]; - let x = pis[2]; - - let (local, next) = (main.row_slice(0), main.row_slice(1)); - let local: &FibonacciCols = (*local).borrow(); - let next: &FibonacciCols = (*next).borrow(); - - let mut when_first_row = builder.when_first_row(); - - when_first_row.assert_eq(local.left, a); - when_first_row.assert_eq(local.right, b); - - let mut when_transition = builder.when_transition(); - - // a' <- b - when_transition.assert_eq(local.right, next.left); - - // b' <- a + b - when_transition.assert_eq(local.left + local.right, next.right); - - builder.when_last_row().assert_eq(local.right, x); - } -} diff --git a/crates/stark-sdk/src/dummy_airs/fib_air/chip.rs b/crates/stark-sdk/src/dummy_airs/fib_air/chip.rs deleted file mode 100644 index 5356494b64..0000000000 --- a/crates/stark-sdk/src/dummy_airs/fib_air/chip.rs +++ /dev/null @@ -1,67 +0,0 @@ -use std::sync::Arc; - -use openvm_stark_backend::{ - config::{StarkGenericConfig, Val}, - p3_field::PrimeField32, - p3_matrix::Matrix, - prover::types::{AirProofInput, AirProofRawInput}, - rap::AnyRap, - Chip, ChipUsageGetter, -}; - -use super::{air::FibonacciAir, trace::generate_trace_rows}; -use crate::dummy_airs::fib_air::columns::NUM_FIBONACCI_COLS; - -#[derive(Clone, Debug)] -pub struct FibonacciChip { - /// The 0th number in the fibonacci sequence. - pub a: u32, - /// The 1st number in the fibonacci sequence. - pub b: u32, - /// Target n-th number in the fibonacci sequence. - pub n: usize, -} - -impl FibonacciChip { - pub fn new(a: u32, b: u32, n: usize) -> Self { - assert!(n.is_power_of_two()); - Self { a, b, n } - } -} - -impl Chip for FibonacciChip -where - Val: PrimeField32, -{ - fn air(&self) -> Arc> { - Arc::new(FibonacciAir) - } - - fn generate_air_proof_input(self) -> AirProofInput { - let common_main = generate_trace_rows::>(self.a, self.b, self.n); - let a = common_main.get(0, 0); - let b = common_main.get(0, 1); - let last_val = common_main.get(self.n - 1, 1); - AirProofInput { - air: self.air(), - cached_mains_pdata: vec![], - raw: AirProofRawInput { - cached_mains: vec![], - common_main: Some(generate_trace_rows::>(self.a, self.b, self.n)), - public_values: vec![a, b, last_val], - }, - } - } -} - -impl ChipUsageGetter for FibonacciChip { - fn air_name(&self) -> String { - "FibonacciAir".to_string() - } - fn current_trace_height(&self) -> usize { - self.n - } - fn trace_width(&self) -> usize { - NUM_FIBONACCI_COLS - } -} diff --git a/crates/stark-sdk/src/dummy_airs/fib_air/columns.rs b/crates/stark-sdk/src/dummy_airs/fib_air/columns.rs deleted file mode 100644 index 52b19739af..0000000000 --- a/crates/stark-sdk/src/dummy_airs/fib_air/columns.rs +++ /dev/null @@ -1,16 +0,0 @@ -use openvm_circuit_primitives_derive::AlignedBorrow; - -pub const NUM_FIBONACCI_COLS: usize = 2; - -#[repr(C)] -#[derive(AlignedBorrow)] -pub struct FibonacciCols { - pub left: F, - pub right: F, -} - -impl FibonacciCols { - pub const fn new(left: F, right: F) -> FibonacciCols { - FibonacciCols { left, right } - } -} diff --git a/crates/stark-sdk/src/dummy_airs/fib_air/mod.rs b/crates/stark-sdk/src/dummy_airs/fib_air/mod.rs deleted file mode 100644 index 331651e890..0000000000 --- a/crates/stark-sdk/src/dummy_airs/fib_air/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub mod air; -pub mod chip; -pub mod 
columns; -pub mod trace; diff --git a/crates/stark-sdk/src/dummy_airs/fib_air/trace.rs b/crates/stark-sdk/src/dummy_airs/fib_air/trace.rs deleted file mode 100644 index 2d3cafe128..0000000000 --- a/crates/stark-sdk/src/dummy_airs/fib_air/trace.rs +++ /dev/null @@ -1,16 +0,0 @@ -use openvm_stark_backend::{p3_field::PrimeField32, p3_matrix::dense::RowMajorMatrix}; - -use super::columns::NUM_FIBONACCI_COLS; - -/// n is number of rows in the trace -pub fn generate_trace_rows(a: u32, b: u32, n: usize) -> RowMajorMatrix { - assert!(n.is_power_of_two()); - - let mut rows = vec![vec![F::from_canonical_u32(a), F::from_canonical_u32(b)]]; - - for i in 1..n { - rows.push(vec![rows[i - 1][1], rows[i - 1][0] + rows[i - 1][1]]); - } - - RowMajorMatrix::new(rows.concat(), NUM_FIBONACCI_COLS) -} diff --git a/crates/stark-sdk/src/dummy_airs/interaction/dummy_interaction_air.rs b/crates/stark-sdk/src/dummy_airs/interaction/dummy_interaction_air.rs deleted file mode 100644 index 5803a9189d..0000000000 --- a/crates/stark-sdk/src/dummy_airs/interaction/dummy_interaction_air.rs +++ /dev/null @@ -1,285 +0,0 @@ -//! Air with columns -//! | count | fields[..] | -//! -//! Chip will either send or receive the fields with multiplicity count. -//! The main Air has no constraints, the only constraints are specified by the Chip trait - -use std::{iter, sync::Arc}; - -use itertools::izip; -use openvm_stark_backend::{ - air_builders::PartitionedAirBuilder, - config::{StarkGenericConfig, Val}, - interaction::{InteractionBuilder, InteractionType}, - p3_air::{Air, BaseAir}, - p3_field::{AbstractField, Field}, - p3_matrix::{dense::RowMajorMatrix, Matrix}, - prover::types::{AirProofInput, AirProofRawInput, CommittedTraceData, TraceCommitter}, - rap::{AnyRap, BaseAirWithPublicValues, PartitionedBaseAir}, - Chip, ChipUsageGetter, -}; - -pub struct DummyInteractionCols; -impl DummyInteractionCols { - pub fn count_col() -> usize { - 0 - } - pub fn field_col(field_idx: usize) -> usize { - field_idx + 1 - } -} - -#[derive(Clone, Copy)] -pub struct DummyInteractionAir { - field_width: usize, - /// Send if true. Receive if false. - pub is_send: bool, - bus_index: usize, - /// If true, then | count | and | fields[..] | are in separate main trace partitions. 
- pub partition: bool, -} - -impl DummyInteractionAir { - pub fn new(field_width: usize, is_send: bool, bus_index: usize) -> Self { - Self { - field_width, - is_send, - bus_index, - partition: false, - } - } - - pub fn partition(self) -> Self { - Self { - partition: true, - ..self - } - } - - pub fn field_width(&self) -> usize { - self.field_width - } -} - -impl BaseAirWithPublicValues for DummyInteractionAir {} -impl PartitionedBaseAir for DummyInteractionAir { - fn cached_main_widths(&self) -> Vec { - if self.partition { - vec![self.field_width] - } else { - vec![] - } - } - fn common_main_width(&self) -> usize { - if self.partition { - 1 - } else { - 1 + self.field_width - } - } -} -impl BaseAir for DummyInteractionAir { - fn width(&self) -> usize { - 1 + self.field_width - } - - fn preprocessed_trace(&self) -> Option> { - None - } -} - -impl Air for DummyInteractionAir { - fn eval(&self, builder: &mut AB) { - let (fields, count) = if self.partition { - let local_0 = builder.common_main().row_slice(0); - let local_1 = builder.cached_mains()[0].row_slice(0); - let count = local_0[0]; - let fields = local_1.to_vec(); - (fields, count) - } else { - let main = builder.main(); - let local = main.row_slice(0); - let count = local[DummyInteractionCols::count_col()]; - let fields: Vec<_> = (0..self.field_width) - .map(|i| local[DummyInteractionCols::field_col(i)]) - .collect(); - (fields, count) - }; - let interaction_type = if self.is_send { - InteractionType::Send - } else { - InteractionType::Receive - }; - builder.push_interaction(self.bus_index, fields, count, interaction_type) - } -} - -/// Note: in principle, committing cached trace is out of scope of a chip. But this chip is for -/// usually testing, so we support it for convenience. -pub struct DummyInteractionChip<'a, SC: StarkGenericConfig> { - trace_committer: Option>, - // common_main: Option>>, - data: Option, - pub air: DummyInteractionAir, -} - -impl Clone for DummyInteractionChip<'_, SC> { - fn clone(&self) -> Self { - Self { - trace_committer: self.trace_committer.clone(), - data: self.data.clone(), - air: self.air, - } - } -} - -#[derive(Debug, Clone)] -pub struct DummyInteractionData { - pub count: Vec, - pub fields: Vec>, -} - -impl<'a, SC: StarkGenericConfig> DummyInteractionChip<'a, SC> -where - Val: AbstractField, -{ - pub fn new_without_partition(field_width: usize, is_send: bool, bus_index: usize) -> Self { - let air = DummyInteractionAir::new(field_width, is_send, bus_index); - Self { - trace_committer: None, - data: None, - air, - } - } - pub fn new_with_partition( - pcs: &'a SC::Pcs, - field_width: usize, - is_send: bool, - bus_index: usize, - ) -> Self { - let air = DummyInteractionAir::new(field_width, is_send, bus_index).partition(); - Self { - trace_committer: Some(TraceCommitter::new(pcs)), - data: None, - air, - } - } - pub fn load_data(&mut self, data: DummyInteractionData) { - let DummyInteractionData { count, fields } = &data; - let h = count.len(); - assert_eq!(fields.len(), h); - let w = fields[0].len(); - assert_eq!(self.air.field_width, w); - assert!(fields.iter().all(|r| r.len() == w)); - self.data = Some(data); - } - fn generate_traces_with_partition( - &self, - data: DummyInteractionData, - ) -> (RowMajorMatrix>, CommittedTraceData) { - let DummyInteractionData { - mut count, - mut fields, - } = data; - let h = count.len(); - assert_eq!(fields.len(), h); - let w = fields[0].len(); - assert_eq!(self.air.field_width, w); - assert!(fields.iter().all(|r| r.len() == w)); - let h = h.next_power_of_two(); - 
count.resize(h, 0); - fields.resize(h, vec![0; w]); - let common_main_val: Vec<_> = count - .into_iter() - .map(Val::::from_canonical_u32) - .collect(); - let cached_trace_val: Vec<_> = fields - .into_iter() - .flatten() - .map(Val::::from_canonical_u32) - .collect(); - let cached_trace = RowMajorMatrix::new(cached_trace_val, w); - let prover_data = self - .trace_committer - .as_ref() - .unwrap() - .commit(vec![cached_trace.clone()]); - ( - RowMajorMatrix::new(common_main_val, 1), - CommittedTraceData { - raw_data: Arc::new(cached_trace), - prover_data, - }, - ) - } - - fn generate_traces_without_partition( - &self, - data: DummyInteractionData, - ) -> RowMajorMatrix> { - let DummyInteractionData { count, fields } = data; - let h = count.len(); - assert_eq!(fields.len(), h); - let w = fields[0].len(); - assert_eq!(self.air.field_width, w); - assert!(fields.iter().all(|r| r.len() == w)); - let common_main_val: Vec<_> = izip!(count, fields) - .flat_map(|(count, fields)| iter::once(count).chain(fields)) - .chain(iter::repeat(0)) - .take((w + 1) * h.next_power_of_two()) - .map(Val::::from_canonical_u32) - .collect(); - RowMajorMatrix::new(common_main_val, w + 1) - } -} - -impl Chip for DummyInteractionChip<'_, SC> { - fn air(&self) -> Arc> { - Arc::new(self.air) - } - - fn generate_air_proof_input(self) -> AirProofInput { - assert!(self.data.is_some()); - let data = self.data.clone().unwrap(); - if self.trace_committer.is_some() { - let (common_main, cached_main) = self.generate_traces_with_partition(data); - AirProofInput { - air: self.air(), - cached_mains_pdata: vec![cached_main.prover_data], - raw: AirProofRawInput { - cached_mains: vec![cached_main.raw_data], - common_main: Some(common_main), - public_values: vec![], - }, - } - } else { - let common_main = self.generate_traces_without_partition(data); - AirProofInput { - air: self.air(), - cached_mains_pdata: vec![], - raw: AirProofRawInput { - cached_mains: vec![], - common_main: Some(common_main), - public_values: vec![], - }, - } - } - } -} - -impl ChipUsageGetter for DummyInteractionChip<'_, SC> { - fn air_name(&self) -> String { - "DummyInteractionAir".to_string() - } - fn current_trace_height(&self) -> usize { - if let Some(data) = &self.data { - data.count.len() - } else { - 0 - } - } - - fn trace_width(&self) -> usize { - self.air.field_width + 1 - } -} diff --git a/crates/stark-sdk/src/dummy_airs/interaction/mod.rs b/crates/stark-sdk/src/dummy_airs/interaction/mod.rs deleted file mode 100644 index f611cb85a8..0000000000 --- a/crates/stark-sdk/src/dummy_airs/interaction/mod.rs +++ /dev/null @@ -1,64 +0,0 @@ -use std::sync::Arc; - -use itertools::{izip, Itertools}; -use openvm_stark_backend::{ - keygen::MultiStarkKeygenBuilder, - p3_matrix::dense::RowMajorMatrix, - prover::{ - types::{AirProofInput, AirProofRawInput, ProofInput}, - MultiTraceStarkProver, - }, - rap::AnyRap, - verifier::{MultiTraceStarkVerifier, VerificationError}, -}; -use p3_baby_bear::BabyBear; - -use crate::config::{self, baby_bear_poseidon2::BabyBearPoseidon2Config}; - -pub mod dummy_interaction_air; - -type Val = BabyBear; - -pub fn verify_interactions( - traces: Vec>, - airs: Vec>>, - pis: Vec>, -) -> Result<(), VerificationError> { - let perm = config::baby_bear_poseidon2::random_perm(); - let config = config::baby_bear_poseidon2::default_config(&perm); - - let mut keygen_builder = MultiStarkKeygenBuilder::new(&config); - let air_ids = airs - .iter() - .map(|air| keygen_builder.add_air(air.clone())) - .collect_vec(); - let pk = keygen_builder.generate_pk(); 
- let vk = pk.get_vk(); - - let per_air: Vec<_> = izip!(air_ids, airs, traces, pis) - .map(|(air_id, air, trace, pvs)| { - ( - air_id, - AirProofInput { - air, - cached_mains_pdata: vec![], - raw: AirProofRawInput { - cached_mains: vec![], - common_main: Some(trace), - public_values: pvs, - }, - }, - ) - }) - .collect(); - - let prover = MultiTraceStarkProver::new(&config); - let mut challenger = config::baby_bear_poseidon2::Challenger::new(perm.clone()); - let proof = prover.prove(&mut challenger, &pk, ProofInput { per_air }); - - // Verify the proof: - // Start from clean challenger - let mut challenger = config::baby_bear_poseidon2::Challenger::new(perm.clone()); - let verifier = MultiTraceStarkVerifier::new(prover.config); - verifier.verify(&mut challenger, &vk, &proof) -} diff --git a/crates/stark-sdk/src/dummy_airs/mod.rs b/crates/stark-sdk/src/dummy_airs/mod.rs deleted file mode 100644 index faa4d82118..0000000000 --- a/crates/stark-sdk/src/dummy_airs/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod fib_air; -/// Some dummy AIRs for testing. -pub mod interaction; diff --git a/crates/stark-sdk/src/engine.rs b/crates/stark-sdk/src/engine.rs deleted file mode 100644 index 7d21f47d2b..0000000000 --- a/crates/stark-sdk/src/engine.rs +++ /dev/null @@ -1,94 +0,0 @@ -use std::sync::Arc; - -pub use openvm_stark_backend::engine::StarkEngine; -use openvm_stark_backend::{ - config::{StarkGenericConfig, Val}, - engine::VerificationData, - p3_matrix::dense::DenseMatrix, - prover::types::AirProofInput, - rap::AnyRap, - verifier::VerificationError, -}; -use tracing::Level; - -use crate::config::{instrument::StarkHashStatistics, setup_tracing_with_log_level, FriParameters}; - -pub trait StarkEngineWithHashInstrumentation: StarkEngine { - fn clear_instruments(&mut self); - fn stark_hash_statistics(&self, custom: T) -> StarkHashStatistics; -} - -/// All necessary data to verify a Stark proof. -pub struct VerificationDataWithFriParams { - pub data: VerificationData, - pub fri_params: FriParameters, -} - -/// `stark-backend::prover::types::ProofInput` without specifying AIR IDs. -pub struct ProofInputForTest { - pub per_air: Vec>, -} - -impl ProofInputForTest { - pub fn run_test( - self, - engine: &impl StarkFriEngine, - ) -> Result, VerificationError> { - engine.run_test(self.per_air) - } -} - -/// Stark engine using Fri. 
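As a concrete end-to-end use of the trait below, a hedged test sketch assuming the Fibonacci test AIR and helpers from this crate (dummy_airs::fib_air, utils::to_field_vec, any_rap_arc_vec!) together with the BabyBearPoseidon2Engine config:

fn _fibonacci_end_to_end_sketch() -> Result<(), VerificationError> {
    // Trace with 8 rows starting from (0, 1); the last row's right value is
    // the Fibonacci number 21, so the public values are [a, b, x] = [0, 1, 21].
    let trace = generate_trace_rows::<BabyBear>(0, 1, 8);
    let pvs = vec![to_field_vec::<BabyBear>(vec![0, 1, 21])];
    BabyBearPoseidon2Engine::run_simple_test_fast(
        any_rap_arc_vec![FibonacciAir],
        vec![trace],
        pvs,
    )?;
    Ok(())
}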
-pub trait StarkFriEngine: StarkEngine + Sized { - fn new(fri_parameters: FriParameters) -> Self; - fn fri_params(&self) -> FriParameters; - fn run_test( - &self, - air_proof_inputs: Vec>, - ) -> Result, VerificationError> - where - AirProofInput: Send + Sync, - { - setup_tracing_with_log_level(Level::WARN); - let data = >::run_test_impl(self, air_proof_inputs)?; - Ok(VerificationDataWithFriParams { - data, - fri_params: self.fri_params(), - }) - } - fn run_test_fast( - air_proof_inputs: Vec>, - ) -> Result, VerificationError> - where - AirProofInput: Send + Sync, - { - let engine = Self::new(FriParameters::standard_fast()); - engine.run_test(air_proof_inputs) - } - fn run_simple_test_impl( - &self, - chips: Vec>>, - traces: Vec>>, - public_values: Vec>>, - ) -> Result, VerificationError> - where - AirProofInput: Send + Sync, - { - self.run_test(AirProofInput::multiple_simple(chips, traces, public_values)) - } - fn run_simple_test_fast( - chips: Vec>>, - traces: Vec>>, - public_values: Vec>>, - ) -> Result, VerificationError> { - let engine = Self::new(FriParameters::standard_fast()); - StarkFriEngine::<_>::run_simple_test_impl(&engine, chips, traces, public_values) - } - fn run_simple_test_no_pis_fast( - chips: Vec>>, - traces: Vec>>, - ) -> Result, VerificationError> { - let pis = vec![vec![]; chips.len()]; - >::run_simple_test_fast(chips, traces, pis) - } -} diff --git a/crates/stark-sdk/src/lib.rs b/crates/stark-sdk/src/lib.rs deleted file mode 100644 index 4d1396a58d..0000000000 --- a/crates/stark-sdk/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub use openvm_stark_backend; -pub use p3_baby_bear; -pub use p3_blake3; -pub use p3_bn254_fr; -pub use p3_goldilocks; -pub use p3_keccak; - -pub mod bench; -pub mod config; -/// Verifier cost estimation -pub mod cost_estimate; -pub mod dummy_airs; -pub mod engine; -pub mod utils; diff --git a/crates/stark-sdk/src/utils.rs b/crates/stark-sdk/src/utils.rs deleted file mode 100644 index 32063f213d..0000000000 --- a/crates/stark-sdk/src/utils.rs +++ /dev/null @@ -1,56 +0,0 @@ -use itertools::Itertools; -use openvm_stark_backend::p3_field::AbstractField; -use rand::{rngs::StdRng, Rng, SeedableRng}; - -/// Deterministic seeded RNG, for testing use -pub fn create_seeded_rng() -> StdRng { - let seed = [42; 32]; - StdRng::from_seed(seed) -} - -pub fn create_seeded_rng_with_seed(seed: u64) -> StdRng { - let seed_be = seed.to_be_bytes(); - let mut seed = [0u8; 32]; - seed[24..32].copy_from_slice(&seed_be); - StdRng::from_seed(seed) -} - -// Returns row major matrix -pub fn generate_random_matrix( - mut rng: impl Rng, - height: usize, - width: usize, -) -> Vec> { - (0..height) - .map(|_| { - (0..width) - .map(|_| F::from_wrapped_u32(rng.gen())) - .collect_vec() - }) - .collect_vec() -} - -pub fn to_field_vec(v: Vec) -> Vec { - v.into_iter().map(F::from_canonical_u32).collect() -} - -/// A macro to create a `Vec>>` from a list of AIRs because Rust cannot infer the -/// type correctly when using `vec!`. -#[macro_export] -macro_rules! any_rap_arc_vec { - [$($e:expr),*] => { - { - let chips: Vec>> = vec![$(std::sync::Arc::new($e)),*]; - chips - } - }; -} - -#[macro_export] -macro_rules! 
assert_sc_compatible_with_serde { - ($sc:ty) => { - static_assertions::assert_impl_all!(openvm_stark_backend::keygen::types::MultiStarkProvingKey<$sc>: serde::Serialize, serde::de::DeserializeOwned); - static_assertions::assert_impl_all!(openvm_stark_backend::keygen::types::MultiStarkVerifyingKey<$sc>: serde::Serialize, serde::de::DeserializeOwned); - static_assertions::assert_impl_all!(openvm_stark_backend::prover::types::Proof<$sc>: serde::Serialize, serde::de::DeserializeOwned); - }; -} diff --git a/crates/stark-sdk/tests/serde_type.rs b/crates/stark-sdk/tests/serde_type.rs deleted file mode 100644 index bbbd8cadc0..0000000000 --- a/crates/stark-sdk/tests/serde_type.rs +++ /dev/null @@ -1,6 +0,0 @@ -use openvm_instructions::exe::VmExe; -use p3_baby_bear::BabyBear; -use serde::{de::DeserializeOwned, Serialize}; -use static_assertions::assert_impl_all; - -assert_impl_all!(VmExe: Serialize, DeserializeOwned); diff --git a/docs/specs/vm/stark.md b/docs/specs/vm/stark.md index 94de494dae..2c12e62577 100644 --- a/docs/specs/vm/stark.md +++ b/docs/specs/vm/stark.md @@ -5,7 +5,7 @@ randomness between AIR matrices to enable permutation arguments such as log-up. In the following, we will refer to a circuit as a collection of AIR matrices (also referred to as chips) of possibly different heights, which may communicate with one another over buses using a log-up permutation argument. We refer to -messages sent to such a bus as [interactions](../../../stark-backend/src/interaction/README.md). +messages sent to such a bus as [interactions](https://github.com/openvm-org/stark-backend/tree/main/crates/stark-backend/src/interaction). Our framework is modular and allows the creation of custom VM circuits to support different instruction sets that follow our overall ISA framework.
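To make the interaction/bus model referenced above concrete, a hedged sketch using the dummy interaction AIRs and the verify_interactions helper removed earlier in this patch (one single-field message [7] sent and received once over bus 0):

fn _balanced_bus_sketch() -> Result<(), VerificationError> {
    // A send AIR and a receive AIR exchange the message [7] with
    // multiplicity 1; balanced sends and receives make log-up verify.
    let sender = DummyInteractionAir::new(1, true, 0);
    let receiver = DummyInteractionAir::new(1, false, 0);
    let traces = vec![
        RowMajorMatrix::new(to_field_vec(vec![1, 7]), 2), // | count | field |
        RowMajorMatrix::new(to_field_vec(vec![1, 7]), 2),
    ];
    verify_interactions(traces, any_rap_arc_vec![sender, receiver], vec![vec![], vec![]])
}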