Super circuit (#73)
* refactor challenges API

* implement super circuit

* `new_from_state` and witness by reference

* fix inconsistent endianness in RLC

* super circuit works

* full validators tree

* fix state circuit padding

* prove full state tree

* add state_root instances

* add target_epoch instance

* remote dependencies

* refactor

* gh actions: run test with `release` flag

* gh action: run test_super_circuit only

* refactor
nulltea authored Jul 31, 2023
1 parent 7eec358 commit cb54ee7
Showing 30 changed files with 1,282 additions and 762 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -29,4 +29,4 @@ jobs:
- name: Check
run: cargo check --all
- name: Check
- run: RUST_LOG="preprocessor,eth-types,gadgets,zkcasper-circuits=debug" cargo test --all
+ run: RUST_LOG="preprocessor,eth-types,gadgets,zkcasper-circuits=debug" cargo test --release test_super_circuit
20 changes: 16 additions & 4 deletions eth-types/src/curve.rs
@@ -1,3 +1,4 @@
+ use std::array::TryFromSliceError;
use std::iter;

use halo2_ecc::fields::PrimeField;
@@ -8,6 +9,7 @@ use halo2curves::FieldExt;
use itertools::Itertools;
use pasta_curves::arithmetic::SqrtRatio;
use pasta_curves::group::GroupEncoding;
+ use pasta_curves::group::UncompressedEncoding;

use crate::Field;

@@ -17,9 +19,13 @@ pub trait AppCurveExt: CurveExt<AffineExt: CurveAffineExt> {
/// Prime field of order $q = p^k$ where k is the embedding degree.
type Fq: PrimeField + FieldExt + Halo2Field = Self::Fp;
/// Affine version of the curve.
- type Affine: CurveAffineExt<Base = Self::Fq> + GroupEncoding<Repr = Self::CompressedRepr>;
+ type Affine: CurveAffineExt<Base = Self::Fq>
+     + GroupEncoding<Repr = Self::CompressedRepr>
+     + UncompressedEncoding<Uncompressed = Self::UnompressedRepr>;
/// Compressed representation of the curve.
- type CompressedRepr: TryFrom<Vec<u8>, Error = std::array::TryFromSliceError>;
+ type CompressedRepr: TryFrom<Vec<u8>, Error = TryFromSliceError>;
+ /// Uncompressed representation of the curve.
+ type UnompressedRepr: TryFrom<Vec<u8>, Error = TryFromSliceError>;
/// Constant $b$ in the curve equation $y^2 = x^3 + b$.
const B: u64;
// Bytes needed to encode [`Self::Fq];
@@ -64,12 +70,16 @@ pub trait HashCurveExt: AppCurveExt<Fq: SqrtRatio> {

mod bls12_381 {
use super::*;
- use halo2curves::bls12_381::{Fq, Fq2, G1Affine, G1Compressed, G2Affine, G2Compressed, G1, G2};
+ use halo2curves::bls12_381::{
+     Fq, Fq2, G1Affine, G1Compressed, G1Uncompressed, G2Affine, G2Compressed, G2Uncompressed,
+     G1, G2,
+ };

impl AppCurveExt for G1 {
type Affine = G1Affine;
type Fp = Fq;
type CompressedRepr = G1Compressed;
+ type UnompressedRepr = G1Uncompressed;
const BYTES_FQ: usize = 48;
const BYTES_UNCOMPRESSED: usize = Self::BYTES_FQ * 2;
const LIMB_BITS: usize = 112;
@@ -86,6 +96,7 @@ mod bls12_381 {
type Fp = Fq;
type Fq = Fq2;
type CompressedRepr = G2Compressed;
+ type UnompressedRepr = G2Uncompressed;
const BYTES_FQ: usize = 96;
const BYTES_UNCOMPRESSED: usize = Self::BYTES_FQ * 2;
const LIMB_BITS: usize = 112;
@@ -423,12 +434,13 @@

mod bn254 {
use super::*;
- use halo2curves::bn256::{Fq, G1Affine, G1Compressed, G1};
+ use halo2curves::bn256::{Fq, G1Affine, G1Compressed, G1Uncompressed, G1};

impl AppCurveExt for G1 {
type Affine = G1Affine;
type Fp = Fq;
type CompressedRepr = G1Compressed;
+ type UnompressedRepr = G1Uncompressed;
const BYTES_FQ: usize = 32;
const BYTES_UNCOMPRESSED: usize = Self::BYTES_FQ * 2;
const LIMB_BITS: usize = 88;
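
The new `UnompressedRepr` associated type mirrors `CompressedRepr` for the raw, uncompressed point encoding (2 × `BYTES_FQ` bytes). A minimal sketch of how a caller might combine the `TryFrom<Vec<u8>>` and `UncompressedEncoding` bounds shown above; the helper name is hypothetical and not part of this commit:

```rust
use pasta_curves::group::UncompressedEncoding;

/// Hypothetical helper: parse raw uncompressed bytes into an affine point via
/// the `UnompressedRepr: TryFrom<Vec<u8>>` bound declared on `AppCurveExt`.
fn affine_from_uncompressed<C: AppCurveExt>(bytes: Vec<u8>) -> Option<C::Affine> {
    // Fails if the byte length does not match the curve's uncompressed size.
    let repr = C::UnompressedRepr::try_from(bytes).ok()?;
    // Point decoding provided by the `UncompressedEncoding` impl on the affine type.
    C::Affine::from_uncompressed(&repr).into()
}
```
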
26 changes: 11 additions & 15 deletions eth-types/src/spec.rs
@@ -11,12 +11,11 @@ pub trait Spec: 'static + Sized + Copy + Default + Debug {
const MAX_VALIDATORS_PER_COMMITTEE: usize;
const MAX_COMMITTEES_PER_SLOT: usize;
const SLOTS_PER_EPOCH: usize;
- const VALIDATOR_0_G_INDEX: usize;
- const VALIDATOR_SSZ_CHUNKS: usize;
- const USED_CHUNKS_PER_VALIDATOR: usize;
+ const VALIDATOR_0_GINDEX: usize;
const STATE_TREE_DEPTH: usize;
const STATE_TREE_LEVEL_PUBKEYS: usize;
+ const STATE_TREE_LEVEL_VALIDATORS: usize;
const STATE_TREE_LEVEL_BEACON_STATE: usize;
const DST: &'static [u8];

type PubKeysCurve: AppCurveExt;
@@ -35,15 +34,14 @@ pub struct Test;

impl Spec for Test {
const VALIDATOR_REGISTRY_LIMIT: usize = 100;
- const MAX_VALIDATORS_PER_COMMITTEE: usize = 10;
+ const MAX_VALIDATORS_PER_COMMITTEE: usize = 5;
const MAX_COMMITTEES_PER_SLOT: usize = 1;
const SLOTS_PER_EPOCH: usize = 1;
- const VALIDATOR_0_G_INDEX: usize = 32;
- const VALIDATOR_SSZ_CHUNKS: usize = 8;
- const USED_CHUNKS_PER_VALIDATOR: usize = 5;
- const STATE_TREE_DEPTH: usize = 10;
- const STATE_TREE_LEVEL_PUBKEYS: usize = 10;
+ const VALIDATOR_0_GINDEX: usize = 94557999988736;
+ const STATE_TREE_DEPTH: usize = 51;
+ const STATE_TREE_LEVEL_PUBKEYS: usize = Self::STATE_TREE_DEPTH;
+ const STATE_TREE_LEVEL_VALIDATORS: usize = Self::STATE_TREE_LEVEL_PUBKEYS - 1;
const STATE_TREE_LEVEL_BEACON_STATE: usize = 6;
const DST: &'static [u8] = b"BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_";

type PubKeysCurve = bls12_381::G1;
@@ -58,13 +56,11 @@ impl Spec for Mainnet {
const MAX_VALIDATORS_PER_COMMITTEE: usize = 2048;
const MAX_COMMITTEES_PER_SLOT: usize = 64;
const SLOTS_PER_EPOCH: usize = 32;
- const VALIDATOR_0_G_INDEX: usize = 94557999988736;
- const VALIDATOR_SSZ_CHUNKS: usize = 9;
- const USED_CHUNKS_PER_VALIDATOR: usize = 5;
- const STATE_TREE_DEPTH: usize = 47;
- // TODO: calculate and verify the pubkeys level for mainnet
- const STATE_TREE_LEVEL_PUBKEYS: usize = 49;
+ const VALIDATOR_0_GINDEX: usize = 94557999988736;
+ const STATE_TREE_DEPTH: usize = 51;
+ const STATE_TREE_LEVEL_PUBKEYS: usize = Self::STATE_TREE_DEPTH;
+ const STATE_TREE_LEVEL_VALIDATORS: usize = Self::STATE_TREE_LEVEL_PUBKEYS - 1;
const STATE_TREE_LEVEL_BEACON_STATE: usize = 6;
const DST: &'static [u8] = b"BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_";

type PubKeysCurve = bls12_381::G1;
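
The new `VALIDATOR_0_GINDEX` value can be sanity-checked from the SSZ generalized-index rules, assuming the capella `BeaconState` layout (28 container fields rounded up to 32 leaves, `validators` at field index 11) and a validator-registry limit of 2^40; the snippet below is an illustrative check, not code from this commit:

```rust
fn main() {
    // capella BeaconState: 28 fields round up to 32 leaves, `validators` is field 11.
    let validators_field_gindex: u64 = 32 + 11; // 43
    // A List's length mix-in pushes the data root one level down (gindex * 2),
    // and a limit of 2^40 validators adds 40 more levels to reach element 0.
    let validator_0_gindex = (validators_field_gindex * 2) << 40;
    assert_eq!(validator_0_gindex, 94_557_999_988_736);
}
```
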
5 changes: 3 additions & 2 deletions gadgets/src/util.rs
@@ -260,7 +260,7 @@ pub mod rlc {
ctx: &mut Context<F>,
) -> AssignedValue<F> {
if !values.is_empty() {
- let mut values = values.iter().rev();
+ let mut values = values.iter();
let init = values.next().expect("values should not be empty");

values.fold(*init, |acc, value| {
@@ -278,7 +278,8 @@
V: Clone + Add<R, Output = V> + Add<Output = V> + Mul<R, Output = V>,
R: Clone,
{
- let mut values = values.into_iter().rev();
+ // we don't reverse bytes because https://github.com/ChainSafe/banshee-zk/issues/72
+ let mut values = values.into_iter();
let init = values.next().expect("values should not be empty");

values.fold(init, |acc, value| acc * randomness.clone() + value)
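
The only functional change here is dropping `.rev()`: the accumulator now folds over the values in the order they are given, so for values v0, …, v(n-1) the RLC is v0·r^(n-1) + v1·r^(n-2) + … + v(n-1). A self-contained sketch of that fold (simplified generics, not the repository's exact signature):

```rust
/// Illustrative RLC fold: accumulate in the given order, without reversing,
/// so the first value ends up multiplied by the highest power of `r`.
fn rlc<F>(values: &[F], r: F) -> F
where
    F: Copy + std::ops::Add<Output = F> + std::ops::Mul<Output = F>,
{
    let mut iter = values.iter().copied();
    let init = iter.next().expect("values should not be empty");
    iter.fold(init, |acc, v| acc * r + v)
}
```
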
67 changes: 28 additions & 39 deletions preprocessor/scripts/generateInputData.ts
@@ -14,33 +14,18 @@ import { g1PointToLeBytes as g1PointToBytesLE, g2PointToLeBytes, serialize } fro
import { createNodeFromMultiProofWithTrace, printTrace } from "./merkleTrace";
import { hexToBytes, bytesToHex } from "@noble/curves/abstract/utils";
import { ProjPointType } from "@noble/curves/abstract/weierstrass";
+ import { createNodeFromCompactMultiProof } from "@chainsafe/persistent-merkle-tree/lib/proof/compactMulti";
+ import { ValidatorsSsz, Validator, BeaconStateSsz } from "./types";

const DST = "BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_";

- const ValidatorContainer = new ContainerType(
- {
- pubkey: ssz.Bytes48,
- withdrawalCredentials: ssz.Bytes32,
- effectiveBalance: ssz.UintNum64,
- slashed: ssz.Boolean,
- activationEligibilityEpoch: ssz.EpochInf,
- activationEpoch: ssz.EpochInf,
- exitEpoch: ssz.EpochInf,
- withdrawableEpoch: ssz.EpochInf,
- },
- { typeName: "Validator", jsonCase: "eth2" }
- );

- type Validator = ValueOf<typeof ValidatorContainer>;

- export const ValidatorsSsz = new ListCompositeType(ValidatorContainer, 10);
+ console.log("VALIDATOR_0_GINDEX:", BeaconStateSsz.getPathInfo(['validators', 0]).gindex);

const N = 5;
- let validators: Validator[] = [];
let gindices: bigint[] = [];
let validatorBaseGindices: bigint[] = [];

- let nonRlcGindices = [];
+ let nonRlcGindices: bigint[] = [];

let privKeyHexes = [
"5644920314564b11404384380c1d677871ada2ec9470d5f43f03aa931ecef54b",
@@ -52,7 +37,12 @@ let privKeyHexes = [

const target_epoch = 25;

- //----------------- State tree -----------------//
+ //----------------- Beacon state -----------------//

+ let beaconState = ssz.capella.BeaconState.deserialize(new Uint8Array(fs.readFileSync("../test_data/beacon_state_2915750")));
+ beaconState.validators = [];

+ //----------------- Validators -----------------//

let pubKeyPoints: ProjPointType<bigint>[] = [];

@@ -62,7 +52,7 @@ for (let i = 0; i < N; i++) {
let p = bls12_381.G1.ProjectivePoint.fromPrivateKey(privKey);
let pubkey = g1PointToBytesLE(p, true);

- validators.push({
+ beaconState.validators.push({
pubkey: pubkey,
withdrawalCredentials: Uint8Array.from(Array(32).fill(0)),
effectiveBalance: 32000000,
@@ -74,23 +64,23 @@ for (let i = 0; i < N; i++) {
});
privKeyHexes[i] = bytesToHex(privKey);
pubKeyPoints.push(p);
- validatorBaseGindices.push(ValidatorsSsz.getPathInfo([i]).gindex);
- gindices.push(ValidatorsSsz.getPathInfo([i, 'pubkey']).gindex * 2n);
- gindices.push(ValidatorsSsz.getPathInfo([i, 'pubkey']).gindex * 2n + 1n);
- gindices.push(ValidatorsSsz.getPathInfo([i, 'effectiveBalance']).gindex);
- gindices.push(ValidatorsSsz.getPathInfo([i, 'slashed']).gindex);
- gindices.push(ValidatorsSsz.getPathInfo([i, 'activationEpoch']).gindex);
- gindices.push(ValidatorsSsz.getPathInfo([i, 'exitEpoch']).gindex);

- nonRlcGindices.push(ValidatorsSsz.getPathInfo([i, 'effectiveBalance']).gindex);
- nonRlcGindices.push(ValidatorsSsz.getPathInfo([i, 'slashed']).gindex);
- nonRlcGindices.push(ValidatorsSsz.getPathInfo([i, 'activationEpoch']).gindex);
- nonRlcGindices.push(ValidatorsSsz.getPathInfo([i, 'exitEpoch']).gindex);
+ validatorBaseGindices.push(BeaconStateSsz.getPathInfo(['validators', i]).gindex);
+ gindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'pubkey']).gindex * 2n);
+ gindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'pubkey']).gindex * 2n + 1n);
+ gindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'effectiveBalance']).gindex);
+ gindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'slashed']).gindex);
+ gindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'activationEpoch']).gindex);
+ gindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'exitEpoch']).gindex);

+ nonRlcGindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'effectiveBalance']).gindex);
+ nonRlcGindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'slashed']).gindex);
+ nonRlcGindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'activationEpoch']).gindex);
+ nonRlcGindices.push(BeaconStateSsz.getPathInfo(['validators', i, 'exitEpoch']).gindex);
}

fs.writeFileSync(
`../test_data/validators.json`,
- serialize(Array.from(validators.entries()).map(([i, validator]) => ({
+ serialize(Array.from(beaconState.validators.entries()).map(([i, validator]) => ({
id: i,
shufflePos: i,
committee: 0,
@@ -173,16 +163,15 @@


//----------------- State tree -----------------//
- let view = ValidatorsSsz.toView(validators);

- let proof = createProof(view.node, { type: ProofType.multi, gindices: gindices }) as MultiProof;
+ let view = BeaconStateSsz.toView(beaconState);

const areEqual = (first: Uint8Array, second: Uint8Array) =>
first.length === second.length && first.every((value, index) => value === second[index]);

- let [partial_tree, trace] = createNodeFromMultiProofWithTrace(proof.leaves, proof.witnesses, proof.gindices, nonRlcGindices);
+ let proof = createProof(view.node, { type: ProofType.multi, gindices: gindices }) as MultiProof;

+ let [partial_tree, trace] = createNodeFromMultiProofWithTrace(proof.leaves, proof.witnesses, proof.gindices, nonRlcGindices);
// printTrace(partial_tree, trace);
console.log("state_root:", bytesToHex(view.hashTreeRoot()));

fs.writeFileSync(
`../test_data/merkle_trace.json`,
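
One detail in the validator loop above: the 48-byte pubkey hashes into two 32-byte SSZ chunks, so its generalized index is expanded into its two children, `gindex * 2n` and `gindex * 2n + 1n`. A small illustrative helper (hypothetical, not part of the script) expressing that child-index rule:

```rust
/// Generalized-index children: in a binary SSZ tree, node `g` has its
/// left child at `2 * g` and its right child at `2 * g + 1`.
fn pubkey_chunk_gindices(pubkey_gindex: u64) -> (u64, u64) {
    (2 * pubkey_gindex, 2 * pubkey_gindex + 1)
}
```
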
19 changes: 11 additions & 8 deletions preprocessor/scripts/merkleTrace.ts
@@ -81,22 +81,25 @@ export function createNodeFromMultiProofWithTrace(
throw new Error(`Sibling not found: ${siblingBitstring}`);
}

+ let index = isLeft ? nodeGindex : siblingGindex;
+ let siblingIndex = isLeft ? siblingGindex : nodeGindex;

// store the parent node
const parentNode = isLeft ? new BranchNode(node, siblingNode) : new BranchNode(siblingNode, node);
trace.push({
- node: node.root,
- index: nodeGindex,
- sibling: siblingNode.root,
- siblingIndex: siblingGindex,
+ node: isLeft ? node.root : siblingNode.root,
+ index: index,
+ sibling: isLeft ? siblingNode.root : node.root,
+ siblingIndex: siblingIndex,
intoLeft: parentBitstring[parentBitstring.length - 1] === "0",
- isLeft: gindices.includes(isLeft ? nodeGindex : siblingGindex),
- isRight: gindices.includes(isLeft ? siblingGindex : nodeGindex),
+ isLeft: gindices.includes(index),
+ isRight: gindices.includes(siblingIndex),
parent: parentNode.root,
parentIndex: parentGindex,
depth: i,
isRLC: [
- !nonRlcGindices.includes(nodeGindex),
- !nonRlcGindices.includes(siblingGindex),
+ !nonRlcGindices.includes(index),
+ !nonRlcGindices.includes(siblingIndex),
]
});

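
The trace entries are now normalized so that `node`/`index` always hold the left child and `sibling`/`siblingIndex` the right child, whichever of the two was the proven node. An illustrative sketch of that normalization (hypothetical helper, not code from this commit):

```rust
/// Illustrative normalization: return (left_gindex, right_gindex) given whether
/// the proven node is the left child of its parent.
fn normalize(is_left: bool, node_gindex: u64, sibling_gindex: u64) -> (u64, u64) {
    if is_left {
        (node_gindex, sibling_gindex)
    } else {
        (sibling_gindex, node_gindex)
    }
}
```
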
