Skip to content

Commit

Permalink
use batch instead of blob (no encoding) and load seqexconf correctly
Browse files Browse the repository at this point in the history
  • Loading branch information
roynalnaruto authored and lispc committed Aug 15, 2024
1 parent f851e09 commit 32619ee
Show file tree
Hide file tree
Showing 6 changed files with 184 additions and 10 deletions.
3 changes: 1 addition & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@
.vscode
.idea
*.log
*.json
*.sh
*.txt
*.srs
tmp
tmp
107 changes: 107 additions & 0 deletions aggregator/data/batch-task.json

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions aggregator/src/aggregation/blob_data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,9 @@ impl<const N_SNARKS: usize> BlobDataConfig<N_SNARKS> {
};

let enable_encoding = blob_bytes.len() < batch_bytes.len();
if !enable_encoding {
blob_bytes = batch_bytes.clone();
}
blob_bytes.insert(0, enable_encoding as u8);

assert!(blob_bytes.len() <= N_BLOB_BYTES, "too many blob bytes");
Expand Down
12 changes: 6 additions & 6 deletions aggregator/src/aggregation/circuit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -481,12 +481,12 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
barycentric_assignments,
)?;

// construct bytes to be populated into the [`BatchDataConfig`].
let batch_bytes = batch_data.get_batch_data_bytes();

// conditionally encode those bytes. By default we use a worked example.
let encoded_bytes = if blob_data_exports.enable_encoding_bool {
batch_data.get_encoded_batch_data_bytes()
let (batch_bytes, encoded_bytes) = if blob_data_exports.enable_encoding_bool {
(
batch_data.get_batch_data_bytes(),
batch_data.get_encoded_batch_data_bytes(),
)
} else {
let dummy_bytes = WORKED_EXAMPLE.as_bytes().to_vec();
let mut encoder = init_zstd_encoder(None);
Expand All @@ -496,7 +496,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
encoder
.write_all(&dummy_bytes)
.map_err(|_| Error::Synthesis)?;
encoder.finish().map_err(|_| Error::Synthesis)?
(dummy_bytes, encoder.finish().map_err(|_| Error::Synthesis)?)
};

let MultiBlockProcessResult {
Expand Down
2 changes: 1 addition & 1 deletion aggregator/src/param.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ impl ConfigParams {
Self {
strategy: FpStrategy::Simple,
degree: 21,
num_advice: vec![63],
num_advice: vec![64],
num_lookup_advice: vec![8],
num_fixed: 2,
lookup_bits: 20,
Expand Down
67 changes: 66 additions & 1 deletion aggregator/src/tests/aggregation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@ fn test_max_agg_snarks_batch_circuit() {
let k = 21;

// This set up requires one round of keccak for chunk's data hash
let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_new_batch_circuit(2, k);
// let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_new_batch_circuit(2, k);
let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_batch_circuit_skip_encoding();
let instance = circuit.instances();
let mock_prover = MockProver::<Fr>::run(k, &circuit, instance).unwrap();
mock_prover.assert_satisfied_par();
Expand Down Expand Up @@ -196,3 +197,67 @@ fn build_new_batch_circuit<const N_SNARKS: usize>(
)
.unwrap()
}

/// Build a batch circuit where the blob bytes equal the batch bytes, i.e. no encoding.
///
/// Reads a pre-generated proving task from `data/batch-task.json`, rebuilds the
/// batch header from the deserialised chunks, proves each chunk with the mock
/// chunk circuit, and pads the snark list with copies of the last real snark
/// up to `N_SNARKS`.
///
/// # Panics
/// Panics if the JSON fixture is missing or malformed, if it contains no
/// chunks, or if it contains more than `N_SNARKS` chunks.
fn build_batch_circuit_skip_encoding<const N_SNARKS: usize>() -> BatchCircuit<N_SNARKS> {
    // Small SRS degree: sufficient for the layer-0 mock chunk circuits.
    let k0 = 8;

    // Local mirrors of the prover-side task types; only the fields read below
    // are declared, everything else in the JSON is ignored by serde.
    #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
    pub struct ChunkProof {
        pub chunk_info: ChunkInfo,
    }
    #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
    struct BatchProvingTask {
        pub chunk_proofs: Vec<ChunkProof>,
        pub batch_header: BatchHeader<MAX_AGG_SNARKS>,
    }

    let file = std::fs::File::open("data/batch-task.json").expect("batch-task.json exists");
    let reader = std::io::BufReader::new(file);
    let batch_proving_task: BatchProvingTask =
        serde_json::from_reader(reader).expect("deserialisation should succeed");

    // Clone only the chunk infos rather than the whole proof vector.
    let chunks = batch_proving_task
        .chunk_proofs
        .iter()
        .map(|p| p.chunk_info.clone())
        .collect::<Vec<_>>();
    assert!(
        !chunks.is_empty() && chunks.len() <= N_SNARKS,
        "expected between 1 and {N_SNARKS} chunks, got {}",
        chunks.len()
    );

    // Reconstruct the header from the actual chunks ("corrected"): fields in
    // the JSON fixture derived from chunk data are presumably stale — the
    // scalar header fields are reused as-is.
    let corrected_batch_header = BatchHeader::construct_from_chunks(
        batch_proving_task.batch_header.version,
        batch_proving_task.batch_header.batch_index,
        batch_proving_task.batch_header.l1_message_popped,
        batch_proving_task.batch_header.total_l1_message_popped,
        batch_proving_task.batch_header.parent_batch_hash,
        batch_proving_task.batch_header.last_block_timestamp,
        &chunks,
    );
    let batch_hash = BatchHash::construct_with_unpadded(&chunks, corrected_batch_header);

    let params = gen_srs(k0);
    let rng = test_rng();

    // ==========================
    // real chunks
    // ==========================
    let real_snarks = {
        let circuits = chunks
            .iter()
            .map(|chunk| MockChunkCircuit::new(true, chunk.clone()))
            .collect_vec();
        // Consume the circuits by value — no per-element clone needed.
        circuits
            .into_iter()
            .map(|circuit| layer_0!(circuit, MockChunkCircuit, params, k0, path))
            .collect_vec()
    };

    // ==========================
    // padded chunks
    // ==========================
    // Duplicate the last real snark to fill the remaining N_SNARKS slots.
    let padded_snarks = vec![
        real_snarks.last().expect("at least one real snark").clone();
        N_SNARKS - chunks.len()
    ];

    BatchCircuit::new(
        &params,
        [real_snarks, padded_snarks].concat().as_ref(),
        rng,
        batch_hash,
    )
    .unwrap()
}

0 comments on commit 32619ee

Please sign in to comment.