Merge pull request #3095 from autonomys/change-pos-seed-derivation
Change PoSpace seed derivation
nazar-pc authored Oct 4, 2024
2 parents 950df0b + 41874fc commit c82252d
Showing 19 changed files with 86 additions and 56 deletions.
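
In short: the history size moves from proof-of-space seed derivation into sector ID derivation. SectorId::new now also takes the sector's HistorySize, SectorId::derive_evaluation_seed drops its history_size parameter, and every call site plus the RecordsEncoder trait is updated to match. A minimal sketch of the resulting call pattern (module paths follow the imports visible in this diff; the wrapper function itself is illustrative):

use subspace_core_primitives::hashes::Blake3Hash;
use subspace_core_primitives::pieces::PieceOffset;
use subspace_core_primitives::pos::PosSeed;
use subspace_core_primitives::sectors::{SectorId, SectorIndex};
use subspace_core_primitives::segments::HistorySize;

// Illustrative wrapper showing the derivation chain after this commit.
fn derive_pos_seed(
    public_key_hash: Blake3Hash,
    sector_index: SectorIndex,
    history_size: HistorySize,
    piece_offset: PieceOffset,
) -> PosSeed {
    // History size is now bound into the sector ID itself...
    let sector_id = SectorId::new(public_key_hash, sector_index, history_size);
    // ...so seed derivation only needs the piece offset (previously this call was
    // sector_id.derive_evaluation_seed(piece_offset, history_size)).
    sector_id.derive_evaluation_seed(piece_offset)
}
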
6 changes: 5 additions & 1 deletion crates/pallet-subspace/src/lib.rs
@@ -1495,7 +1495,11 @@ fn check_vote<T: Config>(
parent_vote_verification_data
};

let sector_id = SectorId::new(solution.public_key.hash(), solution.sector_index);
let sector_id = SectorId::new(
solution.public_key.hash(),
solution.sector_index,
solution.history_size,
);

let recent_segments = T::RecentSegments::get();
let recent_history_fraction = (

1 change: 1 addition & 0 deletions crates/pallet-subspace/src/mock.rs
@@ -385,6 +385,7 @@ pub fn create_signed_vote(
let sector_id = SectorId::new(
PublicKey::from(keypair.public.to_bytes()).hash(),
solution.sector_index,
solution.history_size,
);
let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
let masked_chunk =

1 change: 1 addition & 0 deletions crates/sc-consensus-subspace/src/block_import.rs
@@ -456,6 +456,7 @@ where
let sector_id = SectorId::new(
pre_digest.solution().public_key.hash(),
pre_digest.solution().sector_index,
pre_digest.solution().history_size,
);

let max_pieces_in_sector = self

6 changes: 5 additions & 1 deletion crates/sc-consensus-subspace/src/slot_worker.rs
@@ -523,7 +523,11 @@ where
}
}

let sector_id = SectorId::new(solution.public_key.hash(), solution.sector_index);
let sector_id = SectorId::new(
solution.public_key.hash(),
solution.sector_index,
solution.history_size,
);

let history_size = runtime_api.history_size(parent_hash).ok()?;
let max_pieces_in_sector = runtime_api.max_pieces_in_sector(parent_hash).ok()?;

10 changes: 10 additions & 0 deletions crates/subspace-core-primitives/src/hashes.rs
@@ -129,6 +129,16 @@ pub fn blake3_hash_with_key(key: &[u8; 32], data: &[u8]) -> Blake3Hash {
blake3::keyed_hash(key, data).as_bytes().into()
}

/// BLAKE3 keyed hashing of a list of values.
#[inline]
pub fn blake3_hash_list_with_key(key: &[u8; 32], data: &[&[u8]]) -> Blake3Hash {
let mut state = blake3::Hasher::new_keyed(key);
for d in data {
state.update(d);
}
state.finalize().as_bytes().into()
}

/// BLAKE3 hashing of a list of values.
#[inline]
pub fn blake3_hash_list(data: &[&[u8]]) -> Blake3Hash {

29 changes: 15 additions & 14 deletions crates/subspace-core-primitives/src/sectors.rs
@@ -3,7 +3,9 @@
#[cfg(test)]
mod tests;

use crate::hashes::{blake3_hash_list, blake3_hash_with_key, Blake3Hash};
use crate::hashes::{
blake3_hash_list, blake3_hash_list_with_key, blake3_hash_with_key, Blake3Hash,
};
use crate::pieces::{PieceIndex, PieceOffset, Record};
use crate::pos::PosSeed;
use crate::segments::{HistorySize, SegmentCommitment};
@@ -54,10 +56,17 @@ impl AsRef<[u8]> for SectorId {

impl SectorId {
/// Create new sector ID by deriving it from public key and sector index
pub fn new(public_key_hash: Blake3Hash, sector_index: SectorIndex) -> Self {
Self(blake3_hash_with_key(
pub fn new(
public_key_hash: Blake3Hash,
sector_index: SectorIndex,
history_size: HistorySize,
) -> Self {
Self(blake3_hash_list_with_key(
&public_key_hash,
&sector_index.to_le_bytes(),
&[
&sector_index.to_le_bytes(),
&history_size.get().to_le_bytes(),
],
))
}

@@ -117,16 +126,8 @@ impl SectorId {
}

/// Derive evaluation seed
pub fn derive_evaluation_seed(
&self,
piece_offset: PieceOffset,
history_size: HistorySize,
) -> PosSeed {
let evaluation_seed = blake3_hash_list(&[
self.as_ref(),
&piece_offset.to_bytes(),
&history_size.get().to_le_bytes(),
]);
pub fn derive_evaluation_seed(&self, piece_offset: PieceOffset) -> PosSeed {
let evaluation_seed = blake3_hash_list(&[self.as_ref(), &piece_offset.to_bytes()]);

PosSeed::from(*evaluation_seed)
}
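
Taken together, the changes in this file mean a sector ID is now the keyed BLAKE3 hash of the little-endian sector index followed by the little-endian history size, keyed by the public key hash, while the PoSpace evaluation seed is an unkeyed hash over the sector ID and the piece offset. A hand-rolled sketch with the blake3 crate, assuming the sector index serializes as u16 and the history size as u64 (matching the to_le_bytes() calls above) and that blake3_hash_list streams its inputs the same way as the keyed variant:

// Equivalent of the new SectorId::new above, written directly against blake3.
fn sector_id_bytes(public_key_hash: &[u8; 32], sector_index: u16, history_size: u64) -> [u8; 32] {
    let mut hasher = blake3::Hasher::new_keyed(public_key_hash);
    hasher.update(&sector_index.to_le_bytes());
    hasher.update(&history_size.to_le_bytes());
    *hasher.finalize().as_bytes()
}

// Equivalent of the new derive_evaluation_seed above: the history size no longer
// appears here, only the sector ID and the piece offset bytes.
fn evaluation_seed_bytes(sector_id: &[u8; 32], piece_offset_bytes: &[u8]) -> [u8; 32] {
    let mut hasher = blake3::Hasher::new();
    hasher.update(sector_id);
    hasher.update(piece_offset_bytes);
    *hasher.finalize().as_bytes()
}

One consequence of binding the history size into the sector ID is that the same public key and sector index plotted at different history sizes now produce different sector IDs, and with them different sector slot challenges and evaluation seeds.
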

6 changes: 5 additions & 1 deletion crates/subspace-farmer-components/benches/auditing.rs
@@ -107,7 +107,11 @@ pub fn criterion_benchmark(c: &mut Criterion) {

(
PlottedSector {
sector_id: SectorId::new(public_key.hash(), sector_index),
sector_id: SectorId::new(
public_key.hash(),
sector_index,
farmer_protocol_info.history_size,
),
sector_index,
sector_metadata,
piece_indexes: vec![],

6 changes: 5 additions & 1 deletion crates/subspace-farmer-components/benches/proving.rs
@@ -115,7 +115,11 @@ pub fn criterion_benchmark(c: &mut Criterion) {

(
PlottedSector {
sector_id: SectorId::new(public_key.hash(), sector_index),
sector_id: SectorId::new(
public_key.hash(),
sector_index,
farmer_protocol_info.history_size,
),
sector_index,
sector_metadata,
piece_indexes: vec![],

6 changes: 5 additions & 1 deletion crates/subspace-farmer-components/benches/reading.rs
@@ -104,7 +104,11 @@ pub fn criterion_benchmark(c: &mut Criterion) {

(
PlottedSector {
sector_id: SectorId::new(public_key.hash(), sector_index),
sector_id: SectorId::new(
public_key.hash(),
sector_index,
farmer_protocol_info.history_size,
),
sector_index,
sector_metadata,
piece_indexes: vec![],

6 changes: 5 additions & 1 deletion crates/subspace-farmer-components/src/auditing.rs
@@ -200,7 +200,11 @@ fn collect_sector_auditing_details(
global_challenge: &Blake3Hash,
sector_metadata: &SectorMetadataChecksummed,
) -> SectorAuditingDetails {
let sector_id = SectorId::new(public_key_hash, sector_metadata.sector_index);
let sector_id = SectorId::new(
public_key_hash,
sector_metadata.sector_index,
sector_metadata.history_size,
);

let sector_slot_challenge = sector_id.derive_sector_slot_challenge(global_challenge);
let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index();

18 changes: 7 additions & 11 deletions crates/subspace-farmer-components/src/plotting.rs
@@ -261,7 +261,11 @@ where
pieces_in_sector,
} = options;

let sector_id = SectorId::new(public_key.hash(), sector_index);
let sector_id = SectorId::new(
public_key.hash(),
sector_index,
farmer_protocol_info.history_size,
);

let piece_indices = (PieceOffset::ZERO..)
.take(pieces_in_sector.into())
@@ -333,7 +337,6 @@ pub trait RecordsEncoder {
&mut self,
sector_id: &SectorId,
records: &mut [Record],
history_size: HistorySize,
abort_early: &AtomicBool,
) -> Result<SectorContentsMap, Box<dyn std::error::Error + Send + Sync + 'static>>;
}
@@ -357,7 +360,6 @@ where
&mut self,
sector_id: &SectorId,
records: &mut [Record],
history_size: HistorySize,
abort_early: &AtomicBool,
) -> Result<SectorContentsMap, Box<dyn std::error::Error + Send + Sync + 'static>> {
if self.erasure_coding.max_shards() < Record::NUM_S_BUCKETS {
@@ -406,8 +408,7 @@ where
else {
return;
};
let pos_seed =
sector_id.derive_evaluation_seed(piece_offset, history_size);
let pos_seed = sector_id.derive_evaluation_seed(piece_offset);

record_encoding::<PosTable>(
&pos_seed,
@@ -504,12 +505,7 @@ where
);

let sector_contents_map = records_encoder
.encode_records(
&sector_id,
&mut raw_sector.records,
history_size,
abort_early,
)
.encode_records(&sector_id, &mut raw_sector.records, abort_early)
.map_err(|error| PlottingError::RecordsEncoderError { error })?;

let sector_metadata = SectorMetadataChecksummed::from(SectorMetadata {
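
With the history_size parameter removed from RecordsEncoder::encode_records, an encoder derives each proof-of-space seed from the sector ID (which now encodes the history size) and the piece offset alone. A minimal sketch of an implementor under the updated signature; the NoopRecordsEncoder type and its body are hypothetical, and it assumes encode_records is the trait's only required method:

use std::sync::atomic::AtomicBool;

use subspace_core_primitives::pieces::{PieceOffset, Record};
use subspace_core_primitives::sectors::SectorId;
use subspace_farmer_components::plotting::RecordsEncoder;
use subspace_farmer_components::sector::SectorContentsMap;

struct NoopRecordsEncoder;

impl RecordsEncoder for NoopRecordsEncoder {
    fn encode_records(
        &mut self,
        sector_id: &SectorId,
        records: &mut [Record],
        _abort_early: &AtomicBool,
    ) -> Result<SectorContentsMap, Box<dyn std::error::Error + Send + Sync + 'static>> {
        for (piece_offset, _record) in (PieceOffset::ZERO..).zip(records.iter_mut()) {
            // No history size is threaded through any more; the seed comes from the
            // sector ID plus the piece offset.
            let _pos_seed = sector_id.derive_evaluation_seed(piece_offset);
            // A real encoder would generate the PoSpace table for _pos_seed here and
            // encode the record with it.
        }
        Err("sketch only, not a real encoder".into())
    }
}
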

7 changes: 2 additions & 5 deletions crates/subspace-farmer-components/src/proving.rs
@@ -244,11 +244,8 @@ where
self.count -= 1;

// Derive PoSpace table
let pos_table = (self.table_generator)(
&self
.sector_id
.derive_evaluation_seed(piece_offset, self.sector_metadata.history_size),
);
let pos_table =
(self.table_generator)(&self.sector_id.derive_evaluation_seed(piece_offset));

let maybe_solution: Result<_, ProvingError> = try {
let sector_record_chunks_fut = read_sector_record_chunks(

4 changes: 1 addition & 3 deletions crates/subspace-farmer-components/src/reading.rs
@@ -465,9 +465,7 @@ where
pieces_in_sector,
&sector_metadata.s_bucket_offsets(),
&sector_contents_map,
&table_generator.generate(
&sector_id.derive_evaluation_seed(piece_offset, sector_metadata.history_size),
),
&table_generator.generate(&sector_id.derive_evaluation_seed(piece_offset)),
sector,
mode,
)

4 changes: 1 addition & 3 deletions crates/subspace-farmer/src/plotter/gpu/cuda.rs
@@ -8,7 +8,6 @@ use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use subspace_core_primitives::pieces::{PieceOffset, Record};
use subspace_core_primitives::sectors::SectorId;
use subspace_core_primitives::segments::HistorySize;
use subspace_farmer_components::plotting::RecordsEncoder;
use subspace_farmer_components::sector::SectorContentsMap;
use subspace_proof_of_space_gpu::cuda::CudaDevice;
@@ -30,7 +29,6 @@ impl RecordsEncoder for CudaRecordsEncoder {
&mut self,
sector_id: &SectorId,
records: &mut [Record],
history_size: HistorySize,
abort_early: &AtomicBool,
) -> Result<SectorContentsMap, Box<dyn std::error::Error + Send + Sync + 'static>> {
let pieces_in_sector = records
@@ -59,7 +57,7 @@ impl RecordsEncoder for CudaRecordsEncoder {
else {
return;
};
let pos_seed = sector_id.derive_evaluation_seed(piece_offset, history_size);
let pos_seed = sector_id.derive_evaluation_seed(piece_offset);

if let Err(error) = self.cuda_device.generate_and_encode_pospace(
&pos_seed,

6 changes: 5 additions & 1 deletion crates/subspace-farmer/src/single_disk_farm/piece_reader.rs
@@ -227,7 +227,11 @@ where
{
let sector_index = sector_metadata.sector_index;

let sector_id = SectorId::new(public_key.hash(), sector_index);
let sector_id = SectorId::new(
public_key.hash(),
sector_index,
sector_metadata.history_size,
);

let piece = match reading::read_piece::<PosTable, _, _>(
piece_offset,

@@ -31,7 +31,8 @@ impl PlottedSectors for SingleDiskPlottedSectors {
let sectors_metadata = self.sectors_metadata.read().await.clone();
Ok(Box::new(stream::iter((0..).zip(sectors_metadata).map(
move |(sector_index, sector_metadata)| {
let sector_id = SectorId::new(public_key_hash, sector_index);
let sector_id =
SectorId::new(public_key_hash, sector_index, sector_metadata.history_size);

let mut piece_indexes = Vec::with_capacity(usize::from(self.pieces_in_sector));
(PieceOffset::ZERO..)

2 changes: 1 addition & 1 deletion crates/subspace-farmer/src/single_disk_farm/plotting.rs
@@ -896,7 +896,7 @@ where
if let Some(sector_expiration_check_segment_commitment) =
maybe_sector_expiration_check_segment_commitment
{
let sector_id = SectorId::new(public_key_hash, sector_index);
let sector_id = SectorId::new(public_key_hash, sector_index, history_size);
let expiration_history_size = sector_id
.derive_expiration_history_size(
history_size,

8 changes: 6 additions & 2 deletions crates/subspace-verification/src/lib.rs
@@ -212,7 +212,11 @@ where
piece_check_params,
} = params;

let sector_id = SectorId::new(solution.public_key.hash(), solution.sector_index);
let sector_id = SectorId::new(
solution.public_key.hash(),
solution.sector_index,
solution.history_size,
);

let global_randomness = proof_of_time.derive_global_randomness();
let global_challenge = global_randomness.derive_global_challenge(slot);
@@ -221,7 +225,7 @@

// Check that proof of space is valid
if !PosTable::is_proof_valid(
&sector_id.derive_evaluation_seed(solution.piece_offset, solution.history_size),
&sector_id.derive_evaluation_seed(solution.piece_offset),
s_bucket_audit_index.into(),
&solution.proof_of_space,
) {

13 changes: 4 additions & 9 deletions shared/subspace-proof-of-space-gpu/src/cuda/tests.rs
@@ -33,8 +33,8 @@ fn basic() {
&global_mutex,
);

let sector_id = SectorId::new(blake3_hash(b"hello"), 500);
let history_size = HistorySize::ONE;
let sector_id = SectorId::new(blake3_hash(b"hello"), 500, history_size);
let mut record = Record::new_boxed();
record
.iter_mut()
@@ -46,12 +46,7 @@
cpu_encoded_record.clone_from(&record);
}
let cpu_sector_contents_map = cpu_records_encoder
.encode_records(
&sector_id,
&mut cpu_encoded_records,
history_size,
&Default::default(),
)
.encode_records(&sector_id, &mut cpu_encoded_records, &Default::default())
.unwrap();

let mut gpu_encoded_records = Record::new_zero_vec(2);
@@ -61,7 +56,7 @@
let mut gpu_sector_contents_map = SectorContentsMap::new(2);
cuda_device
.generate_and_encode_pospace(
&sector_id.derive_evaluation_seed(PieceOffset::ZERO, history_size),
&sector_id.derive_evaluation_seed(PieceOffset::ZERO),
&mut gpu_encoded_records[0],
gpu_sector_contents_map
.iter_record_bitfields_mut()
@@ -72,7 +67,7 @@
.unwrap();
cuda_device
.generate_and_encode_pospace(
&sector_id.derive_evaluation_seed(PieceOffset::ONE, history_size),
&sector_id.derive_evaluation_seed(PieceOffset::ONE),
&mut gpu_encoded_records[1],
gpu_sector_contents_map
.iter_record_bitfields_mut()