Mirror of https://gitlab.com/pulsechaincom/lighthouse-pulse.git, synced 2024-12-25 21:17:17 +00:00
Add new build method for cached hashes
parent 40bfd5a6c7
commit 7b05c506df
@@ -44,6 +44,39 @@ impl Into<Vec<u8>> for TreeHashCache {
 }
 
 impl TreeHashCache {
+    pub fn new(mut leaves_and_subtrees: Vec<u8>, offset_handler: OffsetHandler) -> Option<Self> {
+        if leaves_and_subtrees.len() % BYTES_PER_CHUNK != 0 {
+            return None;
+        }
+
+        // Allocate enough bytes to store the internal nodes and the leaves and subtrees, then fill
+        // all the to-be-built internal nodes with zeros and append the leaves and subtrees.
+        let internal_node_bytes = offset_handler.num_internal_nodes * BYTES_PER_CHUNK;
+        let mut cache = Vec::with_capacity(internal_node_bytes + leaves_and_subtrees.len());
+        cache.resize(internal_node_bytes, 0);
+        cache.append(&mut leaves_and_subtrees);
+
+        // Concat all the leaves into one big byte array, ready for `merkleize`.
+        let mut leaves = vec![];
+        for leaf_chunk in offset_handler.iter_leaf_nodes() {
+            let start = leaf_chunk * BYTES_PER_CHUNK;
+            let end = start + BYTES_PER_CHUNK;
+
+            leaves.extend_from_slice(cache.get(start..end)?);
+        }
+
+        // Merkleize the leaves, then split the leaf nodes off them. Then, replace all-zeros
+        // internal nodes created earlier with the internal nodes generated by `merkleize`.
+        let mut merkleized = merkleize(leaves);
+        merkleized.split_off(internal_node_bytes);
+        cache.splice(0..internal_node_bytes, merkleized);
+
+        Some(Self {
+            chunk_modified: vec![false; cache.len() / BYTES_PER_CHUNK],
+            cache,
+        })
+    }
+
     pub fn from_bytes(bytes: Vec<u8>) -> Option<Self> {
         if bytes.len() % BYTES_PER_CHUNK > 0 {
             return None;
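The comments in `new` describe the intended buffer layout: a zero-filled region of `num_internal_nodes` 32-byte chunks, followed by the caller's leaves and subtrees, with the zeroed region later overwritten by the merkleized internal nodes. The standalone sketch below is not part of the commit; it reproduces only the allocation step, and the two-leaf tree, the constant value, and the `main` harness are illustrative assumptions.

// Illustrative sketch only: mirrors the allocation pattern in the new
// `TreeHashCache::new`, using an assumed two-leaf tree (one internal node).
const BYTES_PER_CHUNK: usize = 32;

fn main() {
    // Two 32-byte leaf chunks supplied by the caller.
    let mut leaves_and_subtrees = vec![1u8; 2 * BYTES_PER_CHUNK];

    // A two-leaf binary tree has a single internal (root) node.
    let num_internal_nodes = 1;
    let internal_node_bytes = num_internal_nodes * BYTES_PER_CHUNK;

    // Zero the to-be-built internal region, then append the leaves and
    // subtrees behind it, as the new constructor does before `merkleize`.
    let mut cache = Vec::with_capacity(internal_node_bytes + leaves_and_subtrees.len());
    cache.resize(internal_node_bytes, 0);
    cache.append(&mut leaves_and_subtrees);

    assert_eq!(cache.len(), 3 * BYTES_PER_CHUNK);
    assert!(cache[..internal_node_bytes].iter().all(|&b| b == 0));
    assert!(cache[internal_node_bytes..].iter().all(|&b| b == 1));
}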
@@ -13,28 +13,18 @@ impl CachedTreeHash for Inner {
     type Item = Self;
 
     fn build_cache_bytes(&self) -> Vec<u8> {
-        let cache_a = self.a.build_cache_bytes();
-        let cache_b = self.b.build_cache_bytes();
-        let cache_c = self.c.build_cache_bytes();
-        let cache_d = self.d.build_cache_bytes();
+        let mut leaves_and_subtrees = vec![];
 
-        let mut leaves = vec![];
-        leaves.extend_from_slice(&cache_a[0..32].to_vec());
-        leaves.extend_from_slice(&cache_b[0..32].to_vec());
-        leaves.extend_from_slice(&cache_c[0..32].to_vec());
-        leaves.extend_from_slice(&cache_d[0..32].to_vec());
-
-        // TODO: fix unwrap
-        let mut cache = TreeHashCache::from_bytes(merkleize(leaves)).unwrap();
+        leaves_and_subtrees.append(&mut self.a.build_cache_bytes());
+        leaves_and_subtrees.append(&mut self.b.build_cache_bytes());
+        leaves_and_subtrees.append(&mut self.c.build_cache_bytes());
+        leaves_and_subtrees.append(&mut self.d.build_cache_bytes());
 
         // TODO: fix unwrap
         let offset_handler = self.offset_handler(0).unwrap();
-        let mut iter = offset_handler.iter_leaf_nodes();
 
-        cache.single_chunk_splice(*iter.next().unwrap(), cache_a);
-        cache.single_chunk_splice(*iter.next().unwrap(), cache_b);
-        cache.single_chunk_splice(*iter.next().unwrap(), cache_c);
-        cache.single_chunk_splice(*iter.next().unwrap(), cache_d);
+        // TODO: fix unwrap
+        let cache = TreeHashCache::new(leaves_and_subtrees, offset_handler).unwrap();
 
         cache.into()
     }
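Before this change, `build_cache_bytes` pulled the first 32 bytes (each subtree's root) out of every child cache by hand, merkleized those roots, and then spliced each full child cache back in at its leaf position; now it only concatenates the full child caches and lets `TreeHashCache::new` locate the leaf chunks. The sketch below is illustrative and not from the commit: the helper name and the one-chunk stand-in child caches are assumptions.

// Illustrative sketch: concatenate each field's full cache bytes, the way the
// updated `build_cache_bytes` builds `leaves_and_subtrees`. The child caches
// here are stand-ins (one 32-byte chunk each), not real Lighthouse caches.
fn concat_leaves_and_subtrees(children: Vec<Vec<u8>>) -> Vec<u8> {
    let mut leaves_and_subtrees = vec![];
    for mut child in children {
        leaves_and_subtrees.append(&mut child);
    }
    leaves_and_subtrees
}

fn main() {
    let cache_a = vec![0xaa; 32];
    let cache_b = vec![0xbb; 32];
    let combined = concat_leaves_and_subtrees(vec![cache_a, cache_b]);

    // Each child's root hash is still the leading 32 bytes of its own cache,
    // which is what the replaced code extracted manually with `[0..32]`.
    assert_eq!(combined.len(), 64);
    assert!(combined[..32].iter().all(|&b| b == 0xaa));
    assert!(combined[32..].iter().all(|&b| b == 0xbb));
}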
@@ -111,25 +101,17 @@ impl CachedTreeHash for Outer {
     type Item = Self;
 
     fn build_cache_bytes(&self) -> Vec<u8> {
-        let cache_a = self.a.build_cache_bytes();
-        let cache_b = self.b.build_cache_bytes();
-        let cache_c = self.c.build_cache_bytes();
+        let mut leaves_and_subtrees = vec![];
 
-        let mut leaves = vec![];
-        leaves.extend_from_slice(&cache_a[0..32].to_vec());
-        leaves.extend_from_slice(&cache_b[0..32].to_vec());
-        leaves.extend_from_slice(&cache_c[0..32].to_vec());
-
-        // TODO: fix unwrap
-        let mut cache = TreeHashCache::from_bytes(merkleize(leaves)).unwrap();
+        leaves_and_subtrees.append(&mut self.a.build_cache_bytes());
+        leaves_and_subtrees.append(&mut self.b.build_cache_bytes());
+        leaves_and_subtrees.append(&mut self.c.build_cache_bytes());
 
         // TODO: fix unwrap
        let offset_handler = self.offset_handler(0).unwrap();
-        let mut iter = offset_handler.iter_leaf_nodes();
 
-        cache.single_chunk_splice(*iter.next().unwrap(), cache_a);
-        cache.single_chunk_splice(*iter.next().unwrap(), cache_b);
-        cache.single_chunk_splice(*iter.next().unwrap(), cache_c);
+        // TODO: fix unwrap
+        let cache = TreeHashCache::new(leaves_and_subtrees, offset_handler).unwrap();
 
         cache.into()
     }
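Both implementations end with `cache.into()`, relying on the existing `impl Into<Vec<u8>> for TreeHashCache` named in the first hunk header. The stand-in below is not the Lighthouse type; its field layout is assumed from the `Some(Self { .. })` constructor above, and it only shows why that call yields the `Vec<u8>` that `build_cache_bytes` returns.

// Stand-in sketch of the conversion `cache.into()` depends on. `ToyCache`
// assumes only the two fields visible in the new constructor; the real
// `TreeHashCache` may differ.
struct ToyCache {
    cache: Vec<u8>,
    chunk_modified: Vec<bool>,
}

impl Into<Vec<u8>> for ToyCache {
    fn into(self) -> Vec<u8> {
        // Hand back the flat byte buffer, dropping the dirty-chunk flags.
        self.cache
    }
}

fn main() {
    let toy = ToyCache {
        cache: vec![0u8; 2 * 32],
        chunk_modified: vec![false; 2],
    };
    assert_eq!(toy.chunk_modified.len(), 2);

    let bytes: Vec<u8> = toy.into();
    assert_eq!(bytes.len(), 64);
}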