Mirror of https://gitlab.com/pulsechaincom/lighthouse-pulse.git (synced 2024-12-25 21:17:17 +00:00)
Fix bug with shrinking list.

commit dddcc91ef3 (parent f65e981f6f)
@@ -228,6 +228,9 @@ mod test {
         let tree = BTreeSchema::from_lengths(0, vec![1, 1]).into_overlay(11);
         assert_eq!(tree.chunk_range(), 11..14);
 
+        let tree = BTreeSchema::from_lengths(0, vec![7, 7, 7]).into_overlay(0);
+        assert_eq!(tree.chunk_range(), 0..25);
+
     }
 
     #[test]
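The expected ranges in these assertions follow from the overlay's chunk arithmetic: the subtree count is padded to the next power of two, internal nodes are laid out before the subtree chunks, and each empty leaf slot costs one padding chunk. A minimal sketch of that arithmetic, assuming this layout (expected_chunks is a hypothetical helper, not part of the crate):

    fn expected_chunks(subtree_chunks: &[usize]) -> usize {
        // Leaf slots are padded to the next power of two.
        let leaves = subtree_chunks.len().next_power_of_two();
        // A complete binary tree with `leaves` leaves has `leaves - 1` internal nodes.
        let internal = leaves - 1;
        // Chunks occupied by the real subtrees.
        let content: usize = subtree_chunks.iter().sum();
        // One padding chunk per empty leaf slot.
        let padding = leaves - subtree_chunks.len();
        internal + content + padding
    }

    #[test]
    fn chunk_count_arithmetic() {
        assert_eq!(expected_chunks(&[1, 1]), 3); // at offset 11 => 11..14
        assert_eq!(expected_chunks(&[7, 7, 7]), 25); // at offset 0 => 0..25
    }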
@@ -33,7 +33,7 @@ where
         cache.mix_in_length(new_overlay.chunk_range(), self.len())?;
 
         // Skip an extra node to clear the length node.
-        cache.chunk_index = new_overlay.next_node() + 1;
+        cache.chunk_index += 1;
 
         Ok(())
     }
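A hedged reading of this change: by the time this line runs, update_tree_hash_cache (see the later hunk) has just left cache.chunk_index at new_overlay.next_node(), so the relative += 1 steps over the length chunk written by mix_in_length without re-deriving the absolute position.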
@@ -95,11 +95,6 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
     let old_overlay = cache.get_overlay(cache.schema_index, cache.chunk_index)?;
     let new_overlay = BTreeOverlay::new(vec, cache.chunk_index, old_overlay.depth);
 
-    dbg!(cache.schema_index);
-    dbg!(cache.schemas.len());
-    dbg!(&old_overlay);
-    dbg!(&new_overlay);
-
     cache.replace_overlay(cache.schema_index, cache.chunk_index, new_overlay.clone())?;
 
     cache.schema_index += 1;
@@ -178,10 +173,36 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
                 // this node padding.
                 cache.maybe_update_chunk(new_overlay.root(), &[0; HASHSIZE])?;
             } else {
-                // In this case, there are some items in the new list and we should
-                // splice out the entire tree of the removed node, replacing it
-                // with a single padding node.
-                cache.splice(old, vec![0; HASHSIZE], vec![true]);
+                let old_internal_nodes = old_overlay.num_internal_nodes();
+                let new_internal_nodes = new_overlay.num_internal_nodes();
+
+                // If the number of internal nodes have shifted between the two
+                // overlays, the range for this node needs to be shifted to suit the
+                // new overlay.
+                let old = if old_internal_nodes > new_internal_nodes {
+                    let offset = old_internal_nodes - new_internal_nodes;
+
+                    old.start - offset..old.end - offset
+                } else if old_internal_nodes < new_internal_nodes {
+                    let offset = new_internal_nodes - old_internal_nodes;
+
+                    old.start + offset..old.end + offset
+                } else {
+                    old.start..old.end
+                };
+
+                // If there are still some old bytes left-over from this item, replace
+                // them with a padding chunk.
+                if old.start < new_overlay.chunk_range().end {
+                    let start_chunk = old.start;
+                    let end_chunk =
+                        std::cmp::min(old.end, new_overlay.chunk_range().end);
+
+                    // In this case, there are some items in the new list and we should
+                    // splice out the entire tree of the removed node, replacing it
+                    // with a single padding node.
+                    cache.splice(start_chunk..end_chunk, vec![0; HASHSIZE], vec![true]);
+                }
             }
         }
         // The item didn't exist in the old list and doesn't exist in the new list,
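This hunk is the core of the shrinking-list fix: when a subtree loses leaf nodes it also loses internal nodes, so a chunk range computed against the old overlay points too far to the right, and the splice that pads out a removed item would hit the wrong chunks. A self-contained sketch of the same re-basing logic, assuming chunk ranges are plain Range<usize> values (shift_range is an illustrative helper mirroring the diff, not crate API):

    use std::ops::Range;

    /// Re-base a chunk range computed against an overlay with `old_internal`
    /// internal nodes onto one with `new_internal` internal nodes.
    fn shift_range(old: Range<usize>, old_internal: usize, new_internal: usize) -> Range<usize> {
        if old_internal > new_internal {
            // The tree shrank: chunks after the internal-node region move left.
            let offset = old_internal - new_internal;
            old.start - offset..old.end - offset
        } else if old_internal < new_internal {
            // The tree grew: chunks move right.
            let offset = new_internal - old_internal;
            old.start + offset..old.end + offset
        } else {
            old
        }
    }

    #[test]
    fn shrinking_tree_shifts_ranges_left() {
        // 8 leaves (7 internal nodes) shrinking to 4 leaves (3 internal nodes)
        // moves a leaf's chunk range 4 chunks to the left.
        assert_eq!(shift_range(10..11, 7, 3), 6..7);
    }

The min(old.end, new_overlay.chunk_range().end) clamp then keeps the padding splice from reaching past the end of the new tree, and the old.start < new_overlay.chunk_range().end guard skips the splice entirely once the shifted range falls wholly outside it.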
@@ -198,6 +219,8 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
 
     cache.update_internal_nodes(&new_overlay)?;
 
+    cache.chunk_index = new_overlay.next_node();
+
     Ok(new_overlay)
 }
 
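Advancing cache.chunk_index to new_overlay.next_node() before returning leaves the traversal cursor just past this item's (possibly resized) subtree, rather than wherever the old overlay ended; this appears to be the invariant the earlier += 1 change depends on.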
@@ -77,7 +77,7 @@ impl TreeHashCache {
         cache.splice(0..internal_node_bytes, merkleized);
 
         Ok(Self {
-            chunk_modified: vec![false; cache.len() / BYTES_PER_CHUNK],
+            chunk_modified: vec![true; cache.len() / BYTES_PER_CHUNK],
             cache,
             schemas,
             chunk_index: 0,
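Starting chunk_modified as all-true makes a freshly built cache treat every chunk as dirty, so the first update pass cannot skip any internal node on the strength of a stale flag. A hedged interpretation, since the commit message doesn't elaborate on this line.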
@@ -141,7 +141,7 @@ impl TreeHashCache {
         // This grows/shrinks the bytes to accomodate the new tree, preserving as much of the tree
         // as possible.
         if new_overlay.num_leaf_nodes() != old_overlay.num_leaf_nodes() {
-            // Get slices of the exsiting tree from the cache.
+            // Get slices of the existing tree from the cache.
             let (old_bytes, old_flags) = self
                 .slices(old_overlay.chunk_range())
                 .ok_or_else(|| Error::UnableToObtainSlices)?;
@@ -418,13 +418,9 @@ fn test_struct_with_two_vecs() {
         },
     ];
 
-    test_routine(variants[0].clone(), variants[6..7].to_vec());
-
-    /*
     for v in &variants {
         test_routine(v.clone(), variants.clone());
     }
-    */
 }
 
 #[derive(Clone, Debug, TreeHash, CachedTreeHash)]
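With the narrowed test_routine(variants[0].clone(), variants[6..7].to_vec()) call removed and the surrounding /* ... */ uncommented, the test once again exercises every variant against the full variant list, covering the transitions to shorter lists that this commit fixes. The narrowed call looks like a leftover from reproducing the failure in isolation.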