diff --git a/.travis.yml b/.travis.yml
index 45f5d25f..58e5e45d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,6 +11,4 @@ matrix:
 script:
   - cargo check --all --tests --benches
   - cargo test --all
-  - cd trie-db && cargo check --no-default-features && cd ..
-  - cd memory-db && cargo check --no-default-features && cd ..
-  - cd trie-root && cargo check --no-default-features && cd ..
+  - cd check_no_std && cargo check --no-default-features && cd ..
diff --git a/Cargo.toml b/Cargo.toml
index a974bf7e..3172afe6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -8,5 +8,6 @@ members = [
 	"test-support/trie-standardmap",
 	"test-support/trie-bench",
 	"trie-db",
-	"trie-root"
+	"trie-root",
+	"check_no_std"
 ]
diff --git a/check_no_std/Cargo.toml b/check_no_std/Cargo.toml
new file mode 100644
index 00000000..c9ef7c8d
--- /dev/null
+++ b/check_no_std/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "check_no_std"
+version = "0.0.1"
+authors = ["Parity Technologies "]
+description = "Crate that imports dependencies as no_std for running cargo check"
+repository = "https://github.com/paritytech/trie"
+license = "Apache-2.0"
+edition = "2018"
+
+[dependencies]
+trie-db = { path = "../trie-db", default-features = false, version = "0.20.1"}
+memory-db = { path = "../memory-db", default-features = false, version = "0.20.1"}
+trie-root = { path = "../trie-root", default-features = false, version = "0.16.0"}
diff --git a/check_no_std/src/lib.rs b/check_no_std/src/lib.rs
new file mode 100644
index 00000000..cfcce6ee
--- /dev/null
+++ b/check_no_std/src/lib.rs
@@ -0,0 +1,24 @@
+// Copyright 2017, 2018 Parity Technologies
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Crate used to `cargo check` a no_std compatible library
+//! without leaking dev-dependencies.
+//! Do not publish.
+
+#![no_std]
+
+
+pub use trie_db::*;
+pub use memory_db::*;
+pub use trie_root::*;
diff --git a/hash-db/src/lib.rs b/hash-db/src/lib.rs
index 249ae4eb..15f62053 100644
--- a/hash-db/src/lib.rs
+++ b/hash-db/src/lib.rs
@@ -40,13 +40,14 @@ impl MaybeDebug for T {}
 /// nibbles (the node key can be split into prefix and node partial).
 /// Therefore it is always the leftmost portion of the node key, so its internal representation
 /// is a non expanded byte slice followed by a last padded byte representation.
-/// The padded byte is an optional padded value.
-pub type Prefix<'a> = (&'a[u8], Option<u8>);
+/// The padded byte is a pair of u8 containing the number of nibbles, followed by
+/// the left-aligned padded value.
+pub type Prefix<'a> = (&'a[u8], (u8, u8));
 
 /// An empty prefix constant.
 /// Can be use when the prefix is not use internally
 /// or for root nodes.
-pub static EMPTY_PREFIX: Prefix<'static> = (&[], None);
+pub static EMPTY_PREFIX: Prefix<'static> = (&[], (0, 0));
 
 /// Trait describing an object that can hash a slice of bytes. Used to abstract
 /// other types over the hashing algorithm.
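Illustration (not part of the patch): the second element of the reworked `Prefix` packs the number of nibbles held in the trailing partial byte together with that byte, left aligned and zero padded. The sketch below only restates that convention; `prefix_from_nibbles` is a hypothetical helper and a radix-16 key is assumed.

```rust
/// Same shape as the reworked `hash-db` alias: a byte-aligned slice plus
/// (number of nibbles in the trailing partial byte, left-aligned padded byte).
type Prefix<'a> = (&'a [u8], (u8, u8));

/// Mirrors the new `EMPTY_PREFIX`: no bytes, no partial nibble.
const EMPTY_PREFIX: Prefix<'static> = (&[], (0, 0));

/// Hypothetical helper: build a prefix from a left-aligned nibble key of
/// `nibble_len` nibbles stored in `bytes` (radix 16 assumed, 2 nibbles per byte).
fn prefix_from_nibbles(bytes: &[u8], nibble_len: usize) -> Prefix<'_> {
    let full = nibble_len / 2;        // complete bytes of the prefix
    let rem = (nibble_len % 2) as u8; // nibbles left in the partial byte
    if rem == 0 {
        // byte aligned: corresponds to the old `None` case
        (&bytes[..full], (0, 0))
    } else {
        // keep the used high nibble, zero the padding bits
        (&bytes[..full], (rem, bytes[full] & 0xF0))
    }
}

fn main() {
    // 3 nibbles: two packed in 0x12, one (0x3) left aligned in the last byte
    assert_eq!(prefix_from_nibbles(&[0x12, 0x34], 3), (&[0x12][..], (1, 0x30)));
    // 4 nibbles: byte aligned, same information as the old `(prefix, None)`
    assert_eq!(prefix_from_nibbles(&[0x12, 0x34], 4), (&[0x12, 0x34][..], (0, 0)));
    let empty: &[u8] = &[];
    assert_eq!(EMPTY_PREFIX, (empty, (0, 0)));
}
```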
Defines a single `hash` method and an diff --git a/memory-db/src/lib.rs b/memory-db/src/lib.rs index 67e67246..72d3db13 100644 --- a/memory-db/src/lib.rs +++ b/memory-db/src/lib.rs @@ -217,8 +217,8 @@ impl KeyFunction for PrefixedKey { pub fn prefixed_key(key: &H::Out, prefix: Prefix) -> Vec { let mut prefixed_key = Vec::with_capacity(key.as_ref().len() + prefix.0.len() + 1); prefixed_key.extend_from_slice(prefix.0); - if let Some(last) = prefix.1 { - prefixed_key.push(last); + if (prefix.1).0 > 0 { + prefixed_key.push((prefix.1).1); } prefixed_key.extend_from_slice(key.as_ref()); prefixed_key @@ -243,16 +243,16 @@ impl KeyFunction for LegacyPrefixedKey { /// Only for trie radix 16 trie. pub fn legacy_prefixed_key(key: &H::Out, prefix: Prefix) -> Vec { let mut prefixed_key = Vec::with_capacity(key.as_ref().len() + prefix.0.len() + 1); - if let Some(last) = prefix.1 { + if (prefix.1).0 == 0 { + prefixed_key.push(0); + prefixed_key.extend_from_slice(prefix.0); + } else { let mut prev = 0x01u8; for i in prefix.0.iter() { prefixed_key.push((prev << 4) + (*i >> 4)); prev = *i; } - prefixed_key.push((prev << 4) + (last >> 4)); - } else { - prefixed_key.push(0); - prefixed_key.extend_from_slice(prefix.0); + prefixed_key.push((prev << 4) + ((prefix.1).1 >> 4)); } prefixed_key.extend_from_slice(key.as_ref()); prefixed_key diff --git a/test-support/reference-trie/src/lib.rs b/test-support/reference-trie/src/lib.rs index 5b717c83..57e184cd 100644 --- a/test-support/reference-trie/src/lib.rs +++ b/test-support/reference-trie/src/lib.rs @@ -22,22 +22,25 @@ use parity_scale_codec::{Decode, Input, Output, Encode, Compact, Error as CodecE use trie_root::Hasher; use trie_db::{ - node::{NibbleSlicePlan, NodePlan, NodeHandlePlan}, + node::{NibbleSlicePlan, NodePlan, NodeHandlePlan, BranchChildrenNodePlan}, triedbmut::ChildReference, DBValue, trie_visit, TrieBuilder, TrieRoot, Partial, + TrieHash, }; use std::borrow::Borrow; use keccak_hasher::KeccakHasher; +pub use trie_db::triedbmut::NodeHandle; pub use trie_db::{ - decode_compact, encode_compact, - nibble_ops, NibbleSlice, NibbleVec, NodeCodec, proof, Record, Recorder, + decode_compact, encode_compact, BitMap, + NibbleSlice, NibbleVec, NodeCodec, proof, Record, Recorder, Trie, TrieConfiguration, TrieDB, TrieDBIterator, TrieDBMut, TrieDBNodeIterator, TrieError, - TrieIterator, TrieLayout, TrieMut, + TrieIterator, TrieLayout, TrieMut, ChildIndex, ChildIndex16, Radix16, NibbleOps, + Radix4, ChildIndex4, }; pub use trie_root::TrieStream; pub mod node { @@ -50,36 +53,126 @@ pub struct ExtensionLayout; impl TrieLayout for ExtensionLayout { const USE_EXTENSION: bool = true; type Hash = KeccakHasher; - type Codec = ReferenceNodeCodec; + type Nibble = Radix16; + type Codec = ReferenceNodeCodec; + type ChildRefIndex = ChildIndex16>>; + type NodeIndex = ChildIndex16>>; } impl TrieConfiguration for ExtensionLayout { } /// Trie layout without extension nodes, allowing /// generic hasher. 
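Illustration (not part of the patch): a self-contained restatement of the `prefixed_key` change above, with `H::Out` replaced by a plain byte slice so it runs on its own; the `(count, padded byte)` pair convention from `hash-db` is assumed.

```rust
/// Same pair convention as the reworked `hash-db::Prefix`.
type Prefix<'a> = (&'a [u8], (u8, u8));

/// Mirrors the updated `prefixed_key`: prefix bytes, then the padded byte only
/// when the prefix ends on a partial byte, then the hash itself.
fn prefixed_key(hash: &[u8], prefix: Prefix) -> Vec<u8> {
    let mut out = Vec::with_capacity(hash.len() + prefix.0.len() + 1);
    out.extend_from_slice(prefix.0);
    if (prefix.1).0 > 0 {
        // previously the `Some(last)` branch, now signalled by a non-zero nibble count
        out.push((prefix.1).1);
    }
    out.extend_from_slice(hash);
    out
}

fn main() {
    let hash = [0xAAu8; 4];
    // byte-aligned prefix: behaves like the old `None` case
    assert_eq!(prefixed_key(&hash, (&[0x12], (0, 0))), vec![0x12, 0xAA, 0xAA, 0xAA, 0xAA]);
    // prefix ending on a single nibble: the padded byte is kept in the database key
    assert_eq!(prefixed_key(&hash, (&[0x12], (1, 0x30))), vec![0x12, 0x30, 0xAA, 0xAA, 0xAA, 0xAA]);
}
```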
-pub struct GenericNoExtensionLayout(PhantomData); - -impl TrieLayout for GenericNoExtensionLayout { +pub struct GenericNoExtensionLayout(PhantomData<(H, N, C, B)>); + +impl< + H: Hasher, + N: NibbleOps, + C: ChildIndex::Out>>, + B: BitMap, +> TrieLayout for GenericNoExtensionLayout { const USE_EXTENSION: bool = false; type Hash = H; - type Codec = ReferenceNodeCodecNoExt; + type Nibble = N; + type Codec = ReferenceNodeCodecNoExt; + type ChildRefIndex = ChildIndex16::Out>>; + type NodeIndex = ChildIndex16::Out>>; } -impl TrieConfiguration for GenericNoExtensionLayout { } +impl< + H: Hasher, + N: NibbleOps, + C: ChildIndex>, + B: BitMap, +> TrieConfiguration for GenericNoExtensionLayout { } + +/// Trie layout without extension nodes. +pub type NoExtensionLayout = GenericNoExtensionLayout< + keccak_hasher::KeccakHasher, + Radix16, + ChildIndex16::Out>>, + BitMap16, +>; /// Trie layout without extension nodes. -pub type NoExtensionLayout = GenericNoExtensionLayout; +pub type NoExtensionLayoutQuarter = GenericNoExtensionLayout< + keccak_hasher::KeccakHasher, + Radix4, + ChildIndex4::Out>>, + BitMap4, +>; /// Children bitmap codec for radix 16 trie. -pub struct Bitmap(u16); +/// BE representation. +pub struct BitMap256([u8; 32]); + +pub struct BuffBitMap256([u8; 33]); + +impl Default for BuffBitMap256 { + fn default() -> Self { + BuffBitMap256([ + 0u8, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + ]) + } +} + +impl AsRef<[u8]> for BuffBitMap256 { + fn as_ref(&self) -> &[u8] { + &self.0[..] + } +} -const BITMAP_LENGTH: usize = 2; +impl AsMut<[u8]> for BuffBitMap256 { + fn as_mut(&mut self) -> &mut [u8] { + &mut self.0[..] + } +} -impl Bitmap { +impl BitMap for BitMap256 { + const ENCODED_LEN: usize = 32; + type Error = CodecError; + type Buffer = BuffBitMap256; // need a byte for header + + fn decode(data: &[u8]) -> Result { + if data.len() < 32 { + return Err("End of data".into()); + } + let mut v = [0u8; 32]; + v[..].copy_from_slice(&data[..]); + Ok(BitMap256(v)) + } + + fn value_at(&self, i: usize) -> bool { + let ix = i / 8; + let i = i % 8; + self.0.as_ref()[ix] & (0b1000_0000 >> i) != 0 + } + + fn encode>(has_children: I , output: &mut [u8]) { + for (i, v) in has_children.enumerate() { + if v { + output[i / 8] |= 0b1000_0000 >> (i % 8); + } + } + } +} + +/// Children bitmap codec for radix 16 trie. +/// LE representation. +pub struct BitMap16(u16); + +impl BitMap for BitMap16 { + const ENCODED_LEN: usize = 2; + type Error = CodecError; + type Buffer = [u8;3]; // need a byte for header - fn decode(data: &[u8]) -> Result { + fn decode(data: &[u8]) -> Result { Ok(u16::decode(&mut &data[..]) - .map(|v| Bitmap(v))?) + .map(|v| BitMap16(v))?) } fn value_at(&self, i: usize) -> bool { @@ -98,16 +191,83 @@ impl Bitmap { } } +/// Children bitmap codec for radix 4 trie. +pub struct BitMap4(u8); + +impl BitMap for BitMap4 { + const ENCODED_LEN: usize = 1; + type Error = CodecError; + type Buffer = [u8;2]; // need a byte for header + + fn decode(data: &[u8]) -> Result { + if data.len() == 0 || data[0] & 0xf0 != 0 { + Err("Bad format".into()) + } else { + Ok(BitMap4(data[0])) + } + } + + fn value_at(&self, i: usize) -> bool { + self.0 & (1u8 << i) != 0 + } + + fn encode>(has_children: I , output: &mut [u8]) { + let mut bitmap: u8 = 0; + let mut cursor: u8 = 1; + for v in has_children { + if v { bitmap |= cursor } + cursor <<= 1; + } + output[0] = bitmap; + } +} + +/// Children bitmap codec for radix 2 trie. 
+/// Note that this could possibly be merge into the header byte, +/// so it is test only implementation. +pub struct BitMap2(u8); + +impl BitMap for BitMap2 { + const ENCODED_LEN: usize = 1; + type Error = CodecError; + type Buffer = [u8;2]; // need a byte for header + + fn decode(data: &[u8]) -> Result { + if data.len() == 0 || data[0] & 0b11111100 != 0 { + Err("Bad format".into()) + } else { + Ok(BitMap2(data[0])) + } + } + + fn value_at(&self, i: usize) -> bool { + self.0 & (1u8 << i) != 0 + } + + fn encode>(has_children: I , output: &mut [u8]) { + let mut bitmap: u8 = 0; + let mut cursor: u8 = 1; + for v in has_children { + if v { bitmap |= cursor } + cursor <<= 1; + } + output[0] = bitmap; + } +} + pub type RefTrieDB<'a> = trie_db::TrieDB<'a, ExtensionLayout>; pub type RefTrieDBNoExt<'a> = trie_db::TrieDB<'a, NoExtensionLayout>; +pub type RefTrieDBNoExtQ<'a> = trie_db::TrieDB<'a, NoExtensionLayoutQuarter>; pub type RefTrieDBMut<'a> = trie_db::TrieDBMut<'a, ExtensionLayout>; pub type RefTrieDBMutNoExt<'a> = trie_db::TrieDBMut<'a, NoExtensionLayout>; +pub type RefTrieDBMutNoExtQ<'a> = trie_db::TrieDBMut<'a, NoExtensionLayoutQuarter>; pub type RefFatDB<'a> = trie_db::FatDB<'a, ExtensionLayout>; pub type RefFatDBMut<'a> = trie_db::FatDBMut<'a, ExtensionLayout>; pub type RefSecTrieDB<'a> = trie_db::SecTrieDB<'a, ExtensionLayout>; pub type RefSecTrieDBMut<'a> = trie_db::SecTrieDBMut<'a, ExtensionLayout>; pub type RefLookup<'a, Q> = trie_db::Lookup<'a, ExtensionLayout, Q>; pub type RefLookupNoExt<'a, Q> = trie_db::Lookup<'a, NoExtensionLayout, Q>; +pub type RefLookupNoExtQ<'a, Q> = trie_db::Lookup<'a, NoExtensionLayoutQuarter, Q>; pub fn reference_trie_root(input: I) -> ::Out where I: IntoIterator, @@ -204,13 +364,13 @@ fn fuse_nibbles_node_no_extension<'a>( /// For stream variant with extension. fn branch_node(has_value: bool, has_children: impl Iterator) -> [u8; 3] { let mut result = [0, 0, 0]; - branch_node_buffered(has_value, has_children, &mut result[..]); + branch_node_buffered::(has_value, has_children, &mut result[..]); result } /// Encoding of branch header and children bitmap for any radix. /// For codec/stream variant with extension. -fn branch_node_buffered>( +fn branch_node_buffered>( has_value: bool, has_children: I, output: &mut[u8], @@ -221,7 +381,7 @@ fn branch_node_buffered>( BRANCH_NODE_NO_VALUE }; output[0] = first; - Bitmap::encode(has_children, &mut output[1..]); + BITMAP::encode(has_children, &mut output[1..]); } /// Encoding of children bitmap (for trie stream radix 16). @@ -483,48 +643,48 @@ impl Decode for NodeHeaderNoExt { /// Simple reference implementation of a `NodeCodec`. #[derive(Default, Clone)] -pub struct ReferenceNodeCodec(PhantomData); +pub struct ReferenceNodeCodec(PhantomData<(H, I, BITMAP)>); /// Simple reference implementation of a `NodeCodec`. /// Even if implementation follows initial specification of /// https://github.com/w3f/polkadot-re-spec/issues/8, this may /// not follow it in the future, it is mainly the testing codec without extension node. 
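Illustration (not part of the patch): a trimmed, standalone version of the radix-4 children bitmap above (no `BitMap` trait, plain string errors), showing the encode/decode round trip the codecs rely on.

```rust
/// One bit per possible child of a radix-4 branch, least significant bit first.
struct BitMap4(u8);

impl BitMap4 {
    const ENCODED_LEN: usize = 1;

    fn decode(data: &[u8]) -> Result<Self, &'static str> {
        // only the low 4 bits may be used for a radix-4 branch
        if data.is_empty() || data[0] & 0xF0 != 0 {
            Err("Bad format")
        } else {
            Ok(BitMap4(data[0]))
        }
    }

    fn value_at(&self, i: usize) -> bool {
        self.0 & (1u8 << i) != 0
    }

    fn encode(has_children: impl Iterator<Item = bool>, output: &mut [u8]) {
        let mut bitmap = 0u8;
        let mut cursor = 1u8;
        for v in has_children {
            if v { bitmap |= cursor }
            cursor <<= 1;
        }
        output[0] = bitmap;
    }
}

fn main() {
    let mut buf = [0u8; BitMap4::ENCODED_LEN];
    // children present at slots 0 and 2
    BitMap4::encode([true, false, true, false].iter().copied(), &mut buf);
    assert_eq!(buf[0], 0b0000_0101);
    let bm = BitMap4::decode(&buf).unwrap();
    assert!(bm.value_at(0) && bm.value_at(2));
    assert!(!bm.value_at(1) && !bm.value_at(3));
}
```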
#[derive(Default, Clone)] -pub struct ReferenceNodeCodecNoExt(PhantomData); +pub struct ReferenceNodeCodecNoExt(PhantomData<(H, I, BITMAP)>); -fn partial_to_key(partial: Partial, offset: u8, over: u8) -> Vec { +fn partial_to_key(partial: Partial, offset: u8, over: u8) -> Vec { let number_nibble_encoded = (partial.0).0 as usize; - let nibble_count = partial.1.len() * nibble_ops::NIBBLE_PER_BYTE + number_nibble_encoded; + let nibble_count = partial.1.len() * N::NIBBLE_PER_BYTE + number_nibble_encoded; assert!(nibble_count < over as usize); let mut output = vec![offset + nibble_count as u8]; if number_nibble_encoded > 0 { - output.push(nibble_ops::pad_right((partial.0).1)); + output.push(N::pad_right(number_nibble_encoded as u8, (partial.0).1)); } output.extend_from_slice(&partial.1[..]); output } -fn partial_from_iterator_to_key>( +fn partial_from_iterator_to_key>( partial: I, nibble_count: usize, offset: u8, over: u8, ) -> Vec { assert!(nibble_count < over as usize); - let mut output = Vec::with_capacity(1 + (nibble_count / nibble_ops::NIBBLE_PER_BYTE)); + let mut output = Vec::with_capacity(1 + (nibble_count / N::NIBBLE_PER_BYTE)); output.push(offset + nibble_count as u8); output.extend(partial); output } -fn partial_from_iterator_encode>( +fn partial_from_iterator_encode>( partial: I, nibble_count: usize, node_kind: NodeKindNoExt, ) -> Vec { let nibble_count = ::std::cmp::min(NIBBLE_SIZE_BOUND_NO_EXT, nibble_count); - let mut output = Vec::with_capacity(3 + (nibble_count / nibble_ops::NIBBLE_PER_BYTE)); + let mut output = Vec::with_capacity(3 + (nibble_count / N::NIBBLE_PER_BYTE)); match node_kind { NodeKindNoExt::Leaf => NodeHeaderNoExt::Leaf(nibble_count).encode_to(&mut output), @@ -537,9 +697,9 @@ fn partial_from_iterator_encode>( output } -fn partial_encode(partial: Partial, node_kind: NodeKindNoExt) -> Vec { +fn partial_encode(partial: Partial, node_kind: NodeKindNoExt) -> Vec { let number_nibble_encoded = (partial.0).0 as usize; - let nibble_count = partial.1.len() * nibble_ops::NIBBLE_PER_BYTE + number_nibble_encoded; + let nibble_count = partial.1.len() * N::NIBBLE_PER_BYTE + number_nibble_encoded; let nibble_count = ::std::cmp::min(NIBBLE_SIZE_BOUND_NO_EXT, nibble_count); @@ -553,7 +713,7 @@ fn partial_encode(partial: Partial, node_kind: NodeKindNoExt) -> Vec { NodeHeaderNoExt::Branch(false, nibble_count).encode_to(&mut output), }; if number_nibble_encoded > 0 { - output.push(nibble_ops::pad_right((partial.0).1)); + output.push(N::pad_right(number_nibble_encoded as u8, (partial.0).1)); } output.extend_from_slice(&partial.1[..]); output @@ -615,21 +775,27 @@ impl<'a> Input for ByteSliceInput<'a> { // but due to the current limitations of Rust const evaluation we can't do // `const HASHED_NULL_NODE: ::Out = ::Out( … … )`. // Perhaps one day soon? -impl NodeCodec for ReferenceNodeCodec { +impl< + H: Hasher, + N: NibbleOps, + BITMAP: BitMap, +> +NodeCodec for ReferenceNodeCodec { type Error = CodecError; type HashOut = H::Out; + type Nibble = N; fn hashed_null_node() -> ::Out { H::hash(::empty_node()) } - fn decode_plan(data: &[u8]) -> ::std::result::Result { + fn decode_plan(data: &[u8]) -> ::std::result::Result, Self::Error> { let mut input = ByteSliceInput::new(data); match NodeHeader::decode(&mut input)? 
{ NodeHeader::Null => Ok(NodePlan::Empty), NodeHeader::Branch(has_value) => { - let bitmap_range = input.take(BITMAP_LENGTH)?; - let bitmap = Bitmap::decode(&data[bitmap_range])?; + let bitmap_range = input.take(BITMAP::ENCODED_LEN)?; + let bitmap = BITMAP::decode(&data[bitmap_range])?; let value = if has_value { let count = >::decode(&mut input)?.0 as usize; @@ -637,28 +803,40 @@ impl NodeCodec for ReferenceNodeCodec { } else { None }; - let mut children = [ - None, None, None, None, None, None, None, None, - None, None, None, None, None, None, None, None, - ]; - for i in 0..nibble_ops::NIBBLE_LENGTH { + let mut error: ::std::result::Result<(), Self::Error> = Ok(()); + let children = BranchChildrenNodePlan::new((0..N::NIBBLE_LENGTH).map(|i| { if bitmap.value_at(i) { - let count = >::decode(&mut input)?.0 as usize; - let range = input.take(count)?; - children[i] = Some(if count == H::LENGTH { - NodeHandlePlan::Hash(range) + let count = match >::decode(&mut input) { + Ok(c) => c.0 as usize, + Err(e) => { + error = Err(e); + return None; + }, + }; + let range = match input.take(count) { + Ok(i) => i, + Err(e) => { + error = Err(e); + return None; + }, + }; + if count == H::LENGTH { + Some(NodeHandlePlan::Hash(range)) } else { - NodeHandlePlan::Inline(range) - }); + Some(NodeHandlePlan::Inline(range)) + } + } else { + None } - } + })); + error?; Ok(NodePlan::Branch { value, children }) } NodeHeader::Extension(nibble_count) => { let partial = input.take( - (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE + (nibble_count + (N::NIBBLE_PER_BYTE - 1)) / N::NIBBLE_PER_BYTE )?; - let partial_padding = nibble_ops::number_padding(nibble_count); + let partial_padding = N::number_padding(nibble_count); let count = >::decode(&mut input)?.0 as usize; let range = input.take(count)?; let child = if count == H::LENGTH { @@ -673,9 +851,9 @@ impl NodeCodec for ReferenceNodeCodec { } NodeHeader::Leaf(nibble_count) => { let partial = input.take( - (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE + (nibble_count + (N::NIBBLE_PER_BYTE - 1)) / N::NIBBLE_PER_BYTE )?; - let partial_padding = nibble_ops::number_padding(nibble_count); + let partial_padding = N::number_padding(nibble_count); let count = >::decode(&mut input)?.0 as usize; let value = input.take(count)?; Ok(NodePlan::Leaf { @@ -695,7 +873,7 @@ impl NodeCodec for ReferenceNodeCodec { } fn leaf_node(partial: Partial, value: &[u8]) -> Vec { - let mut output = partial_to_key(partial, LEAF_NODE_OFFSET, LEAF_NODE_OVER); + let mut output = partial_to_key::(partial, LEAF_NODE_OFFSET, LEAF_NODE_OVER); value.encode_to(&mut output); output } @@ -705,7 +883,7 @@ impl NodeCodec for ReferenceNodeCodec { number_nibble: usize, child: ChildReference, ) -> Vec { - let mut output = partial_from_iterator_to_key( + let mut output = partial_from_iterator_to_key::( partial, number_nibble, EXTENSION_NODE_OFFSET, @@ -723,8 +901,8 @@ impl NodeCodec for ReferenceNodeCodec { children: impl Iterator>>>, maybe_value: Option<&[u8]>, ) -> Vec { - let mut output = vec![0; BITMAP_LENGTH + 1]; - let mut prefix: [u8; 3] = [0; 3]; + let mut output = vec![0; BITMAP::ENCODED_LEN + 1]; + let mut prefix: BITMAP::Buffer = Default::default(); let have_value = if let Some(value) = maybe_value { value.encode_to(&mut output); true @@ -742,8 +920,8 @@ impl NodeCodec for ReferenceNodeCodec { } None => false, }); - branch_node_buffered(have_value, has_children, prefix.as_mut()); - output[0..BITMAP_LENGTH + 
1].copy_from_slice(prefix.as_ref()); + branch_node_buffered::(have_value, has_children, prefix.as_mut()); + output[0..BITMAP::ENCODED_LEN + 1].copy_from_slice(prefix.as_ref()); output } @@ -757,51 +935,70 @@ impl NodeCodec for ReferenceNodeCodec { } -impl NodeCodec for ReferenceNodeCodecNoExt { +impl< + H: Hasher, + N: NibbleOps, + BITMAP: BitMap, +> NodeCodec for ReferenceNodeCodecNoExt { type Error = CodecError; type HashOut = ::Out; + type Nibble = N; fn hashed_null_node() -> ::Out { H::hash(::empty_node()) } - fn decode_plan(data: &[u8]) -> ::std::result::Result { + fn decode_plan(data: &[u8]) -> ::std::result::Result, Self::Error> { let mut input = ByteSliceInput::new(data); match NodeHeaderNoExt::decode(&mut input)? { NodeHeaderNoExt::Null => Ok(NodePlan::Empty), NodeHeaderNoExt::Branch(has_value, nibble_count) => { - let padding = nibble_count % nibble_ops::NIBBLE_PER_BYTE != 0; + let nibble_with_padding = nibble_count % N::NIBBLE_PER_BYTE; + let padding_length = N::NIBBLE_PER_BYTE - nibble_with_padding; // check that the padding is valid (if any) - if padding && nibble_ops::pad_left(data[input.offset]) != 0 { + if nibble_with_padding > 0 && N::pad_left(padding_length as u8, data[input.offset]) != 0 { return Err(CodecError::from("Bad format")); } let partial = input.take( - (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE + (nibble_count + (N::NIBBLE_PER_BYTE - 1)) / N::NIBBLE_PER_BYTE )?; - let partial_padding = nibble_ops::number_padding(nibble_count); - let bitmap_range = input.take(BITMAP_LENGTH)?; - let bitmap = Bitmap::decode(&data[bitmap_range])?; + let partial_padding = N::number_padding(nibble_count); + let bitmap_range = input.take(BITMAP::ENCODED_LEN)?; + let bitmap = BITMAP::decode(&data[bitmap_range])?; let value = if has_value { let count = >::decode(&mut input)?.0 as usize; Some(input.take(count)?) 
} else { None }; - let mut children = [ - None, None, None, None, None, None, None, None, - None, None, None, None, None, None, None, None, - ]; - for i in 0..nibble_ops::NIBBLE_LENGTH { + let mut error: ::std::result::Result<(), Self::Error> = Ok(()); + let children = BranchChildrenNodePlan::new((0..N::NIBBLE_LENGTH).map(|i| { if bitmap.value_at(i) { - let count = >::decode(&mut input)?.0 as usize; - let range = input.take(count)?; - children[i] = Some(if count == H::LENGTH { - NodeHandlePlan::Hash(range) + let count = match >::decode(&mut input) { + Ok(c) => c.0 as usize, + Err(e) => { + error = Err(e); + return None; + }, + }; + let range = match input.take(count) { + Ok(i) => i, + Err(e) => { + error = Err(e); + return None; + }, + }; + if count == H::LENGTH { + Some(NodeHandlePlan::Hash(range)) } else { - NodeHandlePlan::Inline(range) - }); + Some(NodeHandlePlan::Inline(range)) + } + } else { + None } - } + })); + error?; + Ok(NodePlan::NibbledBranch { partial: NibbleSlicePlan::new(partial, partial_padding), value, @@ -809,15 +1006,16 @@ impl NodeCodec for ReferenceNodeCodecNoExt { }) } NodeHeaderNoExt::Leaf(nibble_count) => { - let padding = nibble_count % nibble_ops::NIBBLE_PER_BYTE != 0; + let nibble_with_padding = nibble_count % N::NIBBLE_PER_BYTE; + let padding_length = N::NIBBLE_PER_BYTE - nibble_with_padding; // check that the padding is valid (if any) - if padding && nibble_ops::pad_left(data[input.offset]) != 0 { + if nibble_with_padding > 0 && N::pad_left(padding_length as u8, data[input.offset]) != 0 { return Err(CodecError::from("Bad format")); } let partial = input.take( - (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE + (nibble_count + (N::NIBBLE_PER_BYTE - 1)) / N::NIBBLE_PER_BYTE )?; - let partial_padding = nibble_ops::number_padding(nibble_count); + let partial_padding = N::number_padding(nibble_count); let count = >::decode(&mut input)?.0 as usize; let value = input.take(count)?; Ok(NodePlan::Leaf { @@ -837,7 +1035,7 @@ impl NodeCodec for ReferenceNodeCodecNoExt { } fn leaf_node(partial: Partial, value: &[u8]) -> Vec { - let mut output = partial_encode(partial, NodeKindNoExt::Leaf); + let mut output = partial_encode::(partial, NodeKindNoExt::Leaf); value.encode_to(&mut output); output } @@ -864,25 +1062,25 @@ impl NodeCodec for ReferenceNodeCodecNoExt { maybe_value: Option<&[u8]>, ) -> Vec { let mut output = if maybe_value.is_some() { - partial_from_iterator_encode( + partial_from_iterator_encode::( partial, number_nibble, NodeKindNoExt::BranchWithValue, ) } else { - partial_from_iterator_encode( + partial_from_iterator_encode::( partial, number_nibble, NodeKindNoExt::BranchNoValue, ) }; let bitmap_index = output.len(); - let mut bitmap: [u8; BITMAP_LENGTH] = [0; BITMAP_LENGTH]; - (0..BITMAP_LENGTH).for_each(|_| output.push(0)); + let mut bitmap: BITMAP::Buffer = Default::default(); + (0..BITMAP::ENCODED_LEN).for_each(|_| output.push(0)); if let Some(value) = maybe_value { value.encode_to(&mut output); }; - Bitmap::encode(children.map(|maybe_child| match maybe_child.borrow() { + BITMAP::encode(children.map(|maybe_child| match maybe_child.borrow() { Some(ChildReference::Hash(h)) => { h.as_ref().encode_to(&mut output); true @@ -893,8 +1091,8 @@ impl NodeCodec for ReferenceNodeCodecNoExt { } None => false, }), bitmap.as_mut()); - output[bitmap_index..bitmap_index + BITMAP_LENGTH] - .copy_from_slice(&bitmap.as_ref()[..BITMAP_LENGTH]); + output[bitmap_index..bitmap_index + BITMAP::ENCODED_LEN] + 
.copy_from_slice(&bitmap.as_ref()[..BITMAP::ENCODED_LEN]); output } @@ -1100,6 +1298,56 @@ pub fn compare_implementations_no_extension( assert_eq!(root, root_new); } +/// Compare trie builder and in memory trie. +/// This uses the variant without extension nodes. +/// This uses a radix 4 trie. +pub fn compare_implementations_no_extension_q( + data: Vec<(Vec, Vec)>, + mut memdb: impl hash_db::HashDB, + mut hashdb: impl hash_db::HashDB, +) { + let root_new = { + let mut cb = TrieBuilder::new(&mut hashdb); + trie_visit::(data.clone().into_iter(), &mut cb); + cb.root.unwrap_or(Default::default()) + }; + let root = { + let mut root = Default::default(); + let mut t = RefTrieDBMutNoExtQ::new(&mut memdb, &mut root); + for i in 0..data.len() { + t.insert(&data[i].0[..], &data[i].1[..]).unwrap(); + } + t.root().clone() + }; + { + let db : &dyn hash_db::HashDB<_, _> = &memdb; + let t = RefTrieDBNoExtQ::new(&db, &root).unwrap(); + println!("{:?}", t); + } + + if root != root_new { + { + let db : &dyn hash_db::HashDB<_, _> = &hashdb; + let t = RefTrieDBNoExtQ::new(&db, &root_new).unwrap(); + println!("{:?}", t); + for a in t.iter().unwrap() { + println!("a:{:?}", a); + } + } + + { + let db : &dyn hash_db::HashDB<_, _> = &memdb; + let t = RefTrieDBNoExtQ::new(&db, &root).unwrap(); + println!("{:?}", t); + for a in t.iter().unwrap() { + println!("a:{:?}", a); + } + } + } + + assert_eq!(root, root_new); +} + /// `compare_implementations_no_extension` for unordered input (trie_root does /// ordering before running when trie_build expect correct ordering). pub fn compare_implementations_no_extension_unordered( @@ -1216,9 +1464,9 @@ mod tests { fn too_big_nibble_length() { // + 1 for 0 added byte of nibble encode let input = vec![0u8; (NIBBLE_SIZE_BOUND_NO_EXT as usize + 1) / 2 + 1]; - let enc = as NodeCodec> + let enc = as NodeCodec> ::leaf_node(((0, 0), &input), &[1]); - let dec = as NodeCodec> + let dec = as NodeCodec> ::decode(&enc).unwrap(); let o_sl = if let Node::Leaf(sl, _) = dec { Some(sl) diff --git a/trie-db/Cargo.toml b/trie-db/Cargo.toml index ddc703da..1801aa2c 100644 --- a/trie-db/Cargo.toml +++ b/trie-db/Cargo.toml @@ -11,7 +11,7 @@ edition = "2018" log = "0.4" smallvec = "1.0.0" hash-db = { path = "../hash-db", default-features = false, version = "0.15.2"} -hashbrown = { version = "0.6.3", default-features = false } +hashbrown = { version = "0.6.3", default-features = false, features = ["ahash"] } rustc-hex = { version = "2.1.0", default-features = false, optional = true } [dev-dependencies] diff --git a/trie-db/benches/bench.rs b/trie-db/benches/bench.rs index e2a63523..e7940d88 100644 --- a/trie-db/benches/bench.rs +++ b/trie-db/benches/bench.rs @@ -14,7 +14,7 @@ use criterion::{criterion_group, criterion_main, Bencher, black_box, Criterion}; -use trie_db::{NibbleSlice, proof::{generate_proof, verify_proof}, Trie}; +use trie_db::{NibbleSlice, proof::{generate_proof, verify_proof}, Trie, Radix16}; use trie_standardmap::{Alphabet, StandardMap, ValueMode}; criterion_group!(benches, @@ -49,7 +49,7 @@ fn nibble_common_prefix(b: &mut Criterion) { let (keys, values): (Vec<_>, Vec<_>) = st.make().into_iter().unzip(); b.bench_function("nibble_common_prefix", move |b| { let mixed: Vec<_> = keys.iter().zip(values.iter().rev()).map(|pair| { - (NibbleSlice::new(pair.0), NibbleSlice::new(pair.1)) + (NibbleSlice::::new(pair.0), NibbleSlice::::new(pair.1)) }).collect(); b.iter(&mut || { diff --git a/trie-db/fuzz/Cargo.toml b/trie-db/fuzz/Cargo.toml index 50ac2501..434d684c 100644 --- 
a/trie-db/fuzz/Cargo.toml +++ b/trie-db/fuzz/Cargo.toml @@ -10,8 +10,8 @@ cargo-fuzz = true [dependencies] hash-db = { path = "../../hash-db", version = "0.15.2" } -memory-db = { path = "../../memory-db", version = "0.18.1" } -reference-trie = { path = "../../test-support/reference-trie", version = "0.19.0" } +memory-db = { path = "../../memory-db", version = "0.20.0" } +reference-trie = { path = "../../test-support/reference-trie", version = "0.20.0" } keccak-hasher = { path = "../../test-support/keccak-hasher", version = "0.15.2" } [dependencies.trie-db] diff --git a/trie-db/src/iter_build.rs b/trie-db/src/iter_build.rs index fa0cabeb..4cde3c32 100644 --- a/trie-db/src/iter_build.rs +++ b/trie-db/src/iter_build.rs @@ -21,24 +21,13 @@ use hash_db::{Hasher, HashDB, Prefix}; use crate::rstd::{cmp::max, marker::PhantomData, vec::Vec}; use crate::triedbmut::{ChildReference}; use crate::nibble::NibbleSlice; -use crate::nibble::nibble_ops; +use crate::nibble::NibbleOps; use crate::node_codec::NodeCodec; use crate::{TrieLayout, TrieHash}; -macro_rules! exponential_out { - (@3, [$($inpp:expr),*]) => { exponential_out!(@2, [$($inpp,)* $($inpp),*]) }; - (@2, [$($inpp:expr),*]) => { exponential_out!(@1, [$($inpp,)* $($inpp),*]) }; - (@1, [$($inpp:expr),*]) => { [$($inpp,)* $($inpp),*] }; -} - type CacheNode = Option>; -#[inline(always)] -fn new_vec_slice_buffer() -> [CacheNode; 16] { - exponential_out!(@3, [None, None]) -} - -type ArrayNode = [CacheNode>; 16]; +type ArrayNode = ::ChildRefIndex; /// Struct containing iteration cache, can be at most the length of the lowest nibble. /// @@ -64,7 +53,7 @@ impl CacheAccum #[inline(always)] fn set_cache_value(&mut self, depth:usize, value: Option) { if self.0.is_empty() || self.0[self.0.len() - 1].2 < depth { - self.0.push((new_vec_slice_buffer(), None, depth)); + self.0.push((Default::default(), None, depth)); } let last = self.0.len() - 1; debug_assert!(self.0[last].2 <= depth); @@ -74,7 +63,7 @@ impl CacheAccum #[inline(always)] fn set_node(&mut self, depth: usize, nibble_index: usize, node: CacheNode>) { if self.0.is_empty() || self.0[self.0.len() - 1].2 < depth { - self.0.push((new_vec_slice_buffer(), None, depth)); + self.0.push((Default::default(), None, depth)); } let last = self.0.len() - 1; @@ -126,13 +115,13 @@ impl CacheAccum target_depth: usize, (k2, v2): &(impl AsRef<[u8]>, impl AsRef<[u8]>), ) { - let nibble_value = nibble_ops::left_nibble_at(&k2.as_ref()[..], target_depth); + let nibble_value = T::Nibble::left_nibble_at(&k2.as_ref()[..], target_depth); // is it a branch value (two candidate same ix) - let nkey = NibbleSlice::new_offset(&k2.as_ref()[..], target_depth + 1); + let nkey = NibbleSlice::::new_offset(&k2.as_ref()[..], target_depth + 1); let encoded = T::Codec::leaf_node(nkey.right(), &v2.as_ref()[..]); - let pr = NibbleSlice::new_offset( + let pr = NibbleSlice::::new_offset( &k2.as_ref()[..], - k2.as_ref().len() * nibble_ops::NIBBLE_PER_BYTE - nkey.len(), + k2.as_ref().len() * T::Nibble::NIBBLE_PER_BYTE - nkey.len(), ); let hash = callback.process(pr.left(), encoded, false); @@ -175,7 +164,7 @@ impl CacheAccum }; if !is_root { // put hash in parent - let nibble: u8 = nibble_ops::left_nibble_at(&ref_branch.as_ref()[..], llix); + let nibble: u8 = T::Nibble::left_nibble_at(&ref_branch.as_ref()[..], llix); self.set_node(llix, nibble as usize, Some(h)); } } @@ -200,11 +189,11 @@ impl CacheAccum v.as_ref().map(|v| v.as_ref()), ); self.reset_depth(branch_d); - let pr = NibbleSlice::new_offset(&key_branch, branch_d); + let pr = 
NibbleSlice::::new_offset(&key_branch, branch_d); let branch_hash = callback.process(pr.left(), encoded, is_root && nkey.is_none()); if let Some(nkeyix) = nkey { - let pr = NibbleSlice::new_offset(&key_branch, nkeyix.0); + let pr = NibbleSlice::::new_offset(&key_branch, nkeyix.0); let nib = pr.right_range_iter(nkeyix.1); let encoded = T::Codec::extension_node(nib, nkeyix.1, branch_hash); let h = callback.process(pr.left(), encoded, is_root); @@ -228,14 +217,14 @@ impl CacheAccum // encode branch let v = self.0[last].1.take(); let nkeyix = nkey.unwrap_or((0, 0)); - let pr = NibbleSlice::new_offset(&key_branch, nkeyix.0); + let pr = NibbleSlice::::new_offset(&key_branch, nkeyix.0); let encoded = T::Codec::branch_node_nibbled( pr.right_range_iter(nkeyix.1), nkeyix.1, self.0[last].0.as_ref().iter(), v.as_ref().map(|v| v.as_ref())); self.reset_depth(branch_d); let ext_length = nkey.as_ref().map(|nkeyix| nkeyix.0).unwrap_or(0); - let pr = NibbleSlice::new_offset( + let pr = NibbleSlice::::new_offset( &key_branch, branch_d - ext_length, ); @@ -267,10 +256,10 @@ pub fn trie_visit(input: I, callback: &mut F) let mut single = true; for (k, v) in iter_input { single = false; - let common_depth = nibble_ops::biggest_depth(&previous_value.0.as_ref()[..], &k.as_ref()[..]); + let common_depth = T::Nibble::biggest_depth(&previous_value.0.as_ref()[..], &k.as_ref()[..]); // 0 is a reserved value : could use option let depth_item = common_depth; - if common_depth == previous_value.0.as_ref().len() * nibble_ops::NIBBLE_PER_BYTE { + if common_depth == previous_value.0.as_ref().len() * T::Nibble::NIBBLE_PER_BYTE { // the new key include the previous one : branch value case // just stored value at branch depth depth_queue.set_cache_value(common_depth, Some(previous_value.1)); @@ -291,11 +280,11 @@ pub fn trie_visit(input: I, callback: &mut F) if single { // one single element corner case let (k2, v2) = previous_value; - let nkey = NibbleSlice::new_offset(&k2.as_ref()[..], last_depth); + let nkey = NibbleSlice::::new_offset(&k2.as_ref()[..], last_depth); let encoded = T::Codec::leaf_node(nkey.right(), &v2.as_ref()[..]); - let pr = NibbleSlice::new_offset( + let pr = NibbleSlice::::new_offset( &k2.as_ref()[..], - k2.as_ref().len() * nibble_ops::NIBBLE_PER_BYTE - nkey.len(), + k2.as_ref().len() * T::Nibble::NIBBLE_PER_BYTE - nkey.len(), ); callback.process(pr.left(), encoded, true); } else { @@ -557,6 +546,7 @@ mod test { compare_implementations_prefixed(data.clone()); compare_implementations_no_extension(data.clone()); compare_implementations_no_extension_prefixed(data.clone()); + compare_implementations_no_extension_q(data.clone()); } fn compare_implementations_prefixed(data: Vec<(Vec, Vec)>) { @@ -574,6 +564,11 @@ mod test { let hashdb = MemoryDB::, DBValue>::default(); reference_trie::compare_implementations_no_extension(data, memdb, hashdb); } + fn compare_implementations_no_extension_q(data: Vec<(Vec, Vec)>) { + let memdb = MemoryDB::<_, HashKey<_>, _>::default(); + let hashdb = MemoryDB::, DBValue>::default(); + reference_trie::compare_implementations_no_extension_q(data, memdb, hashdb); + } fn compare_implementations_no_extension_prefixed(data: Vec<(Vec, Vec)>) { let memdb = MemoryDB::<_, PrefixedKey<_>, _>::default(); let hashdb = MemoryDB::, DBValue>::default(); diff --git a/trie-db/src/iterator.rs b/trie-db/src/iterator.rs index 9fe56041..0c78545d 100644 --- a/trie-db/src/iterator.rs +++ b/trie-db/src/iterator.rs @@ -13,10 +13,10 @@ // limitations under the License. 
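Illustration (not part of the patch): the `nibble_ops` free functions used by `iter_build` become methods on the `T::Nibble: NibbleOps` parameter. Below is a standalone sketch of the two it leans on most, written over an explicit `bit_per_nibble` argument instead of the trait.

```rust
/// Number of leading nibbles (of `bit_per_nibble` bits) shared by two
/// left-aligned bytes; generic counterpart of `left_common`.
fn left_common(a: u8, b: u8, bit_per_nibble: usize) -> usize {
    ((a ^ b).leading_zeros() as usize) / bit_per_nibble
}

/// Longest common nibble prefix of two left-aligned packed keys;
/// generic counterpart of `biggest_depth`.
fn biggest_depth(v1: &[u8], v2: &[u8], bit_per_nibble: usize) -> usize {
    let nibble_per_byte = 8 / bit_per_nibble;
    let upper = v1.len().min(v2.len());
    for i in 0..upper {
        if v1[i] != v2[i] {
            return i * nibble_per_byte + left_common(v1[i], v2[i], bit_per_nibble);
        }
    }
    upper * nibble_per_byte
}

fn main() {
    // keys diverge inside the second byte (0x34 vs 0x3C)
    let (k1, k2) = ([0x12u8, 0x34], [0x12u8, 0x3C]);
    // radix 16 (4-bit nibbles): byte 0 plus the shared high nibble of byte 1
    assert_eq!(biggest_depth(&k1, &k2, 4), 3);
    // radix 4 (2-bit nibbles): 0x34 = 00_11_01_00, 0x3C = 00_11_11_00 -> 2 common
    assert_eq!(biggest_depth(&k1, &k2, 2), 4 + 2);
}
```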
use super::{CError, DBValue, Result, Trie, TrieHash, TrieIterator, TrieLayout}; -use hash_db::{Hasher, EMPTY_PREFIX}; +use hash_db::{EMPTY_PREFIX}; use crate::triedb::TrieDB; use crate::node::{NodePlan, NodeHandle, OwnedNode}; -use crate::nibble::{NibbleSlice, NibbleVec, nibble_ops}; +use crate::nibble::{NibbleSlice, NibbleVec, NibbleOps}; use crate::rstd::{rc::Rc, vec::Vec}; @@ -31,13 +31,13 @@ enum Status { #[cfg_attr(feature = "std", derive(Debug))] #[derive(Eq, PartialEq)] -struct Crumb { - hash: Option, - node: Rc>, +struct Crumb { + hash: Option>, + node: Rc>, status: Status, } -impl Crumb { +impl Crumb { /// Move on to next status in the node's sequence. fn increment(&mut self) { self.status = match (self.status, self.node.node_plan()) { @@ -48,7 +48,7 @@ impl Crumb { | (Status::At, NodePlan::NibbledBranch { .. }) => Status::AtChild(0), (Status::AtChild(x), NodePlan::Branch { .. }) | (Status::AtChild(x), NodePlan::NibbledBranch { .. }) - if x < (nibble_ops::NIBBLE_LENGTH - 1) => Status::AtChild(x + 1), + if x < (L::Nibble::NIBBLE_LENGTH - 1) => Status::AtChild(x + 1), _ => Status::Exiting, } } @@ -57,8 +57,8 @@ impl Crumb { /// Iterator for going through all nodes in the trie in pre-order traversal order. pub struct TrieDBNodeIterator<'a, L: TrieLayout> { db: &'a TrieDB<'a, L>, - trail: Vec>, - key_nibbles: NibbleVec, + trail: Vec>, + key_nibbles: NibbleVec, } impl<'a, L: TrieLayout> TrieDBNodeIterator<'a, L> { @@ -79,7 +79,7 @@ impl<'a, L: TrieLayout> TrieDBNodeIterator<'a, L> { } /// Descend into a payload. - fn descend(&mut self, node: OwnedNode, node_hash: Option>) { + fn descend(&mut self, node: OwnedNode, node_hash: Option>) { self.trail.push(Crumb { hash: node_hash, status: Status::Entering, @@ -162,7 +162,7 @@ impl<'a, L: TrieLayout> TrieDBNodeIterator<'a, L> { crumb.status = Status::AtChild(i as usize); self.key_nibbles.push(i); - if let Some(child) = &children[i as usize] { + if let Some(child) = &children.at(i as usize) { full_key_nibbles += 1; partial = partial.mid(1); @@ -182,7 +182,7 @@ impl<'a, L: TrieLayout> TrieDBNodeIterator<'a, L> { if slice < partial { crumb.status = Status::Exiting; self.key_nibbles.append_partial(slice.right()); - self.key_nibbles.push((nibble_ops::NIBBLE_LENGTH - 1) as u8); + self.key_nibbles.push((L::Nibble::NIBBLE_LENGTH - 1) as u8); return Ok(false); } return Ok(slice.starts_with(&partial)); @@ -200,7 +200,7 @@ impl<'a, L: TrieLayout> TrieDBNodeIterator<'a, L> { self.key_nibbles.append_partial(slice.right()); self.key_nibbles.push(i); - if let Some(child) = &children[i as usize] { + if let Some(child) = &children.at(i as usize) { full_key_nibbles += 1; partial = partial.mid(1); @@ -257,14 +257,14 @@ impl<'a, L: TrieLayout> TrieIterator for TrieDBNodeIterator<'a, L> { } impl<'a, L: TrieLayout> Iterator for TrieDBNodeIterator<'a, L> { - type Item = Result<(NibbleVec, Option>, Rc>), TrieHash, CError>; + type Item = Result<(NibbleVec, Option>, Rc>), TrieHash, CError>; fn next(&mut self) -> Option { - enum IterStep { + enum IterStep { YieldNode, PopTrail, Continue, - Descend(Result<(OwnedNode, Option), O, E>), + Descend(Result<(OwnedNode, Option>), TrieHash, CError>), } loop { let iter_step = { @@ -289,7 +289,7 @@ impl<'a, L: TrieLayout> Iterator for TrieDBNodeIterator<'a, L> { (Status::At, NodePlan::Extension { partial: partial_plan, child }) => { let partial = partial_plan.build(node_data); self.key_nibbles.append_partial(partial.right()); - IterStep::Descend::, CError>( + IterStep::Descend::( self.db.get_raw_or_lookup( 
b.hash.unwrap_or_default(), child.build(node_data), @@ -309,10 +309,10 @@ impl<'a, L: TrieLayout> Iterator for TrieDBNodeIterator<'a, L> { }, (Status::AtChild(i), NodePlan::Branch { children, .. }) | (Status::AtChild(i), NodePlan::NibbledBranch { children, .. }) => { - if let Some(child) = &children[i] { + if let Some(child) = &children.at(i) { self.key_nibbles.pop(); self.key_nibbles.push(i as u8); - IterStep::Descend::, CError>( + IterStep::Descend::( self.db.get_raw_or_lookup( b.hash.unwrap_or_default(), child.build(node_data), @@ -355,10 +355,10 @@ impl<'a, L: TrieLayout> Iterator for TrieDBNodeIterator<'a, L> { self.trail.last_mut()? .increment(); }, - IterStep::Descend::, CError>(Ok((node, node_hash))) => { + IterStep::Descend::(Ok((node, node_hash))) => { self.descend(node, node_hash); }, - IterStep::Descend::, CError>(Err(err)) => { + IterStep::Descend::(Err(err)) => { // Increment here as there is an implicit PopTrail. self.trail.last_mut() .expect( @@ -392,7 +392,7 @@ mod tests { use reference_trie::{ RefTrieDB, RefTrieDBMut, TrieError, TrieMut, TrieIterator, TrieDBNodeIterator, NibbleSlice, NibbleVec, - node::Node, + node::Node, Radix16, }; use reference_trie::{RefTrieDBNoExt, RefTrieDBMutNoExt}; @@ -426,8 +426,8 @@ mod tests { (memdb, root) } - fn nibble_vec>(bytes: T, len: usize) -> NibbleVec { - let slice = NibbleSlice::new(bytes.as_ref()); + fn nibble_vec>(bytes: T, len: usize) -> NibbleVec { + let slice = NibbleSlice::::new(bytes.as_ref()); let mut v = NibbleVec::new(); for i in 0..len { @@ -733,7 +733,7 @@ mod tests { }; // Remove the leaf node from the DB. - let prefix = (&hex!("02")[..], None); + let prefix = (&hex!("02")[..], (0, 0)); memdb.remove(&leaf_hash, prefix); // Seek to missing node returns error. @@ -893,4 +893,3 @@ mod tests { assert!(iter.next().is_none()); } } - diff --git a/trie-db/src/lib.rs b/trie-db/src/lib.rs index 05b52686..ccb8bd77 100644 --- a/trie-db/src/lib.rs +++ b/trie-db/src/lib.rs @@ -15,19 +15,20 @@ //! Trie interface and implementation. + #[cfg(not(feature = "std"))] extern crate alloc; #[cfg(feature = "std")] mod rstd { - pub use std::{borrow, boxed, cmp, convert, fmt, hash, iter, marker, mem, ops, rc, result, vec}; + pub use std::{borrow, boxed, cmp, convert, fmt, hash, iter, marker, mem, ops, rc, result, vec, slice}; pub use std::collections::VecDeque; pub use std::error::Error; } #[cfg(not(feature = "std"))] mod rstd { - pub use core::{borrow, convert, cmp, iter, fmt, hash, marker, mem, ops, result}; + pub use core::{borrow, convert, cmp, iter, fmt, hash, marker, mem, ops, result, slice}; pub use alloc::{boxed, rc, vec}; pub use alloc::collections::VecDeque; pub trait Error {} @@ -66,8 +67,10 @@ pub use self::fatdb::{FatDB, FatDBIterator}; pub use self::fatdbmut::FatDBMut; pub use self::recorder::{Recorder, Record}; pub use self::lookup::Lookup; -pub use self::nibble::{NibbleSlice, NibbleVec, nibble_ops}; -pub use crate::node_codec::{NodeCodec, Partial}; +pub use self::nibble::{NibbleSlice, NibbleVec, NibbleOps, ChildIndex, + ChildIndex2, ChildIndex4, ChildIndex16, ChildIndex256, + Radix16, Radix4, Radix2, Radix256, ChildSliceIndex}; +pub use crate::node_codec::{NodeCodec, Partial, BitMap}; pub use crate::iter_build::{trie_visit, ProcessEncodedNode, TrieBuilder, TrieRoot, TrieRootUnhashed}; pub use crate::iterator::TrieDBNodeIterator; @@ -91,8 +94,8 @@ pub enum TrieError { IncompleteDatabase(T), /// A value was found in the trie with a nibble key that was not byte-aligned. 
/// The first parameter is the byte-aligned part of the prefix and the second parameter is the - /// remaining nibble. - ValueAtIncompleteKey(Vec, u8), + /// remaining nibble (number of nibbles and masked byte value). + ValueAtIncompleteKey(Vec, (u8, u8)), /// Corrupt Trie item DecoderError(T, E), InvalidHash(T, Vec), @@ -122,17 +125,7 @@ impl fmt::Display for TrieError where T: MaybeDebug, E: MaybeDebug { } #[cfg(feature = "std")] -impl Error for TrieError where T: fmt::Debug, E: Error { - fn description(&self) -> &str { - match *self { - TrieError::InvalidStateRoot(_) => "Invalid state root", - TrieError::IncompleteDatabase(_) => "Incomplete database", - TrieError::ValueAtIncompleteKey(_, _) => "Value at incomplete key", - TrieError::DecoderError(_, ref err) => err.description(), - TrieError::InvalidHash(_, _) => "Encoded node contains invalid hash reference", - } - } -} +impl Error for TrieError where T: fmt::Debug, E: Error { } /// Trie result type. /// Boxed to avoid copying around extra space for the `Hasher`s `Out` on successful queries. @@ -392,12 +385,20 @@ pub trait TrieLayout { /// no partial in branch, if false the trie will only /// use branch and node with partials in both. const USE_EXTENSION: bool; + /// Trie nibble constants. It defines trie radix. + type Nibble: NibbleOps; /// Hasher to use for this trie. type Hash: Hasher; /// Codec to use (needs to match hasher and nibble ops). - type Codec: NodeCodec::Out>; + type Codec: NodeCodec, Nibble=Self::Nibble>; + + /// Array to use with `iter_build`. + type ChildRefIndex: ChildIndex>>; + /// Array to use with `triedbmut`. + type NodeIndex: ChildIndex>>; } + /// This trait associates a trie definition with preferred methods. /// It also contains own default implementations and can be /// used to allow switching implementation. @@ -458,3 +459,5 @@ pub trait TrieConfiguration: Sized + TrieLayout { pub type TrieHash = <::Hash as Hasher>::Out; /// Alias accessor to `NodeCodec` associated `Error` type from a `TrieLayout`. pub type CError = <::Codec as NodeCodec>::Error; +/// Alias accessor to child slice index from a `TrieLayout`. +pub type TrieChildRangeIndex = <::Nibble as NibbleOps>::ChildRangeIndex; diff --git a/trie-db/src/lookup.rs b/trie-db/src/lookup.rs index e46bf8f1..c0991f9b 100644 --- a/trie-db/src/lookup.rs +++ b/trie-db/src/lookup.rs @@ -40,7 +40,7 @@ where /// function to decode or copy. pub fn look_up( mut self, - key: NibbleSlice, + key: NibbleSlice, ) -> Result, TrieHash, CError> { let mut partial = key; let mut hash = self.hash; @@ -86,7 +86,7 @@ where } Node::Branch(children, value) => match partial.is_empty() { true => return Ok(value.map(move |val| self.query.decode(val))), - false => match children[partial.at(0) as usize] { + false => match children.at(partial.at(0) as usize) { Some(x) => { partial = partial.mid(1); key_nibbles += 1; @@ -102,7 +102,7 @@ where match partial.len() == slice.len() { true => return Ok(value.map(move |val| self.query.decode(val))), - false => match children[partial.at(slice.len()) as usize] { + false => match children.at(partial.at(slice.len()) as usize) { Some(x) => { partial = partial.mid(slice.len() + 1); key_nibbles += slice.len() + 1; diff --git a/trie-db/src/nibble/leftnibbleslice.rs b/trie-db/src/nibble/leftnibbleslice.rs index c31301be..f386e9a2 100644 --- a/trie-db/src/nibble/leftnibbleslice.rs +++ b/trie-db/src/nibble/leftnibbleslice.rs @@ -13,24 +13,27 @@ // limitations under the License. 
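Illustration (not part of the patch): branch children are no longer indexed as a bare `[Option<_>; 16]` but accessed through `BranchChildrenNodePlan`/`ChildIndex` (`children.at(i)` in `lookup.rs` and the iterators above). The container below is a minimal stand-in, not the real trait, showing the shape of that access for radix 16.

```rust
/// Minimal stand-in for a radix-16 `ChildIndex` container.
#[derive(Default)]
struct Children16<V>([Option<V>; 16]);

impl<V> Children16<V> {
    /// Same role as `ChildIndex::from_iter`: collect one slot per nibble.
    fn from_iter(nodes: impl Iterator<Item = Option<V>>) -> Self
    where
        V: Default,
    {
        let mut out = Self::default();
        for (i, node) in nodes.enumerate().take(16) {
            out.0[i] = node;
        }
        out
    }

    /// Same role as `ChildIndex::at`: borrow the child registered under a nibble.
    fn at(&self, ix: usize) -> Option<&V> {
        self.0[ix].as_ref()
    }
}

fn main() {
    // children only under nibbles 0x3 and 0xA
    let children = Children16::from_iter(
        (0..16).map(|i| if i == 0x3 || i == 0xA { Some(format!("child {i:x}")) } else { None }),
    );
    let nibble = 0x3usize;
    // lookup-style descent step: follow the child only if the slot is occupied
    match children.at(nibble) {
        Some(child) => println!("descend into {child}"),
        None => println!("no entry for nibble {nibble:x}"),
    }
}
```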
use crate::rstd::cmp::{self, Ordering}; +use crate::rstd::marker::PhantomData; -use crate::nibble::{nibble_ops::{self, NIBBLE_PER_BYTE}, NibbleSlice}; +use crate::nibble::{NibbleOps, NibbleSlice}; /// A representation of a nibble slice which is left-aligned. The regular `NibbleSlice` is /// right-aligned, meaning it does not support efficient truncation from the right side. /// /// This is an immutable struct. No operations actually change it. -pub struct LeftNibbleSlice<'a> { +pub struct LeftNibbleSlice<'a, N> { bytes: &'a [u8], len: usize, + _marker: PhantomData, } -impl<'a> LeftNibbleSlice<'a> { +impl<'a, N: NibbleOps> LeftNibbleSlice<'a, N> { /// Constructs a byte-aligned nibble slice from a byte slice. pub fn new(bytes: &'a [u8]) -> Self { LeftNibbleSlice { bytes, - len: bytes.len() * NIBBLE_PER_BYTE, + len: bytes.len() * N::NIBBLE_PER_BYTE, + _marker: PhantomData, } } @@ -43,7 +46,7 @@ impl<'a> LeftNibbleSlice<'a> { /// out of bounds. pub fn at(&self, index: usize) -> Option { if index < self.len() { - Some(nibble_ops::left_nibble_at(self.bytes, index)) + Some(N::left_nibble_at(self.bytes, index)) } else { None } @@ -55,23 +58,24 @@ impl<'a> LeftNibbleSlice<'a> { LeftNibbleSlice { bytes: self.bytes, len: cmp::min(len, self.len), + _marker: PhantomData, } } /// Returns whether the given slice is a prefix of this one. - pub fn starts_with(&self, prefix: &LeftNibbleSlice<'a>) -> bool { + pub fn starts_with(&self, prefix: &LeftNibbleSlice<'a, N>) -> bool { self.truncate(prefix.len()) == *prefix } /// Returns whether another regular (right-aligned) nibble slice is contained in this one at /// the given offset. - pub fn contains(&self, partial: &NibbleSlice, offset: usize) -> bool { + pub fn contains(&self, partial: &NibbleSlice, offset: usize) -> bool { (0..partial.len()).all(|i| self.at(offset + i) == Some(partial.at(i))) } fn cmp(&self, other: &Self) -> Ordering { let common_len = cmp::min(self.len(), other.len()); - let common_byte_len = common_len / NIBBLE_PER_BYTE; + let common_byte_len = common_len / N::NIBBLE_PER_BYTE; // Quickly compare the common prefix of the byte slices. match self.bytes[..common_byte_len].cmp(&other.bytes[..common_byte_len]) { @@ -80,7 +84,7 @@ impl<'a> LeftNibbleSlice<'a> { } // Compare nibble-by-nibble (either 0 or 1 nibbles) any after the common byte prefix. - for i in (common_byte_len * NIBBLE_PER_BYTE)..common_len { + for i in (common_byte_len * N::NIBBLE_PER_BYTE)..common_len { let a = self.at(i).expect("i < len; len == self.len() qed"); let b = other.at(i).expect("i < len; len == other.len(); qed"); match a.cmp(&b) { @@ -94,7 +98,7 @@ impl<'a> LeftNibbleSlice<'a> { } } -impl<'a> PartialEq for LeftNibbleSlice<'a> { +impl<'a, N: NibbleOps> PartialEq for LeftNibbleSlice<'a, N> { fn eq(&self, other: &Self) -> bool { let len = self.len(); if other.len() != len { @@ -102,13 +106,13 @@ impl<'a> PartialEq for LeftNibbleSlice<'a> { } // Quickly compare the common prefix of the byte slices. - let byte_len = len / NIBBLE_PER_BYTE; + let byte_len = len / N::NIBBLE_PER_BYTE; if self.bytes[..byte_len] != other.bytes[..byte_len] { return false; } // Compare nibble-by-nibble (either 0 or 1 nibbles) any after the common byte prefix. 
- for i in (byte_len * NIBBLE_PER_BYTE)..len { + for i in (byte_len * N::NIBBLE_PER_BYTE)..len { let a = self.at(i).expect("i < len; len == self.len() qed"); let b = other.at(i).expect("i < len; len == other.len(); qed"); if a != b { @@ -120,22 +124,22 @@ impl<'a> PartialEq for LeftNibbleSlice<'a> { } } -impl<'a> Eq for LeftNibbleSlice<'a> {} +impl<'a, N: NibbleOps> Eq for LeftNibbleSlice<'a, N> {} -impl<'a> PartialOrd for LeftNibbleSlice<'a> { +impl<'a, N: NibbleOps> PartialOrd for LeftNibbleSlice<'a, N> { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl<'a> Ord for LeftNibbleSlice<'a> { +impl<'a, N: NibbleOps> Ord for LeftNibbleSlice<'a, N> { fn cmp(&self, other: &Self) -> Ordering { self.cmp(other) } } #[cfg(feature = "std")] -impl<'a> std::fmt::Debug for LeftNibbleSlice<'a> { +impl<'a, N: NibbleOps> std::fmt::Debug for LeftNibbleSlice<'a, N> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { for i in 0..self.len() { let nibble = self.at(i).expect("i < self.len(); qed"); @@ -151,17 +155,18 @@ impl<'a> std::fmt::Debug for LeftNibbleSlice<'a> { #[cfg(test)] mod tests { use super::*; + use crate::nibble::Radix16; #[test] fn test_len() { - assert_eq!(LeftNibbleSlice::new(&[]).len(), 0); - assert_eq!(LeftNibbleSlice::new(&b"hello"[..]).len(), 10); - assert_eq!(LeftNibbleSlice::new(&b"hello"[..]).truncate(7).len(), 7); + assert_eq!(LeftNibbleSlice::::new(&[]).len(), 0); + assert_eq!(LeftNibbleSlice::::new(&b"hello"[..]).len(), 10); + assert_eq!(LeftNibbleSlice::::new(&b"hello"[..]).truncate(7).len(), 7); } #[test] fn test_at() { - let slice = LeftNibbleSlice::new(&b"\x01\x23\x45\x67"[..]).truncate(7); + let slice = LeftNibbleSlice::::new(&b"\x01\x23\x45\x67"[..]).truncate(7); assert_eq!(slice.at(0), Some(0)); assert_eq!(slice.at(6), Some(6)); assert_eq!(slice.at(7), None); @@ -171,59 +176,59 @@ mod tests { #[test] fn test_starts_with() { assert!( - LeftNibbleSlice::new(b"hello").starts_with(&LeftNibbleSlice::new(b"heli").truncate(7)) + LeftNibbleSlice::::new(b"hello").starts_with(&LeftNibbleSlice::::new(b"heli").truncate(7)) ); assert!( - !LeftNibbleSlice::new(b"hello").starts_with(&LeftNibbleSlice::new(b"heli").truncate(8)) + !LeftNibbleSlice::::new(b"hello").starts_with(&LeftNibbleSlice::::new(b"heli").truncate(8)) ); } #[test] fn test_contains() { assert!( - LeftNibbleSlice::new(b"hello").contains(&NibbleSlice::new_offset(b"ello", 0), 2) + LeftNibbleSlice::::new(b"hello").contains(&NibbleSlice::new_offset(b"ello", 0), 2) ); assert!( - LeftNibbleSlice::new(b"hello").contains(&NibbleSlice::new_offset(b"ello", 1), 3) + LeftNibbleSlice::::new(b"hello").contains(&NibbleSlice::new_offset(b"ello", 1), 3) ); assert!( - !LeftNibbleSlice::new(b"hello").contains(&NibbleSlice::new_offset(b"allo", 1), 3) + !LeftNibbleSlice::::new(b"hello").contains(&NibbleSlice::new_offset(b"allo", 1), 3) ); assert!( - !LeftNibbleSlice::new(b"hello").contains(&NibbleSlice::new_offset(b"ello!", 1), 3) + !LeftNibbleSlice::::new(b"hello").contains(&NibbleSlice::new_offset(b"ello!", 1), 3) ); } #[test] fn test_cmp() { - assert!(LeftNibbleSlice::new(b"hallo") < LeftNibbleSlice::new(b"hello")); - assert!(LeftNibbleSlice::new(b"hello") > LeftNibbleSlice::new(b"hallo")); + assert!(LeftNibbleSlice::::new(b"hallo") < LeftNibbleSlice::::new(b"hello")); + assert!(LeftNibbleSlice::::new(b"hello") > LeftNibbleSlice::::new(b"hallo")); assert_eq!( - LeftNibbleSlice::new(b"hello").cmp(&LeftNibbleSlice::new(b"hello")), + 
LeftNibbleSlice::::new(b"hello").cmp(&LeftNibbleSlice::::new(b"hello")), Ordering::Equal ); assert!( - LeftNibbleSlice::new(b"hello\x10") - < LeftNibbleSlice::new(b"hello\x20").truncate(11) + LeftNibbleSlice::::new(b"hello\x10") + < LeftNibbleSlice::::new(b"hello\x20").truncate(11) ); assert!( - LeftNibbleSlice::new(b"hello\x20").truncate(11) - > LeftNibbleSlice::new(b"hello\x10") + LeftNibbleSlice::::new(b"hello\x20").truncate(11) + > LeftNibbleSlice::::new(b"hello\x10") ); assert!( - LeftNibbleSlice::new(b"hello\x10").truncate(11) - < LeftNibbleSlice::new(b"hello\x10") + LeftNibbleSlice::::new(b"hello\x10").truncate(11) + < LeftNibbleSlice::::new(b"hello\x10") ); assert!( - LeftNibbleSlice::new(b"hello\x10") - > LeftNibbleSlice::new(b"hello\x10").truncate(11) + LeftNibbleSlice::::new(b"hello\x10") + > LeftNibbleSlice::::new(b"hello\x10").truncate(11) ); assert_eq!( - LeftNibbleSlice::new(b"hello\x10").truncate(11) - .cmp(&LeftNibbleSlice::new(b"hello\x10").truncate(11)), + LeftNibbleSlice::::new(b"hello\x10").truncate(11) + .cmp(&LeftNibbleSlice::::new(b"hello\x10").truncate(11)), Ordering::Equal ); } -} \ No newline at end of file +} diff --git a/trie-db/src/nibble/mod.rs b/trie-db/src/nibble/mod.rs index 2d1b3df5..fe29bce8 100644 --- a/trie-db/src/nibble/mod.rs +++ b/trie-db/src/nibble/mod.rs @@ -14,129 +14,190 @@ //! Nibble oriented methods. -use crate::node::NodeKey; +use crate::node::{NodeKey, NodeHandle, NodeHandlePlan}; use crate::rstd::cmp; - +use hash_db::MaybeDebug; +use crate::rstd::vec::Vec; +use crate::rstd::vec; +use crate::rstd::marker::PhantomData; pub use self::leftnibbleslice::LeftNibbleSlice; mod nibblevec; mod nibbleslice; mod leftnibbleslice; -/// Utility methods to work on radix 16 nibble. -pub mod nibble_ops { - use super::*; +// Work-around absence of constant function for math pow. +const TWO_EXP: [usize; 9] = [1, 2, 4, 8, 16, 32, 64, 128, 256]; + +/// Ordered enumeration of the different possible number of nibble in +/// a byte. +pub enum Layout { + /// Radix 2 trie. Eight nibble per byte. + Radix2, // 1, 8, 2 + /// Radix 4 trie. Four nibble per byte. + Radix4, // 2, 4, 4 + /// Radix 16 trie. Two nibble per byte. + Radix16, // 4, 2, 16 + /// Radix 256 trie. One nibble per byte. + Radix256, // 8, 1, 256 +} +/// This trait contain Trie nibble specific definitions. +/// This trait is mostly a collection of associated constant and some generic +/// methods. +/// Generic methods should not need redefinition except for optimization +/// purpose. +pub trait NibbleOps: Default + Clone + PartialEq + Eq + PartialOrd + Ord + Copy + MaybeDebug { + /// See [`Layout`]. + const LAYOUT : Layout; /// Single nibble length in bit. - pub const BIT_PER_NIBBLE : usize = 4; + const BIT_PER_NIBBLE : usize = TWO_EXP[Self::LAYOUT as usize]; /// Number of nibble per byte. - pub const NIBBLE_PER_BYTE : usize = 2; + const NIBBLE_PER_BYTE : usize = 8 / Self::BIT_PER_NIBBLE; /// Number of child for a branch (trie radix). - pub const NIBBLE_LENGTH : usize = 16; - /// Nibble (half a byte). - pub const PADDING_BITMASK: u8 = 0x0F; - /// Size of header. - pub const CONTENT_HEADER_SIZE: u8 = 1; + const NIBBLE_LENGTH : usize = TWO_EXP[Self::BIT_PER_NIBBLE]; + /// Padding bitmasks, internally use for working on padding byte. + /// Length of this array is `Self::BIT_PER_NIBBLE`. + /// The first element of each pair is a bit mask to apply, + /// the second element is a right shift to apply in some case. + /// const PADDING_BITMASK: &'static [(u8, usize)] = &[ + /// Similar to following const function. 
+ /// ```rust + /// const BIT_PER_NIBBLE: usize = 4; + /// const fn padding_bitmask(ix: usize) -> (u8, usize) { + /// //assert!(ix < 8 / BIT_PER_NIBBLE); + /// let offset = BIT_PER_NIBBLE * ix; + /// (255u8 >> offset, 8 - offset) + /// } + /// ``` + const PADDING_BITMASK: &'static [(u8, usize)]; // TODO EMCH rewrite to remove this const (does not help readability). + /// Last nibble index as u8, a convenience constant for iteration on all nibble. + const LAST_NIBBLE_INDEX: u8 = (Self::NIBBLE_PER_BYTE - 1) as u8; + + /// Buffer type for child slice index array. + type ChildRangeIndex: ChildIndex; - /// Mask a byte, keeping left nibble. + /// Pad left aligned representation for a given number of element. + /// Mask a byte from a `ix` > 0 (ix being content). + /// Result is a byte containing `ix` nibble of left aligned content and padded with 0. #[inline(always)] - pub fn pad_left(b: u8) -> u8 { - b & !PADDING_BITMASK + fn pad_left(ix: u8, b: u8) -> u8 { + debug_assert!(ix > 0); // 0 does not pad anything TODO EMCH allow 0 + b & !Self::PADDING_BITMASK[ix as usize].0 + //b & !(255u8 >> (Self::BIT_PER_NIBBLE * ix)) // TODO EMCH compare perf with that } - /// Mask a byte, keeping right byte. + /// Pad right aligned representation for a given number of element. + /// Mask a byte from a ix > 0 (ix being content). + /// Result is a byte containing `ix` nibble of right aligned content and padded with 0. #[inline(always)] - pub fn pad_right(b: u8) -> u8 { - b & PADDING_BITMASK + fn pad_right(ix: u8, b: u8) -> u8 { + // TODO EMCH change code to avoid this test (panic on 0 to see) + // it means there is calls to pad_right where we do not use the number + // of elements! + if ix > 0 { + b & !(255u8 << (Self::BIT_PER_NIBBLE * (Self::NIBBLE_PER_BYTE - ix as usize))) + //b & Self::PADDING_BITMASK[Self::NIBBLE_PER_BYTE - ix as usize].0 + } else { + b + } } /// Get u8 nibble value at a given index of a byte. + /// #[inline(always)] - pub fn at_left(ix: u8, b: u8) -> u8 { - if ix == 1 { - b & PADDING_BITMASK - } else { - b >> BIT_PER_NIBBLE - } + fn at_left(ix: u8, b: u8) -> u8 { + // TODO EMCH compare perf without padding bitmask + (b & Self::PADDING_BITMASK[ix as usize].0) + >> Self::PADDING_BITMASK[ix as usize].1 } /// Get u8 nibble value at a given index in a left aligned array. #[inline(always)] - pub fn left_nibble_at(v1: &[u8], ix: usize) -> u8 { - at_left( - (ix % NIBBLE_PER_BYTE) as u8, - v1[ix / NIBBLE_PER_BYTE] - ) + fn left_nibble_at(v1: &[u8], mut ix: usize) -> u8 { + let pad = ix % Self::NIBBLE_PER_BYTE; + ix = ix / Self::NIBBLE_PER_BYTE; + Self::at_left(pad as u8, v1[ix]) } /// Get u8 nibble value at a given index in a `NibbleSlice`. #[inline(always)] - pub fn at(s: &NibbleSlice, i: usize) -> u8 { - let ix = (s.offset + i) / NIBBLE_PER_BYTE; - let pad = (s.offset + i) % NIBBLE_PER_BYTE; - at_left(pad as u8, s.data[ix]) + fn at(s: &NibbleSlice, ix: usize) -> u8 { + // same as left with offset + Self::left_nibble_at(&s.data[..], s.offset + ix) } /// Push u8 nibble value at a given index into an existing byte. + /// Note that existing value must be null (padded with 0). #[inline(always)] - pub fn push_at_left(ix: u8, v: u8, into: u8) -> u8 { - into | if ix == 1 { - v - } else { - v << BIT_PER_NIBBLE - } + fn push_at_left(ix: u8, v: u8, into: u8) -> u8 { + //into | (v << (8 - (BIT_PER_NIBBLE * ix))) + into | (v << Self::PADDING_BITMASK[ix as usize].1) } #[inline] - /// Calculate the number of needed padding a array of nibble length `i`. 
- pub fn number_padding(i: usize) -> usize { - i % NIBBLE_PER_BYTE + /// Calculate the number of needed padding for an array of nibble length `i`. + fn number_padding(i: usize) -> usize { + (Self::NIBBLE_PER_BYTE - (i % Self::NIBBLE_PER_BYTE)) % Self::NIBBLE_PER_BYTE } - /// The nibble shifts needed to align. - /// We use two value, one is a left shift and - /// the other is a right shift. - pub const SPLIT_SHIFTS: (usize, usize) = (4, 4); - /// Count the biggest common depth between two left aligned packed nibble slice. - pub fn biggest_depth(v1: &[u8], v2: &[u8]) -> usize { + fn biggest_depth(v1: &[u8], v2: &[u8]) -> usize { let upper_bound = cmp::min(v1.len(), v2.len()); for a in 0 .. upper_bound { if v1[a] != v2[a] { - return a * NIBBLE_PER_BYTE + left_common(v1[a], v2[a]); + return a * Self::NIBBLE_PER_BYTE + Self::left_common(v1[a], v2[a]); } } - upper_bound * NIBBLE_PER_BYTE + upper_bound * Self::NIBBLE_PER_BYTE } /// Calculate the number of common nibble between two left aligned bytes. #[inline(always)] - pub fn left_common(a: u8, b: u8) -> usize { - if a == b { - 2 - } else if pad_left(a) == pad_left(b) { - 1 - } else { - 0 + fn left_common(a: u8, b: u8) -> usize { + ((a ^ b).leading_zeros() as usize) / Self::BIT_PER_NIBBLE +/* let mut i = 0; + while i < Self::NIBBLE_PER_BYTE { + //if (a >> Self::PADDING_BITMASK[i].1) + // != (b >> Self::PADDING_BITMASK[i].1) { + let offset = i * Self::BIT_PER_NIBBLE; + if (a >> offset) != (b >> offset) { + break; + } + i += 1; } + return i;*/ + } + + /// The nibble shifts needed to align. + /// We use two value, one is a left shift and + /// the other is a right shift. + #[inline(always)] + fn split_shifts(pad: usize) -> (usize, usize) { + debug_assert!(pad > 0); + let s1 = Self::PADDING_BITMASK[pad - 1].1; + let s2 = 8 - s1; + (s1, s2) } /// Shifts right aligned key to add a given left offset. /// Resulting in possibly padding at both left and right - /// (example usage when combining two keys). - pub fn shift_key(key: &mut NodeKey, offset: usize) -> bool { + /// (used when combining two keys). + fn shift_key(key: &mut NodeKey, ofset: usize) -> bool { let old_offset = key.0; - key.0 = offset; - if old_offset > offset { + key.0 = ofset; + if old_offset > ofset { // shift left - let (s1, s2) = nibble_ops::SPLIT_SHIFTS; + let shift = old_offset - ofset; + let (s1, s2) = Self::split_shifts(shift); let kl = key.1.len(); (0..kl - 1).for_each(|i| key.1[i] = key.1[i] << s2 | key.1[i+1] >> s1); key.1[kl - 1] = key.1[kl - 1] << s2; true - } else if old_offset < offset { + } else if old_offset < ofset { // shift right - let (s1, s2) = nibble_ops::SPLIT_SHIFTS; + let shift = ofset - old_offset; + let (s1, s2) = Self::split_shifts(shift); key.1.push(0); (1..key.1.len()).rev().for_each(|i| key.1[i] = key.1[i - 1] << s1 | key.1[i] >> s2); key.1[0] = key.1[0] >> s2; @@ -145,7 +206,88 @@ pub mod nibble_ops { false } } +} + +/// Radix 16 `NibbleOps` definition. +#[cfg_attr(feature = "std", derive(Debug))] +#[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)] +pub struct Radix16; + +impl NibbleOps for Radix16 { + const LAYOUT: Layout = Layout::Radix16; + const PADDING_BITMASK: &'static [(u8, usize)] = &[(0xFF, 4), (0x0F, 0)]; + type ChildRangeIndex = ChildIndex16; + + #[inline] + fn number_padding(i: usize) -> usize { + i % Self::NIBBLE_PER_BYTE + } + #[inline] + fn split_shifts(pad: usize) -> (usize, usize) { + debug_assert!(pad > 0); + (4, 4) + } +} + +/// Radix 4 `NibbleOps` definition. 
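// Illustrative sketch (not part of the patch): the radix-16 nibble arithmetic
// described above (`number_padding`, `left_common`, `biggest_depth`) worked out
// with local constants. Names mirror the generic defaults but are only an
// example, not the crate's API.
#[cfg(test)]
mod radix16_nibble_math_sketch {
	const NIBBLE_PER_BYTE: usize = 2;
	const BIT_PER_NIBBLE: usize = 4;

	// Padding nibbles needed so that `i` nibbles fill whole bytes.
	fn number_padding(i: usize) -> usize {
		(NIBBLE_PER_BYTE - (i % NIBBLE_PER_BYTE)) % NIBBLE_PER_BYTE
	}

	// Number of leading nibbles shared by two left-aligned bytes.
	fn left_common(a: u8, b: u8) -> usize {
		((a ^ b).leading_zeros() as usize) / BIT_PER_NIBBLE
	}

	// Longest common nibble prefix of two left-aligned packed slices.
	fn biggest_depth(v1: &[u8], v2: &[u8]) -> usize {
		let upper_bound = core::cmp::min(v1.len(), v2.len());
		for a in 0..upper_bound {
			if v1[a] != v2[a] {
				return a * NIBBLE_PER_BYTE + left_common(v1[a], v2[a]);
			}
		}
		upper_bound * NIBBLE_PER_BYTE
	}

	#[test]
	fn examples() {
		assert_eq!(number_padding(3), 1); // 3 nibbles leave 1 padding nibble
		assert_eq!(left_common(0x34, 0x39), 1); // high nibbles match, low differ
		assert_eq!(biggest_depth(&[0x12, 0x34], &[0x12, 0x39]), 3);
	}
}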
+#[cfg_attr(feature = "std", derive(Debug))] +#[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)] +pub struct Radix4; + +// new_padded_end merged +impl NibbleOps for Radix4 { + const LAYOUT: Layout = Layout::Radix4; + const PADDING_BITMASK: &'static [(u8, usize)] = &[ + (0b1111_1111, 6), + (0b0011_1111, 4), + (0b0000_1111, 2), + (0b0000_0011, 0), + ]; + type ChildRangeIndex = ChildIndex4; +} + +/// Radix 2 `NibbleOps` definition. +#[cfg_attr(feature = "std", derive(Debug))] +#[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)] +pub struct Radix2; + +impl NibbleOps for Radix2 { + const LAYOUT: Layout = Layout::Radix2; + const PADDING_BITMASK: &'static [(u8, usize)] = &[ + (0b1111_1111, 7), + (0b0111_1111, 6), + (0b0011_1111, 5), + (0b0001_1111, 4), + (0b0000_1111, 3), + (0b0000_0111, 2), + (0b0000_0011, 1), + (0b0000_0001, 0), + ]; + type ChildRangeIndex = ChildIndex2; +} + +/// Radix 256 `NibbleOps` definition. +#[cfg_attr(feature = "std", derive(Debug))] +#[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord, Copy)] +pub struct Radix256; + +impl NibbleOps for Radix256 { + const LAYOUT: Layout = Layout::Radix256; + const PADDING_BITMASK: &'static [(u8, usize)] = &[ + (1, 0), + ]; + type ChildRangeIndex = ChildIndex256; + + #[inline] + fn split_shifts(_pad: usize) -> (usize, usize) { + unreachable!("pad > 0"); + } + + #[inline] + fn left_common(_a: u8, _b: u8) -> usize { + 0 + } } /// Backing storage for `NibbleVec`s. @@ -156,9 +298,10 @@ pub(crate) type BackingByteVec = smallvec::SmallVec<[u8; 36]>; /// a `NibbleSlice` can get costy. #[cfg_attr(feature = "std", derive(Debug))] #[derive(Clone, PartialEq, Eq)] -pub struct NibbleVec { +pub struct NibbleVec { inner: BackingByteVec, len: usize, + _marker: PhantomData, } /// Nibble-orientated view onto byte-slice, allowing nibble-precision offsets. @@ -184,14 +327,173 @@ pub struct NibbleVec { /// } /// ``` #[derive(Copy, Clone)] -pub struct NibbleSlice<'a> { +pub struct NibbleSlice<'a, N> { data: &'a [u8], offset: usize, + _marker: PhantomData, } /// Iterator type for a nibble slice. -pub struct NibbleSliceIterator<'a> { - p: &'a NibbleSlice<'a>, +pub struct NibbleSliceIterator<'a, N: NibbleOps> { + p: &'a NibbleSlice<'a, N>, i: usize, } +/// Technical trait only to access child slice from an encoded +/// representation of a branch. +pub trait ChildIndex: AsRef<[Option]> + + AsMut<[Option]> + Default + Eq + PartialEq + crate::MaybeDebug + + Clone { + + /// Constant length for the number of children. + /// TODO EMCH see if can delete + const NIBBLE_LENGTH : usize; + + #[inline] + fn from_iter(nodes: impl Iterator>) -> Self { + let mut index = Self::default(); + for (i, node) in nodes.enumerate() { + index.as_mut()[i] = node; + } + index + } + + #[inline] + fn at(&self, ix: usize) -> Option<&V> { + self.as_ref()[ix].as_ref() + } + + #[inline] + fn take(&mut self, ix: usize) -> Option { + self.as_mut()[ix].take() + } + + #[inline] + fn at_mut(&mut self, ix: usize) -> &mut Option { + &mut self.as_mut()[ix] + } + + #[inline] + fn iter_mut(&mut self) -> crate::rstd::slice::IterMut> { + self.as_mut().iter_mut() + } +} + +pub trait ChildSliceIndex: ChildIndex { + #[inline] + fn slice_at<'a>(&self, ix: usize, data: &'a [u8]) -> Option> { + self.at(ix).map(|plan| plan.build(data)) + } + + /// Iterator over the children slice. + fn iter<'a>(&'a self, data: &'a [u8]) -> IterChildSliceIndex<'a, Self> { + IterChildSliceIndex(self, 0, data) + } +} + +impl> ChildSliceIndex for I { } + +/// Iterator over `ChildSliceIndex` trait. 
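// Illustrative sketch (not part of the patch): the radix-2/4/16 `PADDING_BITMASK`
// tables above follow one rule; entry `ix` keeps nibble `ix` and everything to its
// right, and the shift drops the bits below nibble `ix`. `bit_per_nibble` is a
// parameter of this sketch only. (The single-entry radix-256 table is a special case.)
#[cfg(test)]
mod padding_bitmask_sketch {
	fn padding_bitmask(bit_per_nibble: usize, ix: usize) -> (u8, usize) {
		debug_assert!(bit_per_nibble * ix < 8);
		(0xFFu8 >> (bit_per_nibble * ix), 8 - bit_per_nibble * (ix + 1))
	}

	#[test]
	fn matches_tables_above() {
		// Radix 4: four nibbles of two bits each per byte.
		let radix4 = [(0b1111_1111, 6), (0b0011_1111, 4), (0b0000_1111, 2), (0b0000_0011, 0)];
		for (ix, entry) in radix4.iter().enumerate() {
			assert_eq!(padding_bitmask(2, ix), *entry);
		}
		// Radix 16: two nibbles of four bits each per byte.
		assert_eq!(padding_bitmask(4, 0), (0xFF, 4));
		assert_eq!(padding_bitmask(4, 1), (0x0F, 0));
	}
}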
+pub struct IterChildSliceIndex<'a, CS>(&'a CS, usize, &'a[u8]); + +impl<'a, CS: ChildSliceIndex> Iterator for IterChildSliceIndex<'a, CS> { + type Item = Option>; + fn next(&mut self) -> Option { + if self.1 == CS::NIBBLE_LENGTH { + return None; + } + self.1 += 1; + Some(self.0.slice_at(self.1 - 1, self.2)) + } +} + +macro_rules! child_slice_index { + ($me: ident, $size: expr) => { + #[cfg_attr(feature = "std", derive(Debug))] + #[derive(Eq, PartialEq, Clone)] + /// Child slice indexes for radix $size. + pub struct $me([Option; $size]); + + impl AsRef<[Option]> for $me { + fn as_ref(&self) -> &[Option] { + &self.0[..] + } + } + + impl AsMut<[Option]> for $me { + fn as_mut(&mut self) -> &mut [Option] { + &mut self.0[..] + } + } + + impl ChildIndex for $me + where + V: MaybeDebug + Eq + PartialEq + Clone, + { + const NIBBLE_LENGTH: usize = $size; + } + } +} + +child_slice_index!(ChildIndex16, 16); +child_slice_index!(ChildIndex4, 4); +child_slice_index!(ChildIndex2, 2); + +macro_rules! exponential_out { + (@3, [$($inpp:expr),*]) => { exponential_out!(@2, [$($inpp,)* $($inpp),*]) }; + (@2, [$($inpp:expr),*]) => { exponential_out!(@1, [$($inpp,)* $($inpp),*]) }; + (@1, [$($inpp:expr),*]) => { [$($inpp,)* $($inpp),*] }; +} + +impl Default for ChildIndex2 { + fn default() -> Self { + ChildIndex2(exponential_out!(@1, [None])) + } +} + +impl Default for ChildIndex4 { + fn default() -> Self { + ChildIndex4(exponential_out!(@2, [None])) + } +} + +impl Default for ChildIndex16 { + fn default() -> Self { + ChildIndex16(exponential_out!(@3, [None, None])) + } +} + +#[cfg_attr(feature = "std", derive(Debug))] +#[derive(Eq, PartialEq, Clone)] +/// Child slice indexes for radix 256. +/// +/// TODO EMCH no default impl for array of len 257, +/// but could use bench to see if worth implementing +/// (probably sparse vec implementation is better: +/// need to remove asref and asmut bound). +pub struct ChildIndex256(Vec>); + +impl Default for ChildIndex256 { + fn default() -> Self { + ChildIndex256(vec![None; 256]) + } +} + +impl AsRef<[Option]> for ChildIndex256 { + fn as_ref(&self) -> &[Option] { + &self.0[..] + } +} + +impl AsMut<[Option]> for ChildIndex256 { + fn as_mut(&mut self) -> &mut [Option] { + &mut self.0[..] + } +} + +impl ChildIndex for ChildIndex256 + where + V: MaybeDebug + Eq + PartialEq + Clone, +{ + const NIBBLE_LENGTH: usize = 256; +} diff --git a/trie-db/src/nibble/nibbleslice.rs b/trie-db/src/nibble/nibbleslice.rs index 769f9854..26c85e36 100644 --- a/trie-db/src/nibble/nibbleslice.rs +++ b/trie-db/src/nibble/nibbleslice.rs @@ -14,13 +14,16 @@ //! Nibble-orientated view onto byte-slice, allowing nibble-precision offsets. -use crate::rstd::{cmp::*, fmt}; -use super::{nibble_ops, NibbleSlice, NibbleSliceIterator, BackingByteVec}; +use crate::rstd::{cmp::*, marker::PhantomData}; +use super::{NibbleOps, NibbleSlice, NibbleSliceIterator, BackingByteVec}; use crate::node::NodeKey; use crate::node_codec::Partial; use hash_db::Prefix; -impl<'a> Iterator for NibbleSliceIterator<'a> { +#[cfg(feature = "std")] +use std::fmt; + +impl<'a, N: NibbleOps> Iterator for NibbleSliceIterator<'a, N> { type Item = u8; fn next(&mut self) -> Option { self.i += 1; @@ -31,7 +34,7 @@ impl<'a> Iterator for NibbleSliceIterator<'a> { } } -impl<'a> NibbleSlice<'a> { +impl<'a, N: NibbleOps> NibbleSlice<'a, N> { /// Create a new nibble slice with the given byte-slice. 
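// Illustrative sketch (not part of the patch): a minimal standalone model of the
// `ChildIndex` idea for radix 16, a fixed-size array of optional children filled
// from an iterator and read back by nibble index. It only mirrors the trait's
// default methods; the names here are local to this example.
#[cfg(test)]
mod child_index_sketch {
	#[derive(Debug, PartialEq, Eq, Clone)]
	struct ChildIndex16Model<V>([Option<V>; 16]);

	impl<V> ChildIndex16Model<V> {
		// A `[None; 16]` literal needs `Option<V>: Copy`, which appears to be why
		// the code above builds its defaults with a repetition macro instead.
		fn new() -> Self {
			ChildIndex16Model([
				None, None, None, None, None, None, None, None,
				None, None, None, None, None, None, None, None,
			])
		}

		fn from_iter(nodes: impl Iterator<Item = Option<V>>) -> Self {
			let mut index = Self::new();
			for (i, node) in nodes.enumerate().take(16) {
				index.0[i] = node;
			}
			index
		}

		fn at(&self, ix: usize) -> Option<&V> {
			self.0[ix].as_ref()
		}
	}

	#[test]
	fn fill_and_read() {
		let children = (0..16).map(|i| if i == 3 { Some("child-3".to_string()) } else { None });
		let index = ChildIndex16Model::from_iter(children);
		assert_eq!(index.at(3).map(String::as_str), Some("child-3"));
		assert!(index.at(4).is_none());
	}
}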
pub fn new(data: &'a [u8]) -> Self { NibbleSlice::new_slice(data, 0) } @@ -44,23 +47,24 @@ impl<'a> NibbleSlice<'a> { NibbleSlice { data, offset, + _marker: PhantomData, } } /// Get an iterator for the series of nibbles. - pub fn iter(&'a self) -> NibbleSliceIterator<'a> { + pub fn iter(&'a self) -> NibbleSliceIterator<'a, N> { NibbleSliceIterator { p: self, i: 0 } } /// Get nibble slice from a `NodeKey`. - pub fn from_stored(i: &NodeKey) -> NibbleSlice { + pub fn from_stored(i: &NodeKey) -> NibbleSlice { NibbleSlice::new_offset(&i.1[..], i.0) } /// Helper function to create a owned `NodeKey` from this `NibbleSlice`. pub fn to_stored(&self) -> NodeKey { - let split = self.offset / nibble_ops::NIBBLE_PER_BYTE; - let offset = self.offset % nibble_ops::NIBBLE_PER_BYTE; + let split = self.offset / N::NIBBLE_PER_BYTE; + let offset = self.offset % N::NIBBLE_PER_BYTE; (offset, self.data[split..].into()) } @@ -70,23 +74,23 @@ impl<'a> NibbleSlice<'a> { /// original padding). pub fn to_stored_range(&self, nb: usize) -> NodeKey { if nb >= self.len() { return self.to_stored() } - if (self.offset + nb) % nibble_ops::NIBBLE_PER_BYTE == 0 { + if (self.offset + nb) % N::NIBBLE_PER_BYTE == 0 { // aligned - let start = self.offset / nibble_ops::NIBBLE_PER_BYTE; - let end = (self.offset + nb) / nibble_ops::NIBBLE_PER_BYTE; + let start = self.offset / N::NIBBLE_PER_BYTE; + let end = (self.offset + nb) / N::NIBBLE_PER_BYTE; ( - self.offset % nibble_ops::NIBBLE_PER_BYTE, + self.offset % N::NIBBLE_PER_BYTE, BackingByteVec::from_slice(&self.data[start..end]), ) } else { // unaligned - let start = self.offset / nibble_ops::NIBBLE_PER_BYTE; - let end = (self.offset + nb) / nibble_ops::NIBBLE_PER_BYTE; + let start = self.offset / N::NIBBLE_PER_BYTE; + let end = (self.offset + nb) / N::NIBBLE_PER_BYTE; let ea = BackingByteVec::from_slice(&self.data[start..=end]); - let ea_offset = self.offset % nibble_ops::NIBBLE_PER_BYTE; - let n_offset = nibble_ops::number_padding(nb); + let ea_offset = self.offset % N::NIBBLE_PER_BYTE; + let n_offset = N::number_padding(nb); let mut result = (ea_offset, ea); - nibble_ops::shift_key(&mut result, n_offset); + N::shift_key(&mut result, n_offset); result.1.pop(); result } @@ -97,19 +101,20 @@ impl<'a> NibbleSlice<'a> { /// Get the length (in nibbles, naturally) of this slice. #[inline] - pub fn len(&self) -> usize { self.data.len() * nibble_ops::NIBBLE_PER_BYTE - self.offset } + pub fn len(&self) -> usize { self.data.len() * N::NIBBLE_PER_BYTE - self.offset } /// Get the nibble at position `i`. #[inline(always)] pub fn at(&self, i: usize) -> u8 { - nibble_ops::at(&self, i) + N::at(&self, i) } /// Return object which represents a view on to this slice (further) offset by `i` nibbles. - pub fn mid(&self, i: usize) -> NibbleSlice<'a> { + pub fn mid(&self, i: usize) -> NibbleSlice<'a, N> { NibbleSlice { data: self.data, offset: self.offset + i, + _marker: PhantomData, } } @@ -120,10 +125,11 @@ impl<'a> NibbleSlice<'a> { } /// Move back to a previously valid fix offset position. - pub fn back(&self, i: usize) -> NibbleSlice<'a> { + pub fn back(&self, i: usize) -> NibbleSlice<'a, N> { NibbleSlice { data: self.data, offset: i, + _marker: PhantomData, } } @@ -144,10 +150,10 @@ impl<'a> NibbleSlice<'a> { /// Return `Partial` representation of this slice: /// first encoded byte and following slice. 
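// Illustrative sketch (not part of the patch): a hand-worked model of radix-16
// nibble offsets over the test vector `[0x01, 0x23, 0x45]` used in the tests
// further down. `nibble_at` re-implements the `left_nibble_at`/`at` arithmetic
// locally and is not the crate's API.
#[cfg(test)]
mod nibble_offset_sketch {
	const NIBBLE_PER_BYTE: usize = 2;

	fn nibble_at(data: &[u8], offset: usize, i: usize) -> u8 {
		let pos = offset + i;
		let byte = data[pos / NIBBLE_PER_BYTE];
		if pos % NIBBLE_PER_BYTE == 0 { byte >> 4 } else { byte & 0x0F }
	}

	fn len(data: &[u8], offset: usize) -> usize {
		data.len() * NIBBLE_PER_BYTE - offset
	}

	#[test]
	fn offsets() {
		let data = &[0x01u8, 0x23, 0x45];
		assert_eq!(len(data, 0), 6);
		assert_eq!(len(data, 3), 3);
		// With a 3-nibble offset the visible nibbles are 3, 4, 5.
		for i in 0..3 {
			assert_eq!(nibble_at(data, 3, i), i as u8 + 3);
		}
	}
}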
pub fn right(&'a self) -> Partial { - let split = self.offset / nibble_ops::NIBBLE_PER_BYTE; - let nb = (self.len() % nibble_ops::NIBBLE_PER_BYTE) as u8; + let split = self.offset / N::NIBBLE_PER_BYTE; + let nb = (self.len() % N::NIBBLE_PER_BYTE) as u8; if nb > 0 { - ((nb, nibble_ops::pad_right(self.data[split])), &self.data[split + 1 ..]) + ((nb, N::pad_right(nb, self.data[split])), &self.data[split + 1 ..]) } else { ((0, 0), &self.data[split..]) } @@ -160,7 +166,7 @@ impl<'a> NibbleSlice<'a> { crate::rstd::iter::from_fn(move || { if first.0 > 0 { first.0 = 0; - Some(nibble_ops::pad_right(first.1)) + Some(N::pad_right(first.0, first.1)) } else { if ix < sl.len() { ix += 1; @@ -175,15 +181,15 @@ impl<'a> NibbleSlice<'a> { /// Return `Partial` bytes iterator over a range of byte.. /// Warning can be slow when unaligned (similar to `to_stored_range`). pub fn right_range_iter(&'a self, to: usize) -> impl Iterator + 'a { - let mut nib_res = to % nibble_ops::NIBBLE_PER_BYTE; - let aligned_i = (self.offset + to) % nibble_ops::NIBBLE_PER_BYTE; + let mut nib_res = to % N::NIBBLE_PER_BYTE; + let aligned_i = (self.offset + to) % N::NIBBLE_PER_BYTE; let aligned = aligned_i == 0; - let mut ix = self.offset / nibble_ops::NIBBLE_PER_BYTE; - let ix_lim = (self.offset + to) / nibble_ops::NIBBLE_PER_BYTE; + let mut ix = self.offset / N::NIBBLE_PER_BYTE; + let ix_lim = (self.offset + to) / N::NIBBLE_PER_BYTE; crate::rstd::iter::from_fn( move || { if aligned { if nib_res > 0 { - let v = nibble_ops::pad_right(self.data[ix]); + let v = N::pad_right(nib_res as u8, self.data[ix]); nib_res = 0; ix += 1; Some(v) @@ -194,11 +200,11 @@ impl<'a> NibbleSlice<'a> { None } } else { - let (s1, s2) = nibble_ops::SPLIT_SHIFTS; + let (s1, s2) = N::split_shifts(aligned_i); // unaligned if nib_res > 0 { let v = self.data[ix] >> s1; - let v = nibble_ops::pad_right(v); + let v = N::pad_right(nib_res as u8, v); nib_res = 0; Some(v) } else if ix < ix_lim { @@ -217,43 +223,43 @@ impl<'a> NibbleSlice<'a> { /// originates from a full key it will be the `Prefix of /// the node`. pub fn left(&'a self) -> Prefix { - let split = self.offset / nibble_ops::NIBBLE_PER_BYTE; - let ix = (self.offset % nibble_ops::NIBBLE_PER_BYTE) as u8; + let split = self.offset / N::NIBBLE_PER_BYTE; + let ix = (self.offset % N::NIBBLE_PER_BYTE) as u8; if ix == 0 { - (&self.data[..split], None) + (&self.data[..split], (0, 0)) } else { - (&self.data[..split], Some(nibble_ops::pad_left(self.data[split]))) + (&self.data[..split], (ix, N::pad_left(ix, self.data[split]))) } } /// Owned version of a `Prefix` from a `left` method call. 
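// Illustrative sketch (not part of the patch): a worked example of the `left`
// (prefix) and `right` (partial) conventions above for radix 16, computed by hand
// over `[0x01, 0x23, 0x45]` with a 3-nibble offset. The helper functions are local
// to this sketch.
#[cfg(test)]
mod prefix_partial_sketch {
	// left(): bytes fully consumed by the prefix, then (nibble count, left-aligned
	// padded byte) for the partially consumed byte.
	fn left(data: &[u8], offset: usize) -> (&[u8], (u8, u8)) {
		let split = offset / 2;
		let ix = (offset % 2) as u8;
		if ix == 0 {
			(&data[..split], (0, 0))
		} else {
			(&data[..split], (ix, data[split] & 0xF0))
		}
	}

	// right(): (nibble count, right-aligned padded byte) for the partially used
	// byte, then the remaining whole bytes.
	fn right(data: &[u8], offset: usize) -> ((u8, u8), &[u8]) {
		let split = offset / 2;
		let len = data.len() * 2 - offset;
		let nb = (len % 2) as u8;
		if nb > 0 {
			((nb, data[split] & 0x0F), &data[split + 1..])
		} else {
			((0, 0), &data[split..])
		}
	}

	#[test]
	fn worked_example() {
		let data = &[0x01u8, 0x23, 0x45];
		// Nibbles 3, 4, 5 remain after a 3-nibble offset.
		assert_eq!(left(data, 3), (&data[..1], (1, 0x20)));
		assert_eq!(right(data, 3), ((1, 0x03), &data[2..]));
	}
}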
- pub fn left_owned(&'a self) -> (BackingByteVec, Option) { + pub fn left_owned(&'a self) -> (BackingByteVec, (u8, u8)) { let (a, b) = self.left(); (a.into(), b) } } -impl<'a> Into for NibbleSlice<'a> { +impl<'a, N> Into for NibbleSlice<'a, N> { fn into(self) -> NodeKey { (self.offset, self.data.into()) } } -impl<'a> PartialEq for NibbleSlice<'a> { +impl<'a, N: NibbleOps> PartialEq for NibbleSlice<'a, N> { fn eq(&self, them: &Self) -> bool { self.len() == them.len() && self.starts_with(them) } } -impl<'a> Eq for NibbleSlice<'a> { } +impl<'a, N: NibbleOps> Eq for NibbleSlice<'a, N> { } -impl<'a> PartialOrd for NibbleSlice<'a> { +impl<'a, N: NibbleOps> PartialOrd for NibbleSlice<'a, N> { fn partial_cmp(&self, them: &Self) -> Option { Some(self.cmp(them)) } } -impl<'a> Ord for NibbleSlice<'a> { +impl<'a, N: NibbleOps> Ord for NibbleSlice<'a, N> { fn cmp(&self, them: &Self) -> Ordering { let s = min(self.len(), them.len()); let mut i = 0usize; @@ -269,7 +275,7 @@ impl<'a> Ord for NibbleSlice<'a> { } #[cfg(feature = "std")] -impl<'a> fmt::Debug for NibbleSlice<'a> { +impl<'a, N: NibbleOps> fmt::Debug for NibbleSlice<'a, N> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for i in 0..self.len() { match i { @@ -283,19 +289,23 @@ impl<'a> fmt::Debug for NibbleSlice<'a> { #[cfg(test)] mod tests { - use crate::nibble::{NibbleSlice, BackingByteVec}; + use crate::nibble::{NibbleSlice, BackingByteVec, Radix16, Radix4, NibbleOps}; static D: &'static [u8;3] = &[0x01u8, 0x23, 0x45]; #[test] fn basics() { - let n = NibbleSlice::new(D); + basics_inner::(); + } + + fn basics_inner() { + let n = NibbleSlice::::new(D); assert_eq!(n.len(), 6); assert!(!n.is_empty()); - let n = NibbleSlice::new_offset(D, 6); + let n = NibbleSlice::::new_offset(D, 6); assert!(n.is_empty()); - let n = NibbleSlice::new_offset(D, 3); + let n = NibbleSlice::::new_offset(D, 3); assert_eq!(n.len(), 3); for i in 0..3 { assert_eq!(n.at(i), i as u8 + 3); @@ -304,7 +314,11 @@ mod tests { #[test] fn iterator() { - let n = NibbleSlice::new(D); + iterator_inner::(); + } + + fn iterator_inner() { + let n = NibbleSlice::::new(D); let mut nibbles: Vec = vec![]; nibbles.extend(n.iter()); assert_eq!(nibbles, (0u8..6).collect::>()) @@ -312,7 +326,11 @@ mod tests { #[test] fn mid() { - let n = NibbleSlice::new(D); + mid_inner::(); + } + + fn mid_inner() { + let n = NibbleSlice::::new(D); let m = n.mid(2); for i in 0..4 { assert_eq!(m.at(i), i as u8 + 2); @@ -325,7 +343,7 @@ mod tests { #[test] fn encoded_pre() { - let n = NibbleSlice::new(D); + let n = NibbleSlice::::new(D); assert_eq!(n.to_stored(), (0, BackingByteVec::from_slice(&[0x01, 0x23, 0x45]))); assert_eq!(n.mid(1).to_stored(), (1, BackingByteVec::from_slice(&[0x01, 0x23, 0x45]))); assert_eq!(n.mid(2).to_stored(), (0, BackingByteVec::from_slice(&[0x23, 0x45]))); @@ -334,7 +352,7 @@ mod tests { #[test] fn from_encoded_pre() { - let n = NibbleSlice::new(D); + let n = NibbleSlice::::new(D); let stored: BackingByteVec = [0x01, 0x23, 0x45][..].into(); assert_eq!(n, NibbleSlice::from_stored(&(0, stored.clone()))); assert_eq!(n.mid(1), NibbleSlice::from_stored(&(1, stored))); @@ -342,7 +360,8 @@ mod tests { #[test] fn range_iter() { - let n = NibbleSlice::new(D); + let n = NibbleSlice::::new(D); + let n2 = NibbleSlice::::new(D); for i in [ vec![], vec![0x00], @@ -352,7 +371,8 @@ mod tests { vec![0x00, 0x12, 0x34], vec![0x01, 0x23, 0x45], ].iter().enumerate() { - range_iter_test(n, i.0, None, &i.1[..]); + range_iter_test::(n, i.0, None, &i.1[..]); + range_iter_test::(n2, i.0 * 2, None, 
&i.1[..]); } for i in [ vec![], @@ -362,7 +382,8 @@ mod tests { vec![0x12, 0x34], vec![0x01, 0x23, 0x45], ].iter().enumerate() { - range_iter_test(n, i.0, Some(1), &i.1[..]); + range_iter_test::(n, i.0, Some(1), &i.1[..]); + range_iter_test::(n2, i.0 * 2, Some(2), &i.1[..]); } for i in [ vec![], @@ -371,7 +392,8 @@ mod tests { vec![0x02, 0x34], vec![0x23, 0x45], ].iter().enumerate() { - range_iter_test(n, i.0, Some(2), &i.1[..]); + range_iter_test::(n, i.0, Some(2), &i.1[..]); + range_iter_test::(n2, i.0 * 2, Some(4), &i.1[..]); } for i in [ vec![], @@ -379,11 +401,12 @@ mod tests { vec![0x34], vec![0x03, 0x45], ].iter().enumerate() { - range_iter_test(n, i.0, Some(3), &i.1[..]); + range_iter_test::(n, i.0, Some(3), &i.1[..]); + range_iter_test::(n2, i.0 * 2, Some(6), &i.1[..]); } } - fn range_iter_test(n: NibbleSlice, nb: usize, mid: Option, res: &[u8]) { + fn range_iter_test(n: NibbleSlice, nb: usize, mid: Option, res: &[u8]) { let n = if let Some(i) = mid { n.mid(i) } else { n }; @@ -392,7 +415,10 @@ mod tests { #[test] fn shared() { - let n = NibbleSlice::new(D); + shared_inner::(); + } + fn shared_inner() { + let n = NibbleSlice::::new(D); let other = &[0x01u8, 0x23, 0x01, 0x23, 0x45, 0x67]; let m = NibbleSlice::new(other); @@ -408,8 +434,11 @@ mod tests { #[test] fn compare() { + compare_inner::(); + } + fn compare_inner() { let other = &[0x01u8, 0x23, 0x01, 0x23, 0x45]; - let n = NibbleSlice::new(D); + let n = NibbleSlice::::new(D); let m = NibbleSlice::new(other); assert!(n != m); diff --git a/trie-db/src/nibble/nibblevec.rs b/trie-db/src/nibble/nibblevec.rs index fcbc1b46..319cb5f6 100644 --- a/trie-db/src/nibble/nibblevec.rs +++ b/trie-db/src/nibble/nibblevec.rs @@ -14,24 +14,25 @@ //! An owning, nibble-oriented byte vector. -use crate::nibble::{NibbleSlice, BackingByteVec}; -use crate::nibble::nibble_ops; +use crate::nibble::{NibbleSlice, BackingByteVec, NibbleOps}; use hash_db::Prefix; use crate::node_codec::Partial; use super::NibbleVec; +use crate::rstd::marker::PhantomData; -impl Default for NibbleVec { +impl Default for NibbleVec { fn default() -> Self { NibbleVec::new() } } -impl NibbleVec { +impl NibbleVec { /// Make a new `NibbleVec`. pub fn new() -> Self { NibbleVec { inner: BackingByteVec::new(), len: 0, + _marker: PhantomData, } } @@ -45,21 +46,21 @@ impl NibbleVec { /// Try to get the nibble at the given offset. #[inline] pub fn at(&self, idx: usize) -> u8 { - let ix = idx / nibble_ops::NIBBLE_PER_BYTE; - let pad = idx % nibble_ops::NIBBLE_PER_BYTE; - nibble_ops::at_left(pad as u8, self.inner[ix]) + let ix = idx / N::NIBBLE_PER_BYTE; + let pad = idx % N::NIBBLE_PER_BYTE; + N::at_left(pad as u8, self.inner[ix]) } /// Push a nibble onto the `NibbleVec`. Ignores the high 4 bits. 
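// Illustrative sketch (not part of the patch): the left-aligned packing that
// `push`/`at` implement below, shown for radix 16 with plain locals. `inner` and
// `len` only mirror the `NibbleVec` fields; this is not the crate type.
#[cfg(test)]
mod nibblevec_packing_sketch {
	#[test]
	fn left_aligned_packing() {
		let (mut inner, mut len): (Vec<u8>, usize) = (Vec::new(), 0);
		for &nibble in [0x1u8, 0x2, 0x3].iter() {
			if len % 2 == 0 {
				// Even position: start a new byte with the nibble in the high half.
				inner.push(nibble << 4);
			} else {
				// Odd position: fill the low half of the last byte.
				*inner.last_mut().unwrap() |= nibble;
			}
			len += 1;
		}
		// Three nibbles 1, 2, 3 pack as [0x12, 0x30] with length 3.
		assert_eq!((inner.as_slice(), len), (&[0x12u8, 0x30][..], 3));
	}
}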
pub fn push(&mut self, nibble: u8) { - let i = self.len % nibble_ops::NIBBLE_PER_BYTE; + let i = self.len % N::NIBBLE_PER_BYTE; if i == 0 { - self.inner.push(nibble_ops::push_at_left(0, nibble, 0)); + self.inner.push(N::push_at_left(0, nibble, 0)); } else { let output = self.inner.last_mut() .expect("len != 0 since len % 2 != 0; inner has a last element; qed"); - *output = nibble_ops::push_at_left(i as u8, nibble, *output); + *output = N::push_at_left(i as u8, nibble, *output); } self.len += 1; } @@ -71,11 +72,11 @@ impl NibbleVec { } let byte = self.inner.pop().expect("len != 0; inner has last elem; qed"); self.len -= 1; - let i_new = self.len % nibble_ops::NIBBLE_PER_BYTE; + let i_new = self.len % N::NIBBLE_PER_BYTE; if i_new != 0 { - self.inner.push(nibble_ops::pad_left(byte)); + self.inner.push(N::pad_left(i_new as u8, byte)); } - Some(nibble_ops::at_left(i_new as u8, byte)) + Some(N::at_left(i_new as u8, byte)) } /// Remove then n last nibbles in a faster way than popping n times. @@ -86,39 +87,39 @@ impl NibbleVec { return; } let end = self.len - n; - let end_index = end / nibble_ops::NIBBLE_PER_BYTE - + if end % nibble_ops::NIBBLE_PER_BYTE == 0 { 0 } else { 1 }; + let end_index = end / N::NIBBLE_PER_BYTE + + if end % N::NIBBLE_PER_BYTE == 0 { 0 } else { 1 }; (end_index..self.inner.len()).for_each(|_| { self.inner.pop(); }); self.len = end; - let pos = self.len % nibble_ops::NIBBLE_PER_BYTE; + let pos = self.len % N::NIBBLE_PER_BYTE; if pos != 0 { let kl = self.inner.len() - 1; - self.inner[kl] = nibble_ops::pad_left(self.inner[kl]); + self.inner[kl] = N::pad_left(pos as u8, self.inner[kl]); } } /// Get `Prefix` representation of this `NibbleVec`. pub fn as_prefix(&self) -> Prefix { - let split = self.len / nibble_ops::NIBBLE_PER_BYTE; - let pos = (self.len % nibble_ops::NIBBLE_PER_BYTE) as u8; + let split = self.len / N::NIBBLE_PER_BYTE; + let pos = (self.len % N::NIBBLE_PER_BYTE) as u8; if pos == 0 { - (&self.inner[..split], None) + (&self.inner[..split], (0, 0)) } else { - (&self.inner[..split], Some(nibble_ops::pad_left(self.inner[split]))) + (&self.inner[..split], (pos, N::pad_left(pos, self.inner[split]))) } } /// Append another `NibbleVec`. Can be slow (alignement of second vec). - pub fn append(&mut self, v: &NibbleVec) { + pub fn append(&mut self, v: &NibbleVec) { if v.len == 0 { return; } let final_len = self.len + v.len; - let offset = self.len % nibble_ops::NIBBLE_PER_BYTE; - let final_offset = final_len % nibble_ops::NIBBLE_PER_BYTE; - let last_index = self.len / nibble_ops::NIBBLE_PER_BYTE; + let offset = self.len % N::NIBBLE_PER_BYTE; + let final_offset = final_len % N::NIBBLE_PER_BYTE; + let last_index = self.len / N::NIBBLE_PER_BYTE; if offset > 0 { - let (s1, s2) = nibble_ops::SPLIT_SHIFTS; - self.inner[last_index] = nibble_ops::pad_left(self.inner[last_index]) + let (s1, s2) = N::split_shifts(offset); + self.inner[last_index] = N::pad_left(offset as u8, self.inner[last_index]) | (v.inner[0] >> s2); (0..v.inner.len() - 1) .for_each(|i| self.inner.push(v.inner[i] << s1 | v.inner[i+1] >> s2)); @@ -133,23 +134,27 @@ impl NibbleVec { /// Append a `Partial`. Can be slow (alignement of partial). 
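// Illustrative sketch (not part of the patch): how a radix-16 `Partial` header
// expands into nibbles, matching the `append_partial` test vectors below (for
// example `((1, 1), &[0x23])` yields nibbles 1, 2, 3). `expand_partial` is local
// to this sketch and only handles the radix-16 case with zero or one leading nibble.
#[cfg(test)]
mod partial_expansion_sketch {
	fn expand_partial((start, bytes): ((u8, u8), &[u8])) -> Vec<u8> {
		let mut nibbles = Vec::new();
		// The header counts leading nibbles; for radix 16 a single leading nibble
		// sits in the low bits of the header byte.
		if start.0 == 1 {
			nibbles.push(start.1 & 0x0F);
		}
		// The remaining bytes each carry two nibbles, high half first.
		for b in bytes {
			nibbles.push(b >> 4);
			nibbles.push(b & 0x0F);
		}
		nibbles
	}

	#[test]
	fn expands_header_then_bytes() {
		assert_eq!(expand_partial(((1, 1), &[0x23])), vec![1, 2, 3]);
		assert_eq!(expand_partial(((0, 0), &[0x23])), vec![2, 3]);
	}
}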
pub fn append_partial(&mut self, (start_byte, sl): Partial) { - if start_byte.0 == 1 { - self.push(nibble_ops::at_left(1, start_byte.1)); + for i in (1..=start_byte.0).rev() { + let ix = N::NIBBLE_PER_BYTE - i as usize; + self.push(N::at_left(ix as u8, start_byte.1)); } - let pad = self.inner.len() * nibble_ops::NIBBLE_PER_BYTE - self.len; + let pad = self.inner.len() * N::NIBBLE_PER_BYTE - self.len; if pad == 0 { self.inner.extend_from_slice(&sl[..]); } else { let kend = self.inner.len() - 1; if sl.len() > 0 { - self.inner[kend] = nibble_ops::pad_left(self.inner[kend]); - let (s1, s2) = nibble_ops::SPLIT_SHIFTS; + self.inner[kend] = N::pad_left( + (N::NIBBLE_PER_BYTE - pad) as u8, + self.inner[kend], + ); + let (s1, s2) = N::split_shifts(pad); self.inner[kend] |= sl[0] >> s1; (0..sl.len() - 1).for_each(|i| self.inner.push(sl[i] << s2 | sl[i+1] >> s1)); self.inner.push(sl[sl.len() - 1] << s2); } } - self.len += sl.len() * nibble_ops::NIBBLE_PER_BYTE; + self.len += sl.len() * N::NIBBLE_PER_BYTE; } /// Utility function for chaining two optional appending @@ -157,7 +162,7 @@ impl NibbleVec { /// Can be slow. pub(crate) fn append_optional_slice_and_nibble( &mut self, - o_slice: Option<&NibbleSlice>, + o_slice: Option<&NibbleSlice>, o_index: Option, ) -> usize { let mut res = 0; @@ -176,7 +181,7 @@ impl NibbleVec { /// Can be slow. pub(crate) fn clone_append_optional_slice_and_nibble( &self, - o_slice: Option<&NibbleSlice>, + o_slice: Option<&NibbleSlice>, o_index: Option, ) -> Self { let mut p = self.clone(); @@ -196,8 +201,8 @@ impl NibbleVec { } /// Try to treat this `NibbleVec` as a `NibbleSlice`. Works only if there is no padding. - pub fn as_nibbleslice(&self) -> Option { - if self.len % nibble_ops::NIBBLE_PER_BYTE == 0 { + pub fn as_nibbleslice(&self) -> Option> { + if self.len % N::NIBBLE_PER_BYTE == 0 { Some(NibbleSlice::new(self.inner())) } else { None @@ -209,13 +214,13 @@ impl NibbleVec { if self.len() < other.len() { return false; } - let byte_len = other.len() / nibble_ops::NIBBLE_PER_BYTE; + let byte_len = other.len() / N::NIBBLE_PER_BYTE; if &self.inner[..byte_len] != &other.inner[..byte_len] { return false; } - for pad in 0..(other.len() - byte_len * nibble_ops::NIBBLE_PER_BYTE) { - let self_nibble = nibble_ops::at_left(pad as u8, self.inner[byte_len]); - let other_nibble = nibble_ops::at_left(pad as u8, other.inner[byte_len]); + for pad in 0..(other.len() - byte_len * N::NIBBLE_PER_BYTE) { + let self_nibble = N::at_left(pad as u8, self.inner[byte_len]); + let other_nibble = N::at_left(pad as u8, other.inner[byte_len]); if self_nibble != other_nibble { return false; } @@ -224,8 +229,8 @@ impl NibbleVec { } } -impl<'a> From> for NibbleVec { - fn from(s: NibbleSlice<'a>) -> Self { +impl<'a, N: NibbleOps> From> for NibbleVec { + fn from(s: NibbleSlice<'a, N>) -> Self { let mut v = NibbleVec::new(); for i in 0..s.len() { v.push(s.at(i)); @@ -237,21 +242,26 @@ impl<'a> From> for NibbleVec { #[cfg(test)] mod tests { use crate::nibble::NibbleVec; - use crate::nibble::nibble_ops; + use crate::nibble::{Radix16, NibbleOps, Radix4}; #[test] fn push_pop() { - let mut v = NibbleVec::new(); + push_pop_inner::(); + push_pop_inner::(); + } - for i in 0..(nibble_ops::NIBBLE_PER_BYTE * 3) { - let iu8 = (i % nibble_ops::NIBBLE_PER_BYTE) as u8; + fn push_pop_inner() { + let mut v = NibbleVec::::new(); + + for i in 0..(N::NIBBLE_PER_BYTE * 3) { + let iu8 = (i % N::NIBBLE_PER_BYTE) as u8; v.push(iu8); assert_eq!(v.len() - 1, i); assert_eq!(v.at(i), iu8); } - for i in (0..(nibble_ops::NIBBLE_PER_BYTE 
* 3)).rev() { - let iu8 = (i % nibble_ops::NIBBLE_PER_BYTE) as u8; + for i in (0..(N::NIBBLE_PER_BYTE * 3)).rev() { + let iu8 = (i % N::NIBBLE_PER_BYTE) as u8; let a = v.pop(); assert_eq!(a, Some(iu8)); assert_eq!(v.len(), i); @@ -260,15 +270,55 @@ mod tests { #[test] fn append_partial() { - append_partial_inner(&[1, 2, 3], &[], ((1, 1), &[0x23])); - append_partial_inner(&[1, 2, 3], &[1], ((0, 0), &[0x23])); - append_partial_inner(&[0, 1, 2, 3], &[0], ((1, 1), &[0x23])); + append_partial_inner::(&[1, 2, 3], &[], ((1, 1), &[0x23])); + append_partial_inner::(&[1, 2, 3], &[1], ((0, 0), &[0x23])); + append_partial_inner::(&[0, 1, 2, 3], &[0], ((1, 1), &[0x23])); + append_partial_inner::(&[1, 0, 2, 0, 3], &[], ((1, 1), &[0x23])); + append_partial_inner::( + &[1, 0, 2, 0, 3, 0, 1, 0, 2], + &[], + ((1, 1), &[0x23, 0x12]), + ); + append_partial_inner::( + &[2, 1, 0, 2, 0, 3, 0, 1, 0, 2], + &[], + ((2, 0b1001), &[0x23, 0x12]), + ); + append_partial_inner::( + &[3, 2, 1, 0, 2, 0, 3, 0, 1, 0, 2], + &[], + ((3, 0b111001), &[0x23, 0x12])); + append_partial_inner::( + &[3, 1, 0, 2, 0, 3, 0, 1, 0, 2], + &[3], + ((1, 1), &[0x23, 0x12]), + ); + append_partial_inner::( + &[3, 2, 3, 1, 0, 2, 0, 3, 0, 1, 0, 2], + &[3, 2, 3], + ((1, 1), &[0x23, 0x12]), + ); + append_partial_inner::( + &[3, 2, 3, 2, 1, 0, 2, 0, 3, 0, 1, 0, 2], + &[3, 2, 3], + ((2, 0b1001), &[0x23, 0x12]), + ); + append_partial_inner::( + &[3, 2, 1, 0, 2, 0, 3, 0, 1, 0, 2], + &[3, 2], + ((1, 1), &[0x23, 0x12]), + ); + append_partial_inner::( + &[3, 2, 3, 2, 1, 0, 2, 0, 3, 0, 1, 0, 2], + &[3, 2], + ((3, 0b111001), &[0x23, 0x12]), + ); } - fn append_partial_inner(res: &[u8], init: &[u8], partial: ((u8, u8), &[u8])) { - let mut resv = NibbleVec::new(); + fn append_partial_inner(res: &[u8], init: &[u8], partial: ((u8, u8), &[u8])) { + let mut resv = NibbleVec::::new(); res.iter().for_each(|r| resv.push(*r)); - let mut initv = NibbleVec::new(); + let mut initv = NibbleVec::::new(); init.iter().for_each(|r| initv.push(*r)); initv.append_partial(partial); assert_eq!(resv, initv); @@ -277,7 +327,7 @@ mod tests { #[test] fn drop_lasts_test() { let test_trun = |a: &[u8], b: usize, c: (&[u8], usize)| { - let mut k = NibbleVec::new(); + let mut k = NibbleVec::::new(); for v in a { k.push(*v); } @@ -296,5 +346,4 @@ mod tests { test_trun(&[1, 2, 3], 3, (&[], 0)); test_trun(&[1, 2, 3], 4, (&[], 0)); } - } diff --git a/trie-db/src/node.rs b/trie-db/src/node.rs index ef50e8d6..75697b71 100644 --- a/trie-db/src/node.rs +++ b/trie-db/src/node.rs @@ -14,15 +14,39 @@ use hash_db::Hasher; use crate::nibble::{self, NibbleSlice}; -use crate::nibble::nibble_ops; +use crate::nibble::NibbleOps; use crate::node_codec::NodeCodec; +use crate::nibble::{ChildIndex, ChildSliceIndex}; + use crate::rstd::{borrow::Borrow, ops::Range}; +use crate::rstd::marker::PhantomData; /// Partial node key type: offset and owned value of a nibbleslice. /// Offset is applied on first byte of array (bytes are right aligned). pub type NodeKey = (usize, nibble::BackingByteVec); +#[derive(Eq, PartialEq, Clone)] +#[cfg_attr(feature = "std", derive(Debug))] +/// Alias to branch children slice, it is equivalent to '&[&[u8]]'. +/// Reason for using it is https://github.com/rust-lang/rust/issues/43408. +pub struct BranchChildrenSlice<'a, I> { + index: I, + data: &'a[u8], +} + +impl<'a, I: ChildSliceIndex> BranchChildrenSlice<'a, I> { + /// Similar to `Index` but returns a copied value. 
+ pub fn at(&self, index: usize) -> Option> { + self.index.slice_at(index, self.data) + } + + /// Iterator over children node handles. + pub fn iter(&'a self) -> impl Iterator>> { + self.index.iter(self.data) + } +} + /// A reference to a trie node which may be stored within another trie node. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum NodeHandle<'a> { @@ -43,18 +67,18 @@ pub fn decode_hash(data: &[u8]) -> Option { /// Type of node in the trie and essential information thereof. #[derive(Eq, PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum Node<'a> { +pub enum Node<'a, N: NibbleOps> { /// Null trie node; could be an empty root or an empty branch entry. Empty, /// Leaf node; has key slice and value. Value may not be empty. - Leaf(NibbleSlice<'a>, &'a [u8]), + Leaf(NibbleSlice<'a, N>, &'a [u8]), /// Extension node; has key slice and node data. Data may not be null. - Extension(NibbleSlice<'a>, NodeHandle<'a>), + Extension(NibbleSlice<'a, N>, NodeHandle<'a>), /// Branch node; has slice of child nodes (each possibly null) /// and an optional immediate node data. - Branch([Option>; nibble_ops::NIBBLE_LENGTH], Option<&'a [u8]>), + Branch(BranchChildrenSlice<'a, N::ChildRangeIndex>, Option<&'a [u8]>), /// Branch node with support for a nibble (when extension nodes are not used). - NibbledBranch(NibbleSlice<'a>, [Option>; nibble_ops::NIBBLE_LENGTH], Option<&'a [u8]>), + NibbledBranch(NibbleSlice<'a, N>, BranchChildrenSlice<'a, N::ChildRangeIndex>, Option<&'a [u8]>), } /// A `NodeHandlePlan` is a decoding plan for constructing a `NodeHandle` from an encoded trie @@ -81,33 +105,55 @@ impl NodeHandlePlan { /// `NibbleSlicePlan` is created by parsing a byte slice and can be reused multiple times. #[derive(Eq, PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] -pub struct NibbleSlicePlan { +pub struct NibbleSlicePlan { bytes: Range, offset: usize, + _marker: PhantomData, } -impl NibbleSlicePlan { +impl NibbleSlicePlan { /// Construct a nibble slice decode plan. pub fn new(bytes: Range, offset: usize) -> Self { NibbleSlicePlan { bytes, - offset + offset, + _marker: PhantomData, } } /// Returns the nibble length of the slice. pub fn len(&self) -> usize { - (self.bytes.end - self.bytes.start) * nibble_ops::NIBBLE_PER_BYTE - self.offset + (self.bytes.end - self.bytes.start) * N::NIBBLE_PER_BYTE - self.offset } /// Build a nibble slice by decoding a byte slice according to the plan. It is the /// responsibility of the caller to ensure that the node plan was created for the argument /// data, otherwise the call may decode incorrectly or panic. - pub fn build<'a, 'b>(&'a self, data: &'b [u8]) -> NibbleSlice<'b> { + pub fn build<'a, 'b>(&'a self, data: &'b [u8]) -> NibbleSlice<'b, N> { NibbleSlice::new_offset(&data[self.bytes.clone()], self.offset) } } +/// TODO try non public +#[derive(Eq, PartialEq, Clone)] +#[cfg_attr(feature = "std", derive(Debug))] +pub struct BranchChildrenNodePlan { + index: I, +} + +impl> BranchChildrenNodePlan { + /// Similar to `Index` but return a copied value. + pub fn at(&self, index: usize) -> Option { + self.index.at(index).cloned() + } + + /// Build from sequence of content. + pub fn new(nodes: impl Iterator>) -> Self { + let index = ChildIndex::from_iter(nodes); + BranchChildrenNodePlan { index } + } +} + /// A `NodePlan` is a blueprint for decoding a node from a byte slice. The `NodePlan` is created /// by parsing an encoded node and can be reused multiple times. 
This is useful as a `Node` borrows /// from a byte slice and this struct does not. @@ -116,38 +162,38 @@ impl NibbleSlicePlan { /// ranges that can be used to index into a large byte slice. #[derive(Eq, PartialEq, Clone)] #[cfg_attr(feature = "std", derive(Debug))] -pub enum NodePlan { +pub enum NodePlan { /// Null trie node; could be an empty root or an empty branch entry. Empty, /// Leaf node; has a partial key plan and value. Leaf { - partial: NibbleSlicePlan, + partial: NibbleSlicePlan, value: Range, }, /// Extension node; has a partial key plan and child data. Extension { - partial: NibbleSlicePlan, + partial: NibbleSlicePlan, child: NodeHandlePlan, }, /// Branch node; has slice of child nodes (each possibly null) /// and an optional immediate node data. Branch { value: Option>, - children: [Option; nibble_ops::NIBBLE_LENGTH], + children: BranchChildrenNodePlan, }, /// Branch node with support for a nibble (when extension nodes are not used). NibbledBranch { - partial: NibbleSlicePlan, + partial: NibbleSlicePlan, value: Option>, - children: [Option; nibble_ops::NIBBLE_LENGTH], + children: BranchChildrenNodePlan, }, } -impl NodePlan { +impl NodePlan { /// Build a node by decoding a byte slice according to the node plan. It is the responsibility /// of the caller to ensure that the node plan was created for the argument data, otherwise the /// call may decode incorrectly or panic. - pub fn build<'a, 'b>(&'a self, data: &'b [u8]) -> Node<'b> { + pub fn build<'a, 'b>(&'a self, data: &'b [u8]) -> Node<'b, N> { match self { NodePlan::Empty => Node::Empty, NodePlan::Leaf { partial, value } => @@ -155,18 +201,18 @@ impl NodePlan { NodePlan::Extension { partial, child } => Node::Extension(partial.build(data), child.build(data)), NodePlan::Branch { value, children } => { - let mut child_slices = [None; nibble_ops::NIBBLE_LENGTH]; - for i in 0..nibble_ops::NIBBLE_LENGTH { - child_slices[i] = children[i].as_ref().map(|child| child.build(data)); - } + let child_slices = BranchChildrenSlice { + index: children.index.clone(), + data, + }; let value_slice = value.clone().map(|value| &data[value]); Node::Branch(child_slices, value_slice) }, NodePlan::NibbledBranch { partial, value, children } => { - let mut child_slices = [None; nibble_ops::NIBBLE_LENGTH]; - for i in 0..nibble_ops::NIBBLE_LENGTH { - child_slices[i] = children[i].as_ref().map(|child| child.build(data)); - } + let child_slices = BranchChildrenSlice { + index: children.index.clone(), + data, + }; let value_slice = value.clone().map(|value| &data[value]); Node::NibbledBranch(partial.build(data), child_slices, value_slice) }, @@ -178,14 +224,14 @@ impl NodePlan { /// the `OwnedNode`. This is useful for trie iterators. #[cfg_attr(feature = "std", derive(Debug))] #[derive(PartialEq, Eq)] -pub struct OwnedNode> { +pub struct OwnedNode, N: NibbleOps> { data: D, - plan: NodePlan, + plan: NodePlan, } -impl> OwnedNode { +impl, N: NibbleOps> OwnedNode { /// Construct an `OwnedNode` by decoding an owned data source according to some codec. - pub fn new(data: D) -> Result { + pub fn new>(data: D) -> Result { let plan = C::decode_plan(data.borrow())?; Ok(OwnedNode { data, plan }) } @@ -196,12 +242,12 @@ impl> OwnedNode { } /// Returns a reference to the node decode plan. - pub fn node_plan(&self) -> &NodePlan { + pub fn node_plan(&self) -> &NodePlan { &self.plan } /// Construct a `Node` by borrowing data from this struct. 
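// Illustrative sketch (not part of the patch): the plan/build pattern used by
// `NodePlan`/`OwnedNode` above. A plan stores only byte ranges into the encoded
// buffer, and `build` re-borrows the buffer to produce a cheap view. The names
// here are local to this example.
#[cfg(test)]
mod plan_build_sketch {
	use core::ops::Range;

	struct ValuePlan {
		value: Range<usize>,
	}

	impl ValuePlan {
		// Decoding happens once when the plan is made; building a view is just slicing.
		fn build<'a>(&self, data: &'a [u8]) -> &'a [u8] {
			&data[self.value.clone()]
		}
	}

	#[test]
	fn reuse_one_plan_for_many_builds() {
		let encoded = [0xAAu8, 0x01, 0x02, 0x03];
		let plan = ValuePlan { value: 1..4 };
		assert_eq!(plan.build(&encoded), &[0x01, 0x02, 0x03]);
		// The same plan can be applied again without re-decoding.
		assert_eq!(plan.build(&encoded), &[0x01, 0x02, 0x03]);
	}
}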
- pub fn node(&self) -> Node { + pub fn node(&self) -> Node { self.plan.build(self.data.borrow()) } } diff --git a/trie-db/src/node_codec.rs b/trie-db/src/node_codec.rs index ad33ebf5..d5ca4694 100644 --- a/trie-db/src/node_codec.rs +++ b/trie-db/src/node_codec.rs @@ -18,7 +18,7 @@ use crate::MaybeDebug; use crate::node::{Node, NodePlan}; use crate::ChildReference; - +use crate::NibbleOps; use crate::rstd::{borrow::Borrow, Error, hash, vec::Vec}; @@ -33,6 +33,8 @@ pub trait NodeCodec: Sized { /// Codec error type. type Error: Error; + type Nibble: NibbleOps; + /// Output type of encoded node hasher. type HashOut: AsRef<[u8]> + AsMut<[u8]> + Default + MaybeDebug + PartialEq + Eq + hash::Hash + Send + Sync + Clone + Copy; @@ -41,10 +43,10 @@ pub trait NodeCodec: Sized { fn hashed_null_node() -> Self::HashOut; /// Decode bytes to a `NodePlan`. Returns `Self::E` on failure. - fn decode_plan(data: &[u8]) -> Result; + fn decode_plan(data: &[u8]) -> Result, Self::Error>; /// Decode bytes to a `Node`. Returns `Self::E` on failure. - fn decode(data: &[u8]) -> Result { + fn decode(data: &[u8]) -> Result, Self::Error> { Ok(Self::decode_plan(data)?.build(data)) } @@ -83,3 +85,24 @@ pub trait NodeCodec: Sized { value: Option<&[u8]> ) -> Vec; } + +/// Bitmap encoder for the number of children nodes. +pub trait BitMap: Sized { + /// length to encode the bitmap + const ENCODED_LEN: usize; + /// Codec error type. + type Error: Error; + + /// Codec buffer to use. + type Buffer: AsRef<[u8]> + AsMut<[u8]> + Default; + + /// Decode bitmap from its encoded full slice. + fn decode(data: &[u8]) -> Result; + + /// Return wether the bitmap registered a value for a branch + /// child index. + fn value_at(&self, i: usize) -> bool; + + /// Encode bitmap, output slice must be of right length. + fn encode>(has_children: I , output: &mut [u8]); +} diff --git a/trie-db/src/proof/generate.rs b/trie-db/src/proof/generate.rs index 817e9333..843a0d1f 100644 --- a/trie-db/src/proof/generate.rs +++ b/trie-db/src/proof/generate.rs @@ -15,51 +15,52 @@ //! Generation of compact proofs for Merkle-Patricia tries. use crate::rstd::{ - boxed::Box, convert::TryInto, marker::PhantomData, ops::Range, vec, vec::Vec, + boxed::Box, convert::TryInto, ops::Range, vec, vec::Vec, }; use hash_db::Hasher; +use crate::nibble::NibbleOps; use crate::{ - CError, ChildReference, nibble::LeftNibbleSlice, nibble_ops::NIBBLE_LENGTH, NibbleSlice, node::{NodeHandle, NodeHandlePlan, NodePlan, OwnedNode}, NodeCodec, Recorder, - Result as TrieResult, Trie, TrieError, TrieHash, - TrieLayout, + CError, ChildReference, nibble::LeftNibbleSlice, NibbleSlice, + node::{NodeHandle, NodeHandlePlan, NodePlan, OwnedNode, BranchChildrenNodePlan}, + NodeCodec, Recorder, Result as TrieResult, Trie, TrieError, TrieHash, + TrieLayout, TrieChildRangeIndex, }; -struct StackEntry<'a, C: NodeCodec> { +struct StackEntry<'a, L: TrieLayout> { /// The prefix is the nibble path to the node in the trie. - prefix: LeftNibbleSlice<'a>, - node: OwnedNode>, + prefix: LeftNibbleSlice<'a, L::Nibble>, + node: OwnedNode, L::Nibble>, /// The hash of the node or None if it is referenced inline. - node_hash: Option, + node_hash: Option>, /// Whether the value should be omitted in the generated proof. omit_value: bool, /// The next entry in the stack is a child of the preceding entry at this index. For branch /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, /// The child references to use in constructing the proof nodes. 
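// Illustrative sketch (not part of the patch): one possible layout behind the
// `BitMap` trait introduced above, assuming a radix-16 branch and a 2-byte
// little-endian bitmap with one bit per child slot; the actual reference codec
// may lay this out differently. It mirrors the `decode`/`value_at`/`encode`
// shape without implementing the trait itself.
#[cfg(test)]
mod bitmap_sketch {
	struct Bitmap16(u16);

	impl Bitmap16 {
		const ENCODED_LEN: usize = 2;

		fn decode(data: &[u8]) -> Option<Self> {
			if data.len() < Self::ENCODED_LEN {
				return None;
			}
			Some(Bitmap16(u16::from_le_bytes([data[0], data[1]])))
		}

		fn value_at(&self, i: usize) -> bool {
			self.0 & (1u16 << i) != 0
		}

		fn encode(has_children: impl Iterator<Item = bool>, output: &mut [u8]) {
			let mut bitmap = 0u16;
			for (i, has_child) in has_children.enumerate().take(16) {
				if has_child {
					bitmap |= 1 << i;
				}
			}
			output[..2].copy_from_slice(&bitmap.to_le_bytes());
		}
	}

	#[test]
	fn round_trip() {
		let mut buf = [0u8; 2];
		// Children present at nibbles 0 and 5 only.
		Bitmap16::encode((0..16).map(|i| i == 0 || i == 5), &mut buf);
		let bitmap = Bitmap16::decode(&buf).unwrap();
		assert!(bitmap.value_at(0) && bitmap.value_at(5));
		assert!(!bitmap.value_at(1));
	}
}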
- children: Vec>>, + children: Vec>>>, /// The index into the proof vector that the encoding of this entry should be placed at. output_index: Option, - _marker: PhantomData, } -impl<'a, C: NodeCodec> StackEntry<'a, C> { +impl<'a, L: TrieLayout> StackEntry<'a, L> { fn new( - prefix: LeftNibbleSlice<'a>, + prefix: LeftNibbleSlice<'a, L::Nibble>, node_data: Vec, - node_hash: Option, + node_hash: Option>, output_index: Option, - ) -> TrieResult + ) -> TrieResult, CError> { - let node = OwnedNode::new::(node_data) + let node = OwnedNode::new::(node_data) .map_err(|err| Box::new( TrieError::DecoderError(node_hash.unwrap_or_default(), err) ))?; let children_len = match node.node_plan() { NodePlan::Empty | NodePlan::Leaf { .. } => 0, NodePlan::Extension { .. } => 1, - NodePlan::Branch { .. } | NodePlan::NibbledBranch { .. } => NIBBLE_LENGTH, + NodePlan::Branch { .. } | NodePlan::NibbledBranch { .. } => L::Nibble::NIBBLE_LENGTH, }; Ok(StackEntry { prefix, @@ -69,19 +70,18 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { child_index: 0, children: vec![None; children_len], output_index, - _marker: PhantomData::default(), }) } /// Encode this entry to an encoded trie node with data properly omitted. - fn encode_node(mut self) -> TrieResult, C::HashOut, C::Error> { + fn encode_node(mut self) -> TrieResult, TrieHash, CError> { let node_data = self.node.data(); Ok(match self.node.node_plan() { NodePlan::Empty => node_data.to_vec(), NodePlan::Leaf { .. } if !self.omit_value => node_data.to_vec(), NodePlan::Leaf { partial, value: _ } => { let partial = partial.build(node_data); - C::leaf_node(partial.right(), &[]) + L::Codec::leaf_node(partial.right(), &[]) } NodePlan::Extension { .. } if self.child_index == 0 => node_data.to_vec(), NodePlan::Extension { partial: partial_plan, child: _ } => { @@ -92,7 +92,7 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { child_index > 0; \ the branch guard guarantees that child_index > 0" ); - C::extension_node( + L::Codec::extension_node( partial.right_iter(), partial.len(), child @@ -105,7 +105,7 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { self.child_index, &mut self.children )?; - C::branch_node( + L::Codec::branch_node( self.children.into_iter(), value_with_omission(node_data, value, self.omit_value) ) @@ -118,7 +118,7 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { self.child_index, &mut self.children )?; - C::branch_node_nibbled( + L::Codec::branch_node_nibbled( partial.right_iter(), partial.len(), self.children.into_iter(), @@ -135,20 +135,20 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { /// - children has size NIBBLE_LENGTH. fn complete_branch_children( node_data: &[u8], - child_handles: &[Option; NIBBLE_LENGTH], + child_handles: &BranchChildrenNodePlan>, child_index: usize, - children: &mut [Option>], - ) -> TrieResult<(), C::HashOut, C::Error> + children: &mut [Option>>], + ) -> TrieResult<(), TrieHash, CError> { - for i in child_index..NIBBLE_LENGTH { - children[i] = child_handles[i] + for i in child_index..L::Nibble::NIBBLE_LENGTH { + children[i] = child_handles.at(i) .as_ref() .map(|child_plan| child_plan .build(node_data) .try_into() .map_err(|hash| Box::new( - TrieError::InvalidHash(C::HashOut::default(), hash) + TrieError::InvalidHash(TrieHash::::default(), hash) )) ) .transpose()?; @@ -177,12 +177,12 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { } NodePlan::Branch { children, .. } | NodePlan::NibbledBranch { children, .. 
} => { assert!( - self.child_index < NIBBLE_LENGTH, + self.child_index < L::Nibble::NIBBLE_LENGTH, "extension nodes have at most NIBBLE_LENGTH children; \ set_child is called when the only child is popped from the stack; \ child_index is StackEntry<'a, C> { /// node reference will be an omitted child. If the child is inline-referenced in the trie, the /// proof node reference will also be inline. fn replacement_child_ref(encoded_child: &[u8], child: &NodeHandlePlan) - -> ChildReference + -> ChildReference> { match child { - NodeHandlePlan::Hash(_) => ChildReference::Inline(C::HashOut::default(), 0), + NodeHandlePlan::Hash(_) => ChildReference::Inline(TrieHash::::default(), 0), NodeHandlePlan::Inline(_) => { - let mut hash = C::HashOut::default(); + let mut hash = TrieHash::::default(); assert!( encoded_child.len() <= hash.as_ref().len(), "the encoding of the raw inline node is checked to be at most the hash length @@ -235,7 +235,7 @@ pub fn generate_proof<'a, T, L, I, K>(trie: &T, keys: I) // The stack of nodes through a path in the trie. Each entry is a child node of the preceding // entry. - let mut stack = >>::new(); + let mut stack = >>::new(); // The mutated trie nodes comprising the final proof. let mut proof_nodes = Vec::new(); @@ -269,7 +269,7 @@ pub fn generate_proof<'a, T, L, I, K>(trie: &T, keys: I) loop { let step = match stack.last_mut() { - Some(entry) => match_key_to_node::( + Some(entry) => match_key_to_node::( entry.node.data(), entry.node.node_plan(), &mut entry.omit_value, @@ -364,15 +364,15 @@ enum Step<'a> { /// Determine the next algorithmic step to take by matching the current key against the current top /// entry on the stack. -fn match_key_to_node<'a, C: NodeCodec>( +fn match_key_to_node<'a, L: TrieLayout>( node_data: &'a [u8], - node_plan: &NodePlan, + node_plan: &NodePlan, omit_value: &mut bool, child_index: &mut usize, - children: &mut [Option>], - key: &LeftNibbleSlice, + children: &mut [Option>>], + key: &LeftNibbleSlice, prefix_len: usize, -) -> TrieResult, C::HashOut, C::Error> +) -> TrieResult, TrieHash, CError> { Ok(match node_plan { NodePlan::Empty => Step::FoundValue(None), @@ -399,10 +399,10 @@ fn match_key_to_node<'a, C: NodeCodec>( } } NodePlan::Branch { value, children: child_handles } => - match_key_to_branch_node::( + match_key_to_branch_node::( node_data, value, - &child_handles, + child_handles, omit_value, child_index, children, @@ -411,10 +411,10 @@ fn match_key_to_node<'a, C: NodeCodec>( NibbleSlice::new(&[]), )?, NodePlan::NibbledBranch { partial: partial_plan, value, children: child_handles } => - match_key_to_branch_node::( + match_key_to_branch_node::( node_data, value, - &child_handles, + child_handles, omit_value, child_index, children, @@ -425,17 +425,17 @@ fn match_key_to_node<'a, C: NodeCodec>( }) } -fn match_key_to_branch_node<'a, 'b, C: NodeCodec>( +fn match_key_to_branch_node<'a, 'b, L: TrieLayout>( node_data: &'a [u8], value_range: &'b Option>, - child_handles: &'b [Option; NIBBLE_LENGTH], + child_handles: &'b BranchChildrenNodePlan>, omit_value: &mut bool, child_index: &mut usize, - children: &mut [Option>], - key: &'b LeftNibbleSlice<'b>, + children: &mut [Option>>], + key: &'b LeftNibbleSlice<'b, L::Nibble>, prefix_len: usize, - partial: NibbleSlice<'b>, -) -> TrieResult, C::HashOut, C::Error> + partial: NibbleSlice<'b, L::Nibble>, +) -> TrieResult, TrieHash, CError> { if !key.contains(&partial, prefix_len) { return Ok(Step::FoundValue(None)); @@ -457,20 +457,20 @@ fn match_key_to_branch_node<'a, 'b, C: NodeCodec>( as usize; 
assert!(*child_index <= new_index); while *child_index < new_index { - children[*child_index] = child_handles[*child_index] + children[*child_index] = child_handles.at(*child_index) .as_ref() .map(|child_plan| child_plan .build(node_data) .try_into() .map_err(|hash| Box::new( - TrieError::InvalidHash(C::HashOut::default(), hash) + TrieError::InvalidHash(TrieHash::::default(), hash) )) ) .transpose()?; *child_index += 1; } - if let Some(child_plan) = &child_handles[*child_index] { + if let Some(child_plan) = &child_handles.at(*child_index) { Ok(Step::Descend { child_prefix_len: prefix_len + partial.len() + 1, child: child_plan.build(node_data), @@ -496,11 +496,11 @@ fn value_with_omission<'a>( /// Unwind the stack until the given key is prefixed by the entry at the top of the stack. If the /// key is None, unwind the stack completely. As entries are popped from the stack, they are /// encoded into proof nodes and added to the finalized proof. -fn unwind_stack( - stack: &mut Vec>, +fn unwind_stack( + stack: &mut Vec>, proof_nodes: &mut Vec>, - maybe_key: Option<&LeftNibbleSlice>, -) -> TrieResult<(), C::HashOut, C::Error> + maybe_key: Option<&LeftNibbleSlice>, +) -> TrieResult<(), TrieHash, CError> { while let Some(entry) = stack.pop() { match maybe_key { diff --git a/trie-db/src/proof/verify.rs b/trie-db/src/proof/verify.rs index eba4cb6a..083a0f46 100644 --- a/trie-db/src/proof/verify.rs +++ b/trie-db/src/proof/verify.rs @@ -13,11 +13,11 @@ //! Verification of compact proofs for Merkle-Patricia tries. use crate::rstd::{ - convert::TryInto, iter::Peekable, marker::PhantomData, result::Result, vec, vec::Vec, + convert::TryInto, iter::Peekable, result::Result, vec, vec::Vec, }; use crate::{ - CError, ChildReference, nibble::LeftNibbleSlice, nibble_ops::NIBBLE_LENGTH, - node::{Node, NodeHandle}, NodeCodec, TrieHash, TrieLayout, + CError, ChildReference, nibble::LeftNibbleSlice, NibbleOps, + node::{Node, NodeHandle, BranchChildrenSlice}, NodeCodec, TrieHash, TrieLayout, }; use hash_db::Hasher; @@ -94,10 +94,10 @@ impl std::error::Error for } } -struct StackEntry<'a, C: NodeCodec> { +struct StackEntry<'a, L: TrieLayout> { /// The prefix is the nibble path to the node in the trie. - prefix: LeftNibbleSlice<'a>, - node: Node<'a>, + prefix: LeftNibbleSlice<'a, L::Nibble>, + node: Node<'a, L::Nibble>, is_inline: bool, /// The value associated with this trie node. value: Option<&'a [u8]>, @@ -105,20 +105,19 @@ struct StackEntry<'a, C: NodeCodec> { /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, /// The child references to use in reconstructing the trie nodes. - children: Vec>>, - _marker: PhantomData, + children: Vec>>>, } -impl<'a, C: NodeCodec> StackEntry<'a, C> { - fn new(node_data: &'a [u8], prefix: LeftNibbleSlice<'a>, is_inline: bool) - -> Result> +impl<'a, L: TrieLayout> StackEntry<'a, L> { + fn new(node_data: &'a [u8], prefix: LeftNibbleSlice<'a, L::Nibble>, is_inline: bool) + -> Result, CError>> { - let node = C::decode(node_data) + let node = L::Codec::decode(node_data) .map_err(Error::DecodeError)?; let children_len = match node { Node::Empty | Node::Leaf(..) => 0, Node::Extension(..) => 1, - Node::Branch(..) | Node::NibbledBranch(..) => NIBBLE_LENGTH, + Node::Branch(..) | Node::NibbledBranch(..) 
=> L::Nibble::NIBBLE_LENGTH, }; let value = match node { Node::Empty | Node::Extension(_, _) => None, @@ -132,16 +131,15 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { value, child_index: 0, children: vec![None; children_len], - _marker: PhantomData::default(), }) } /// Encode this entry to an encoded trie node with data properly reconstructed. - fn encode_node(mut self) -> Result, Error> { + fn encode_node(mut self) -> Result, Error, CError>> { self.complete_children()?; Ok(match self.node { Node::Empty => - C::empty_node().to_vec(), + L::Codec::empty_node().to_vec(), Node::Leaf(partial, _) => { let value = self.value .expect( @@ -149,24 +147,24 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { value is only ever reassigned in the ValueMatch::MatchesLeaf match \ clause, which assigns only to Some" ); - C::leaf_node(partial.right(), value) + L::Codec::leaf_node(partial.right(), value) } Node::Extension(partial, _) => { let child = self.children[0] .expect("the child must be completed since child_index is 1"); - C::extension_node( + L::Codec::extension_node( partial.right_iter(), partial.len(), child ) } Node::Branch(_, _) => - C::branch_node( + L::Codec::branch_node( self.children.iter(), self.value, ), Node::NibbledBranch(partial, _, _) => - C::branch_node_nibbled( + L::Codec::branch_node_nibbled( partial.right_iter(), partial.len(), self.children.iter(), @@ -177,17 +175,17 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { fn advance_child_index( &mut self, - child_prefix: LeftNibbleSlice<'a>, + child_prefix: LeftNibbleSlice<'a, L::Nibble>, proof_iter: &mut I, - ) -> Result> + ) -> Result, CError>> where I: Iterator>, { - match self.node { + match &mut self.node { Node::Extension(_, child) => { // Guaranteed because of sorted keys order. assert_eq!(self.child_index, 0); - Self::make_child_entry(proof_iter, child, child_prefix) + Self::make_child_entry(proof_iter, *child, child_prefix) } Node::Branch(children, _) | Node::NibbledBranch(_, children, _) => { // because this is a branch @@ -196,14 +194,14 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { .expect("it's less than prefix.len(); qed") as usize; while self.child_index < child_index { - if let Some(child) = children[self.child_index] { + if let Some(child) = children.at(self.child_index) { let child_ref = child.try_into() .map_err(Error::InvalidChildReference)?; self.children[self.child_index] = Some(child_ref); } self.child_index += 1; } - let child = children[self.child_index] + let child = children.at(self.child_index) .expect("guaranteed by advance_item"); Self::make_child_entry(proof_iter, child, child_prefix) } @@ -212,17 +210,17 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { } /// Populate the remaining references in `children` with references copied the node itself. 
- fn complete_children(&mut self) -> Result<(), Error> { - match self.node { + fn complete_children(&mut self) -> Result<(), Error, CError>> { + match &mut self.node { Node::Extension(_, child) if self.child_index == 0 => { - let child_ref = child.try_into() + let child_ref = child.clone().try_into() .map_err(Error::InvalidChildReference)?; self.children[self.child_index] = Some(child_ref); self.child_index += 1; } Node::Branch(children, _) | Node::NibbledBranch(_, children, _) => { - while self.child_index < NIBBLE_LENGTH { - if let Some(child) = children[self.child_index] { + while self.child_index < L::Nibble::NIBBLE_LENGTH { + if let Some(child) = children.at(self.child_index) { let child_ref = child.try_into() .map_err(Error::InvalidChildReference)?; self.children[self.child_index] = Some(child_ref); @@ -238,8 +236,8 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { fn make_child_entry( proof_iter: &mut I, child: NodeHandle<'a>, - prefix: LeftNibbleSlice<'a>, - ) -> Result> + prefix: LeftNibbleSlice<'a, L::Nibble>, + ) -> Result, CError>> where I: Iterator>, { @@ -254,7 +252,7 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { } } NodeHandle::Hash(data) => { - let mut hash = C::HashOut::default(); + let mut hash = TrieHash::::default(); if data.len() != hash.as_ref().len() { return Err(Error::InvalidChildReference(data.to_vec())); } @@ -265,7 +263,7 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { } fn advance_item(&mut self, items_iter: &mut Peekable) - -> Result, Error> + -> Result, Error, CError>> where I: Iterator)> { @@ -302,7 +300,7 @@ impl<'a, C: NodeCodec> StackEntry<'a, C> { } } -enum ValueMatch<'a> { +enum ValueMatch<'a, N> { /// The key matches a leaf node, so the value at the key must be present. MatchesLeaf, /// The key matches a branch node, so the value at the key may or may not be present. @@ -312,14 +310,16 @@ enum ValueMatch<'a> { /// The key matches a location in trie, but the value was not omitted. NotOmitted, /// The key may match below a child of this node. Parameter is the prefix of the child node. - IsChild(LeftNibbleSlice<'a>), + IsChild(LeftNibbleSlice<'a, N>), } /// Determines whether a node on the stack carries a value at the given key or whether any nodes /// in the subtrie do. The prefix of the node is given by the first `prefix_len` nibbles of `key`. -fn match_key_to_node<'a>(key: &LeftNibbleSlice<'a>, prefix_len: usize, node: &Node) - -> ValueMatch<'a> -{ +fn match_key_to_node<'a, N: NibbleOps>( + key: &LeftNibbleSlice<'a, N>, + prefix_len: usize, + node: &Node +) -> ValueMatch<'a, N> { match node { Node::Empty => ValueMatch::NotFound, Node::Leaf(partial, value) => { @@ -357,12 +357,12 @@ fn match_key_to_node<'a>(key: &LeftNibbleSlice<'a>, prefix_len: usize, node: &No /// Determines whether a branch node on the stack carries a value at the given key or whether any /// nodes in the subtrie do. The key of the branch node value is given by the first /// `prefix_plus_partial_len` nibbles of `key`. 
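// Illustrative sketch (not part of the patch): a simplified model of the
// branch-descent decision made below, working over plain nibble arrays instead of
// `LeftNibbleSlice` and ignoring value omission. The enum and function are local
// to this example.
#[cfg(test)]
mod branch_descent_sketch {
	#[derive(Debug, PartialEq, Eq)]
	enum Decision {
		ValueAtThisNode,
		NotFound,
		DescendInto(usize),
	}

	fn match_key(key: &[u8], prefix_plus_partial_len: usize, children: &[bool; 16]) -> Decision {
		if key.len() == prefix_plus_partial_len {
			// The key ends exactly at this branch: its value (if any) lives here.
			Decision::ValueAtThisNode
		} else {
			// Otherwise the next nibble selects the child to descend into.
			let index = key[prefix_plus_partial_len] as usize;
			if children[index] {
				Decision::DescendInto(index)
			} else {
				Decision::NotFound
			}
		}
	}

	#[test]
	fn decisions() {
		let mut children = [false; 16];
		children[0x4] = true;
		let key = [0x1u8, 0x2, 0x3, 0x4, 0x5];
		assert_eq!(match_key(&key, 5, &children), Decision::ValueAtThisNode);
		assert_eq!(match_key(&key, 3, &children), Decision::DescendInto(4));
		assert_eq!(match_key(&key, 2, &children), Decision::NotFound);
	}
}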
-fn match_key_to_branch_node<'a>( - key: &LeftNibbleSlice<'a>, +fn match_key_to_branch_node<'a, N: NibbleOps>( + key: &LeftNibbleSlice<'a, N>, prefix_plus_partial_len: usize, - children: &[Option; NIBBLE_LENGTH], + children: &BranchChildrenSlice, value: &Option<&[u8]>, -) -> ValueMatch<'a> +) -> ValueMatch<'a, N> { if key.len() == prefix_plus_partial_len { if value.is_none() { @@ -374,7 +374,7 @@ fn match_key_to_branch_node<'a>( let index = key.at(prefix_plus_partial_len) .expect("it's less than prefix.len(); qed") as usize; - if children[index].is_some() { + if children.at(index).is_some() { ValueMatch::IsChild(key.truncate(prefix_plus_partial_len + 1)) } else { ValueMatch::NotFound @@ -382,8 +382,8 @@ fn match_key_to_branch_node<'a>( } } -enum Step<'a> { - Descend(LeftNibbleSlice<'a>), +enum Step<'a, N> { + Descend(LeftNibbleSlice<'a, N>), UnwindStack, } @@ -423,7 +423,7 @@ pub fn verify_proof<'a, L, I, K, V>(root: &::Out, proof: &[Ve // A stack of child references to fill in omitted branch children for later trie nodes in the // proof. - let mut stack: Vec> = Vec::new(); + let mut stack: Vec> = Vec::new(); let root_node = match proof_iter.next() { Some(node) => node, @@ -482,4 +482,4 @@ pub fn verify_proof<'a, L, I, K, V>(root: &::Out, proof: &[Ve } Ok(()) -} \ No newline at end of file +} diff --git a/trie-db/src/trie_codec.rs b/trie-db/src/trie_codec.rs index 7d4783f8..26eef79d 100644 --- a/trie-db/src/trie_codec.rs +++ b/trie-db/src/trie_codec.rs @@ -25,20 +25,21 @@ //! expected to save roughly (n - 1) hashes in size where n is the number of nodes in the partial //! trie. -use hash_db::HashDB; +use hash_db::{HashDB}; +use crate::nibble::NibbleOps; use crate::{ CError, ChildReference, DBValue, NibbleVec, NodeCodec, Result, - TrieHash, TrieError, TrieDB, TrieDBNodeIterator, TrieLayout, - nibble_ops::NIBBLE_LENGTH, node::{Node, NodeHandle, NodeHandlePlan, NodePlan, OwnedNode}, + TrieHash, TrieError, TrieDB, TrieDBNodeIterator, TrieLayout, TrieChildRangeIndex, + node::{Node, NodeHandle, NodePlan, OwnedNode, BranchChildrenNodePlan}, }; use crate::rstd::{ - boxed::Box, convert::TryInto, marker::PhantomData, rc::Rc, result, vec, vec::Vec, + boxed::Box, convert::TryInto, rc::Rc, result, vec, vec::Vec, }; -struct EncoderStackEntry { +struct EncoderStackEntry { /// The prefix is the nibble path to the node in the trie. - prefix: NibbleVec, - node: Rc>, + prefix: NibbleVec, + node: Rc>, /// The next entry in the stack is a child of the preceding entry at this index. For branch /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, @@ -47,10 +48,9 @@ struct EncoderStackEntry { /// The encoding of the subtrie nodes rooted at this entry, which is built up in /// `encode_compact`. output_index: usize, - _marker: PhantomData, } -impl EncoderStackEntry { +impl EncoderStackEntry { /// Given the prefix of the next child node, identify its index and advance `child_index` to /// that. For a given entry, this must be called sequentially only with strictly increasing /// child prefixes. Returns an error if the child prefix is not a child of this entry or if @@ -59,7 +59,7 @@ impl EncoderStackEntry { /// Preconditions: /// - self.prefix + partial must be a prefix of child_prefix. /// - if self.node is a branch, then child_prefix must be longer than self.prefix + partial. 
- fn advance_child_index(&mut self, child_prefix: &NibbleVec) + fn advance_child_index(&mut self, child_prefix: &NibbleVec) -> result::Result<(), &'static str> { match self.node.node_plan() { @@ -95,7 +95,7 @@ impl EncoderStackEntry { } /// Generates the encoding of the subtrie rooted at this entry. - fn encode_node(&self) -> Result, C::HashOut, C::Error> { + fn encode_node(&self) -> Result, TrieHash, CError> { let node_data = self.node.data(); Ok(match self.node.node_plan() { NodePlan::Empty | NodePlan::Leaf { .. } => node_data.to_vec(), @@ -104,22 +104,32 @@ impl EncoderStackEntry { node_data.to_vec() } else { let partial = partial.build(node_data); - let empty_child = ChildReference::Inline(C::HashOut::default(), 0); - C::extension_node(partial.right_iter(), partial.len(), empty_child) + let empty_child = ChildReference::Inline(TrieHash::::default(), 0); + L::Codec::extension_node(partial.right_iter(), partial.len(), empty_child) } } NodePlan::Branch { value, children } => { - C::branch_node( - Self::branch_children(node_data, &children, &self.omit_children)?.iter(), + let mut result: Result<(), TrieHash, CError> = Ok(()); + let result = &mut result; + let children = (0..L::Nibble::NIBBLE_LENGTH).map(|i| { + Self::branch_children(i, node_data, children, &self.omit_children, result) + }); + L::Codec::branch_node( + children, value.clone().map(|range| &node_data[range]) ) } NodePlan::NibbledBranch { partial, value, children } => { let partial = partial.build(node_data); - C::branch_node_nibbled( + let mut result: Result<(), TrieHash, CError> = Ok(()); + let result = &mut result; + let children = (0..L::Nibble::NIBBLE_LENGTH).map(|i| { + Self::branch_children(i, node_data, children, &self.omit_children, result) + }); + L::Codec::branch_node_nibbled( partial.right_iter(), partial.len(), - Self::branch_children(node_data, &children, &self.omit_children)?.iter(), + children, value.clone().map(|range| &node_data[range]) ) } @@ -132,29 +142,33 @@ impl EncoderStackEntry { /// - omit_children has size NIBBLE_LENGTH. /// - omit_children[i] is only true if child_handles[i] is Some fn branch_children( + i: usize, node_data: &[u8], - child_handles: &[Option; NIBBLE_LENGTH], + child_handles: &BranchChildrenNodePlan>, omit_children: &[bool], - ) -> Result<[Option>; NIBBLE_LENGTH], C::HashOut, C::Error> + result: &mut Result<(), TrieHash, CError>, + ) -> Option>> { - let empty_child = ChildReference::Inline(C::HashOut::default(), 0); - let mut children = [None; NIBBLE_LENGTH]; - for i in 0..NIBBLE_LENGTH { - children[i] = if omit_children[i] { - Some(empty_child) - } else if let Some(child_plan) = &child_handles[i] { - let child_ref = child_plan - .build(node_data) - .try_into() - .map_err(|hash| Box::new( - TrieError::InvalidHash(C::HashOut::default(), hash) - ))?; - Some(child_ref) - } else { - None - }; + let empty_child = ChildReference::Inline(TrieHash::::default(), 0); + if omit_children[i] { + Some(empty_child) + } else if let Some(child_plan) = &child_handles.at(i) { + let child_ref = child_plan + .build(node_data) + .try_into() + .map_err(|hash| Box::new( + TrieError::InvalidHash(TrieHash::::default(), hash) + )); + match child_ref { + Ok(child_ref) => Some(child_ref), + Err(e) => { + *result = Err(e); + None + }, + } + } else { + None } - Ok(children) } } @@ -173,7 +187,7 @@ pub fn encode_compact(db: &TrieDB) -> Result>, TrieHash, CE // The stack of nodes through a path in the trie. Each entry is a child node of the preceding // entry. 
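`branch_children` above no longer builds the whole child array up front; the codec is handed a lazy iterator, one element per child index, and any conversion failure is smuggled out through a `&mut Result` that is only checked after the iterator has been consumed. A self-contained toy version of that error side-channel pattern (string parsing stands in for the child-reference conversion):

// Sketch: map over the child slots, record a failure instead of returning it,
// keep yielding items so the consumer still sees every index, then surface the
// captured error afterwards.
fn try_build_children(raw: &[Option<&str>]) -> Result<Vec<Option<u32>>, String> {
    let mut error: Result<(), String> = Ok(());
    let children: Vec<Option<u32>> = raw
        .iter()
        .map(|slot| match slot {
            Some(s) => match s.parse::<u32>() {
                Ok(v) => Some(v),
                Err(e) => {
                    // Stash the failure; the diff does this through a
                    // `&mut Result` shared with the per-child helper.
                    error = Err(format!("bad child {:?}: {}", s, e));
                    None
                }
            },
            None => None,
        })
        .collect(); // stands in for the codec consuming the child iterator
    error?;
    Ok(children)
}

fn main() {
    assert_eq!(
        try_build_children(&[Some("1"), None, Some("3")]),
        Ok(vec![Some(1), None, Some(3)])
    );
    assert!(try_build_children(&[Some("1"), Some("x"), None]).is_err());
}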
- let mut stack: Vec> = Vec::new(); + let mut stack: Vec> = Vec::new(); // TrieDBNodeIterator guarantees that: // - It yields at least one node. @@ -219,7 +233,8 @@ pub fn encode_compact(db: &TrieDB) -> Result>, TrieHash, CE let children_len = match node.node_plan() { NodePlan::Empty | NodePlan::Leaf { .. } => 0, NodePlan::Extension { .. } => 1, - NodePlan::Branch { .. } | NodePlan::NibbledBranch { .. } => NIBBLE_LENGTH, + NodePlan::Branch { .. } + | NodePlan::NibbledBranch { .. } => L::Nibble::NIBBLE_LENGTH, }; stack.push(EncoderStackEntry { prefix, @@ -227,7 +242,6 @@ pub fn encode_compact(db: &TrieDB) -> Result>, TrieHash, CE child_index: 0, omit_children: vec![false; children_len], output_index: output.len(), - _marker: PhantomData::default(), }); // Insert a placeholder into output which will be replaced when this new entry is // popped from the stack. @@ -250,17 +264,16 @@ pub fn encode_compact(db: &TrieDB) -> Result>, TrieHash, CE Ok(output) } -struct DecoderStackEntry<'a, C: NodeCodec> { - node: Node<'a>, +struct DecoderStackEntry<'a, L: TrieLayout> { + node: Node<'a, L::Nibble>, /// The next entry in the stack is a child of the preceding entry at this index. For branch /// nodes, the index is in [0, NIBBLE_LENGTH] and for extension nodes, the index is in [0, 1]. child_index: usize, /// The reconstructed child references. - children: Vec>>, - _marker: PhantomData, + children: Vec>>>, } -impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { +impl<'a, L: TrieLayout> DecoderStackEntry<'a, L> { /// Advance the child index until either it exceeds the number of children or the child is /// marked as omitted. Omitted children are indicated by an empty inline reference. For each /// child that is passed over and not omitted, copy over the child reference from the node to @@ -269,16 +282,16 @@ impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { /// Returns true if the child index is past the last child, meaning the `children` references /// list is complete. If this returns true and the entry is an extension node, then /// `children[0]` is guaranteed to be Some. - fn advance_child_index(&mut self) -> Result { - match self.node { + fn advance_child_index(&mut self) -> Result, CError> { + match &mut self.node { Node::Extension(_, child) if self.child_index == 0 => { match child { NodeHandle::Inline(data) if data.is_empty() => return Ok(false), _ => { - let child_ref = child.try_into() + let child_ref = child.clone().try_into() .map_err(|hash| Box::new( - TrieError::InvalidHash(C::HashOut::default(), hash) + TrieError::InvalidHash(TrieHash::::default(), hash) ))?; self.children[self.child_index] = Some(child_ref); } @@ -286,14 +299,14 @@ impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { self.child_index += 1; } Node::Branch(children, _) | Node::NibbledBranch(_, children, _) => { - while self.child_index < NIBBLE_LENGTH { - match children[self.child_index] { + while self.child_index < L::Nibble::NIBBLE_LENGTH { + match children.at(self.child_index) { Some(NodeHandle::Inline(data)) if data.is_empty() => return Ok(false), Some(child) => { let child_ref = child.try_into() .map_err(|hash| Box::new( - TrieError::InvalidHash(C::HashOut::default(), hash) + TrieError::InvalidHash(TrieHash::::default(), hash) ))?; self.children[self.child_index] = Some(child_ref); } @@ -309,7 +322,7 @@ impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { /// Push the partial key of this entry's node (including the branch nibble) to the given /// prefix. 
- fn push_to_prefix(&self, prefix: &mut NibbleVec) { + fn push_to_prefix(&self, prefix: &mut NibbleVec) { match self.node { Node::Empty => {} Node::Leaf(partial, _) | Node::Extension(partial, _) => { @@ -327,7 +340,7 @@ impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { /// Pop the partial key of this entry's node (including the branch nibble) from the given /// prefix. - fn pop_from_prefix(&self, prefix: &mut NibbleVec) { + fn pop_from_prefix(&self, prefix: &mut NibbleVec) { match self.node { Node::Empty => {} Node::Leaf(partial, _) | Node::Extension(partial, _) => { @@ -350,20 +363,20 @@ impl<'a, C: NodeCodec> DecoderStackEntry<'a, C> { fn encode_node(self) -> Vec { match self.node { Node::Empty => - C::empty_node().to_vec(), + L::Codec::empty_node().to_vec(), Node::Leaf(partial, value) => - C::leaf_node(partial.right(), value), + L::Codec::leaf_node(partial.right(), value), Node::Extension(partial, _) => - C::extension_node( + L::Codec::extension_node( partial.right_iter(), partial.len(), self.children[0] .expect("required by method precondition; qed"), ), Node::Branch(_, value) => - C::branch_node(self.children.into_iter(), value), + L::Codec::branch_node(self.children.into_iter(), value), Node::NibbledBranch(partial, _, value) => - C::branch_node_nibbled( + L::Codec::branch_node_nibbled( partial.right_iter(), partial.len(), self.children.iter(), @@ -393,7 +406,7 @@ pub fn decode_compact(db: &mut DB, encoded: &[Vec]) { // The stack of nodes through a path in the trie. Each entry is a child node of the preceding // entry. - let mut stack: Vec> = Vec::new(); + let mut stack: Vec> = Vec::new(); // The prefix of the next item to be read from the slice of encoded items. let mut prefix = NibbleVec::new(); @@ -405,13 +418,12 @@ pub fn decode_compact(db: &mut DB, encoded: &[Vec]) let children_len = match node { Node::Empty | Node::Leaf(..) => 0, Node::Extension(..) => 1, - Node::Branch(..) | Node::NibbledBranch(..) => NIBBLE_LENGTH, + Node::Branch(..) | Node::NibbledBranch(..) 
=> L::Nibble::NIBBLE_LENGTH, }; let mut last_entry = DecoderStackEntry { node, child_index: 0, children: vec![None; children_len], - _marker: PhantomData::default(), }; loop { diff --git a/trie-db/src/triedb.rs b/trie-db/src/triedb.rs index 71d27f32..196e7c02 100644 --- a/trie-db/src/triedb.rs +++ b/trie-db/src/triedb.rs @@ -20,8 +20,9 @@ use super::node::{NodeHandle, Node, OwnedNode, decode_hash}; use super::lookup::Lookup; use super::{Result, DBValue, Trie, TrieItem, TrieError, TrieIterator, Query, TrieLayout, CError, TrieHash}; -use super::nibble::NibbleVec; +#[cfg(feature = "std")] +use super::nibble::NibbleVec; #[cfg(feature = "std")] use crate::rstd::{fmt, vec::Vec}; @@ -90,7 +91,7 @@ where parent_hash: TrieHash, node_handle: NodeHandle, partial_key: Prefix, - ) -> Result<(OwnedNode, Option>), TrieHash, CError> { + ) -> Result<(OwnedNode, Option>), TrieHash, CError> { let (node_hash, node_data) = match node_handle { NodeHandle::Hash(data) => { let node_hash = decode_hash::(data) @@ -153,7 +154,7 @@ where { trie: &'db TrieDB<'db, L>, node_key: NodeHandle<'a>, - partial_key: NibbleVec, + partial_key: NibbleVec, index: Option, } @@ -193,7 +194,7 @@ where .finish() }, Node::Branch(ref nodes, ref value) => { - let nodes: Vec> = nodes.into_iter() + let nodes: Vec> = nodes.iter() .enumerate() .filter_map(|(i, n)| n.map(|n| (i, n))) .map(|(i, n)| TrieAwareDebugNode { @@ -314,9 +315,9 @@ impl<'a, L: TrieLayout> Iterator for TrieDBIterator<'a, L> { if let Some(value) = maybe_value { let (key_slice, maybe_extra_nibble) = prefix.as_prefix(); let key = key_slice.to_vec(); - if let Some(extra_nibble) = maybe_extra_nibble { + if maybe_extra_nibble.0 > 0 { return Some(Err(Box::new( - TrieError::ValueAtIncompleteKey(key, extra_nibble) + TrieError::ValueAtIncompleteKey(key, maybe_extra_nibble) ))); } return Some(Ok((key, value.to_vec()))); diff --git a/trie-db/src/triedbmut.rs b/trie-db/src/triedbmut.rs index 51351555..34820c62 100644 --- a/trie-db/src/triedbmut.rs +++ b/trie-db/src/triedbmut.rs @@ -18,12 +18,12 @@ use super::{DBValue, node::NodeKey}; use super::{Result, TrieError, TrieMut, TrieLayout, TrieHash, CError}; use super::lookup::Lookup; use super::node::{NodeHandle as EncodedNodeHandle, Node as EncodedNode, decode_hash}; - use hash_db::{HashDB, Hasher, Prefix, EMPTY_PREFIX}; +use crate::ChildIndex; use hashbrown::HashSet; use crate::node_codec::NodeCodec; -use crate::nibble::{NibbleVec, NibbleSlice, nibble_ops, BackingByteVec}; +use crate::nibble::{NibbleVec, NibbleSlice, NibbleOps, BackingByteVec}; use crate::rstd::{ boxed::Box, convert::TryFrom, hash::Hash, mem, ops::Index, result, vec::Vec, VecDeque, }; @@ -34,15 +34,16 @@ use log::trace; #[cfg(feature = "std")] use crate::rstd::fmt::{self, Debug}; - // For lookups into the Node storage buffer. // This is deliberately non-copyable. #[cfg_attr(feature = "std", derive(Debug))] -struct StorageHandle(usize); +#[derive(PartialEq, Eq, Clone)] +pub struct StorageHandle(usize); // Handles to nodes in the trie. #[cfg_attr(feature = "std", derive(Debug))] -enum NodeHandle { +#[derive(PartialEq, Eq, Clone)] +pub enum NodeHandle { /// Loaded into memory. 
InMemory(StorageHandle), /// Either a hash or an inline node @@ -55,19 +56,17 @@ impl From for NodeHandle { } } -fn empty_children() -> Box<[Option>; 16]> { - Box::new([ - None, None, None, None, None, None, None, None, - None, None, None, None, None, None, None, None, - ]) +fn empty_children() -> Box { + Box::new(Default::default()) } /// Type alias to indicate the nibble covers a full key, /// therefore its left side is a full prefix. -type NibbleFullKey<'key> = NibbleSlice<'key>; +type NibbleFullKey<'key, N> = NibbleSlice<'key, N>; /// Node types in the Trie. -enum Node { +/// TODO EMCH consider passing TrieLayout as parameter ?? +enum Node { /// Empty node. Empty, /// A leaf node contains the end of a key and a value. @@ -79,10 +78,10 @@ enum Node { /// a flag indicating it is an extension. /// The child node is always a branch. Extension(NodeKey, NodeHandle), - /// A branch has up to 16 children and an optional value. - Branch(Box<[Option>; 16]>, Option), + /// A branch has up to `NIBBLE_LENGTH` children (one per possible nibble) and an optional value. + Branch(Box, Option), /// Branch node with support for a nibble (to avoid extension node). - NibbledBranch(NodeKey, Box<[Option>; 16]>, Option), + NibbledBranch(NodeKey, Box, Option), } #[cfg(feature = "std")] @@ -99,7 +98,7 @@ impl<'a> Debug for ToHex<'a> { } } #[cfg(feature = "std")] -impl Debug for Node { +impl Debug for Node { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Self::Empty => write!(fmt, "Empty"), @@ -115,17 +114,18 @@ impl Debug for Node { } } -impl Node +impl Node where O: AsRef<[u8]> + AsMut<[u8]> + Default + crate::MaybeDebug - + PartialEq + Eq + Hash + Send + Sync + Clone + Copy + + PartialEq + Eq + Hash + Send + Sync + Clone + Copy, + OC: ChildIndex>, { // load an inline node into memory or get the hash to do the lookup later.
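`empty_children` now just boxes `Default::default()`, and the `Node` variants store a generic children container instead of a literal 16-slot array. A minimal sketch of the shape such a container needs in order to satisfy the call sites in this diff (`at`, `at_mut`, `take`, `from_iter`, `NIBBLE_LENGTH`, `Default`); the names mirror the diff but the definitions here are illustrative only, the crate's actual `ChildIndex` trait is richer:

// Sketch: fixed-arity child storage behind a small trait, so the same node
// code can work for radix 16, radix 4, and so on.
pub trait ChildIndex<T>: Default {
    const NIBBLE_LENGTH: usize;
    fn from_iter<I: Iterator<Item = Option<T>>>(iter: I) -> Self;
    fn at(&self, i: usize) -> Option<&T>;
    fn at_mut(&mut self, i: usize) -> &mut Option<T>;
    fn take(&mut self, i: usize) -> Option<T>;
}

/// Radix-16 children, backed by a plain array.
pub struct ChildIndex16<T>([Option<T>; 16]);

impl<T> Default for ChildIndex16<T> {
    fn default() -> Self {
        // `Option<T>` is always `Default`, so the whole array is too.
        ChildIndex16(Default::default())
    }
}

impl<T> ChildIndex<T> for ChildIndex16<T> {
    const NIBBLE_LENGTH: usize = 16;

    fn from_iter<I: Iterator<Item = Option<T>>>(iter: I) -> Self {
        let mut result = Self::default();
        for (i, child) in iter.take(16).enumerate() {
            result.0[i] = child;
        }
        result
    }

    fn at(&self, i: usize) -> Option<&T> {
        self.0[i].as_ref()
    }

    fn at_mut(&mut self, i: usize) -> &mut Option<T> {
        &mut self.0[i]
    }

    fn take(&mut self, i: usize) -> Option<T> {
        self.0[i].take()
    }
}

// With a `Default` bound on the container, `empty_children` collapses to:
fn empty_children<C: Default>() -> Box<C> {
    Box::new(Default::default())
}

fn main() {
    let mut children: Box<ChildIndex16<u8>> = empty_children();
    *children.at_mut(3) = Some(7);
    assert_eq!(children.at(3), Some(&7));
    assert_eq!(children.take(3), Some(7));
    assert_eq!(children.at(3), None);
}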
fn inline_or_hash( parent_hash: H::Out, child: EncodedNodeHandle, db: &dyn HashDB, - storage: &mut NodeStorage + storage: &mut NodeStorage ) -> Result, H::Out, C::Error> where C: NodeCodec, @@ -150,7 +150,7 @@ where node_hash: H::Out, data: &'a[u8], db: &dyn HashDB, - storage: &'b mut NodeStorage, + storage: &'b mut NodeStorage, ) -> Result where C: NodeCodec, H: Hasher, @@ -167,34 +167,44 @@ where ) }, EncodedNode::Branch(encoded_children, val) => { - let mut child = |i:usize| match encoded_children[i] { - Some(child) => Self::inline_or_hash::(node_hash, child, db, storage) - .map(Some), - None => Ok(None), - }; + let mut error: Result<(), H::Out, C::Error> = Ok(()); + let iter = (0..OC::NIBBLE_LENGTH).map(|i| { + if let Some(child) = encoded_children.at(i) { + match Self::inline_or_hash::(node_hash, child, db, storage) { + Ok(v) => Some(v), + Err(e) => { + error = Err(e); + None + }, + } + } else { + None + } + }); - let children = Box::new([ - child(0)?, child(1)?, child(2)?, child(3)?, - child(4)?, child(5)?, child(6)?, child(7)?, - child(8)?, child(9)?, child(10)?, child(11)?, - child(12)?, child(13)?, child(14)?, child(15)?, - ]); + let children = Box::new(OC::from_iter(iter)); + error?; Node::Branch(children, val.map(|v| v.to_vec())) }, EncodedNode::NibbledBranch(k, encoded_children, val) => { - let mut child = |i:usize| match encoded_children[i] { - Some(child) => Self::inline_or_hash::(node_hash, child, db, storage) - .map(Some), - None => Ok(None), - }; + let mut error: Result<(), H::Out, C::Error> = Ok(()); + let iter = (0..OC::NIBBLE_LENGTH).map(|i| { + if let Some(child) = encoded_children.at(i) { + match Self::inline_or_hash::(node_hash, child, db, storage) { + Ok(v) => Some(v), + Err(e) => { + error = Err(e); + None + }, + } + } else { + None + } + }); - let children = Box::new([ - child(0)?, child(1)?, child(2)?, child(3)?, - child(4)?, child(5)?, child(6)?, child(7)?, - child(8)?, child(9)?, child(10)?, child(11)?, - child(12)?, child(13)?, child(14)?, child(15)?, - ]); + let children = Box::new(OC::from_iter(iter)); + error?; Node::NibbledBranch(k.into(), children, val.map(|v| v.to_vec())) }, @@ -203,16 +213,17 @@ where } // TODO: parallelize - fn into_encoded(self, mut child_cb: F) -> Vec + fn into_encoded(self, mut child_cb: F) -> Vec where - C: NodeCodec, - F: FnMut(NodeHandle, Option<&NibbleSlice>, Option) -> ChildReference, + N: NibbleOps, + C: NodeCodec, + F: FnMut(NodeHandle, Option<&NibbleSlice>, Option) -> ChildReference, H: Hasher, { match self { Node::Empty => C::empty_node().to_vec(), Node::Leaf(partial, value) => { - let pr = NibbleSlice::new_offset(&partial.1[..], partial.0); + let pr = NibbleSlice::::new_offset(&partial.1[..], partial.0); C::leaf_node(pr.right(), &value) }, Node::Extension(partial, child) => { @@ -238,7 +249,7 @@ where ) }, Node::NibbledBranch(partial, mut children, value) => { - let pr = NibbleSlice::new_offset(&partial.1[..], partial.0); + let pr = NibbleSlice::::new_offset(&partial.1[..], partial.0); let it = pr.right_iter(); C::branch_node_nibbled( it, @@ -262,25 +273,25 @@ where } // post-inspect action. -enum Action { +enum Action { // Replace a node with a new one. - Replace(Node), + Replace(Node), // Restore the original node. This trusts that the node is actually the original. - Restore(Node), + Restore(Node), // if it is a new node, just clears the storage. Delete, } // post-insert action. Same as action without delete -enum InsertAction { +enum InsertAction { // Replace a node with a new one. 
- Replace(Node), + Replace(Node), // Restore the original node. - Restore(Node), + Restore(Node), } -impl InsertAction { - fn into_action(self) -> Action { +impl InsertAction { + fn into_action(self) -> Action { match self { InsertAction::Replace(n) => Action::Replace(n), InsertAction::Restore(n) => Action::Restore(n), @@ -288,7 +299,7 @@ impl InsertAction { } // unwrap the node, disregarding replace or restore state. - fn unwrap_node(self) -> Node { + fn unwrap_node(self) -> Node { match self { InsertAction::Replace(n) | InsertAction::Restore(n) => n, } @@ -296,15 +307,15 @@ impl InsertAction { } // What kind of node is stored here. -enum Stored { +enum Stored { // A new node. - New(Node), + New(Node), // A cached node, loaded from the DB. - Cached(Node, H), + Cached(Node, H), } /// Used to build a collection of child nodes from a collection of `NodeHandle`s -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Eq, PartialEq)] #[cfg_attr(feature = "std", derive(Debug))] pub enum ChildReference { // `HO` is e.g. `H256`, i.e. the output of a `Hasher` Hash(HO), @@ -339,12 +350,12 @@ impl<'a, HO> TryFrom> for ChildReference } /// Compact and cache-friendly storage for Trie nodes. -struct NodeStorage { - nodes: Vec>, +struct NodeStorage { + nodes: Vec>, free_indices: VecDeque, } -impl NodeStorage { +impl NodeStorage { /// Create a new storage. fn empty() -> Self { NodeStorage { @@ -354,7 +365,7 @@ impl NodeStorage { } /// Allocate a new node in the storage. - fn alloc(&mut self, stored: Stored) -> StorageHandle { + fn alloc(&mut self, stored: Stored) -> StorageHandle { if let Some(idx) = self.free_indices.pop_front() { self.nodes[idx] = stored; StorageHandle(idx) @@ -365,7 +376,7 @@ impl NodeStorage { } /// Remove a node from the storage, consuming the handle and returning the node. - fn destroy(&mut self, handle: StorageHandle) -> Stored { + fn destroy(&mut self, handle: StorageHandle) -> Stored { let idx = handle.0; self.free_indices.push_back(idx); @@ -373,10 +384,10 @@ impl NodeStorage { } } -impl<'a, H> Index<&'a StorageHandle> for NodeStorage { - type Output = Node; +impl<'a, H, C> Index<&'a StorageHandle> for NodeStorage { + type Output = Node; - fn index(&self, handle: &'a StorageHandle) -> &Node { + fn index(&self, handle: &'a StorageHandle) -> &Node { match self.nodes[handle.0] { Stored::New(ref node) => node, Stored::Cached(ref node, _) => node, @@ -415,11 +426,11 @@ pub struct TrieDBMut<'a, L> where L: TrieLayout, { - storage: NodeStorage>, + storage: NodeStorage, L::NodeIndex>, db: &'a mut dyn HashDB, root: &'a mut TrieHash, root_handle: NodeHandle>, - death_row: HashSet<(TrieHash, (BackingByteVec, Option))>, + death_row: HashSet<(TrieHash, (BackingByteVec, (u8, u8)))>, /// The number of hash operations this trie has performed. /// Note that none are performed until changes are committed. hash_count: usize, @@ -495,16 +506,16 @@ where // If restored or replaced, returns the new node along with a flag of whether it was changed. fn inspect( &mut self, - stored: Stored>, - key: &mut NibbleFullKey, + stored: Stored, L::NodeIndex>, + key: &mut NibbleFullKey, inspector: F, - ) -> Result>, bool)>, TrieHash, CError> + ) -> Result, L::NodeIndex>, bool)>, TrieHash, CError> where F: FnOnce( &mut Self, - Node>, - &mut NibbleFullKey, - ) -> Result>, TrieHash, CError>, + Node, L::NodeIndex>, + &mut NibbleFullKey, + ) -> Result, L::NodeIndex>, TrieHash, CError>, { let current_key = key.clone(); Ok(match stored { @@ -530,7 +541,7 @@ where // Walk the trie, attempting to find the key's node. 
fn lookup<'x, 'key>( &'x self, - mut partial: NibbleSlice<'key>, + mut partial: NibbleSlice<'key, L::Nibble>, handle: &NodeHandle>, ) -> Result, TrieHash, CError> where 'x: 'key @@ -565,7 +576,7 @@ where return Ok(value.as_ref().map(|v| v.to_vec())); } else { let idx = partial.at(0); - match children[idx as usize].as_ref() { + match children.at(idx as usize) { Some(child) => (1, child), None => return Ok(None), } @@ -577,7 +588,7 @@ where return Ok(value.as_ref().map(|v| v.to_vec())); } else if partial.starts_with(&slice) { let idx = partial.at(0); - match children[idx as usize].as_ref() { + match children.at(idx as usize) { Some(child) => (1 + slice.len(), child), None => return Ok(None), } @@ -597,7 +608,7 @@ where fn insert_at( &mut self, handle: NodeHandle>, - key: &mut NibbleFullKey, + key: &mut NibbleFullKey, value: DBValue, old_val: &mut Option, ) -> Result<(StorageHandle, bool), TrieHash, CError> { @@ -617,11 +628,11 @@ where /// The insertion inspector. fn insert_inspector( &mut self, - node: Node>, - key: &mut NibbleFullKey, + node: Node, L::NodeIndex>, + key: &mut NibbleFullKey, value: DBValue, old_val: &mut Option, - ) -> Result>, TrieHash, CError> { + ) -> Result, L::NodeIndex>, TrieHash, CError> { let partial = key.clone(); #[cfg(feature = "std")] @@ -650,10 +661,10 @@ where } else { let idx = partial.at(0) as usize; key.advance(1); - if let Some(child) = children[idx].take() { + if let Some(child) = children.take(idx) { // Original had something there. recurse down into it. let (new_child, changed) = self.insert_at(child, key, value, old_val)?; - children[idx] = Some(new_child.into()); + *children.at_mut(idx) = Some(new_child.into()); if !changed { // The new node we composed didn't change. // It means our branch is untouched too. @@ -664,7 +675,7 @@ where let leaf = self.storage.alloc( Stored::New(Node::Leaf(key.to_stored(), value)) ); - children[idx] = Some(leaf.into()); + *children.at_mut(idx) = Some(leaf.into()); } InsertAction::Replace(Node::Branch(children, stored_value)) @@ -704,11 +715,11 @@ where let nbranch_partial = existing_key.mid(common + 1).to_stored(); let low = Node::NibbledBranch(nbranch_partial, children, stored_value); let ix = existing_key.at(common); - let mut children = empty_children(); + let mut children = empty_children::(); let alloc_storage = self.storage.alloc(Stored::New(low)); - children[ix as usize] = Some(alloc_storage.into()); + *children.at_mut(ix as usize) = Some(alloc_storage.into()); if partial.len() - common == 0 { InsertAction::Replace(Node::NibbledBranch( @@ -722,7 +733,7 @@ where let stored_leaf = Node::Leaf(partial.mid(common + 1).to_stored(), value); let leaf = self.storage.alloc(Stored::New(stored_leaf)); - children[ix as usize] = Some(leaf.into()); + *children.at_mut(ix as usize) = Some(leaf.into()); InsertAction::Replace(Node::NibbledBranch( existing_key.to_stored_range(common), children, @@ -738,10 +749,10 @@ where trace!(target: "trie", "branch: ROUTE,AUGMENT"); let idx = partial.at(common) as usize; key.advance(common + 1); - if let Some(child) = children[idx].take() { + if let Some(child) = children.take(idx) { // Original had something there. recurse down into it. let (new_child, changed) = self.insert_at(child, key, value, old_val)?; - children[idx] = Some(new_child.into()); + *children.at_mut(idx) = Some(new_child.into()); if !changed { // The new node we composed didn't change. // It means our branch is untouched too. 
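The insert path above (and the removal path that follows) updates a branch slot through the same discipline once direct indexing is gone: `take` the child handle out of the container, recurse into it, then write whatever comes back through `at_mut`. A self-contained toy version of that take-recurse-put-back step (plain integers stand in for node handles and storage):

// Toy slot store standing in for the branch children container sketched
// earlier; only the two accessors this pattern needs are shown.
struct Slots([Option<u32>; 16]);

impl Slots {
    fn take(&mut self, i: usize) -> Option<u32> {
        self.0[i].take()
    }
    fn at_mut(&mut self, i: usize) -> &mut Option<u32> {
        &mut self.0[i]
    }
}

// The shape of the slot update in insert_at / remove_at above.
fn update_slot<F: FnOnce(u32) -> u32>(children: &mut Slots, idx: usize, recurse: F, fresh: u32) {
    if let Some(child) = children.take(idx) {
        // Occupied slot: recurse into the existing child, store the result.
        *children.at_mut(idx) = Some(recurse(child));
    } else {
        // Empty slot: allocate a brand-new child (a leaf node in the diff).
        *children.at_mut(idx) = Some(fresh);
    }
}

fn main() {
    let mut slots = Slots([None; 16]);
    update_slot(&mut slots, 3, |c| c + 1, 7);
    assert_eq!(slots.0[3], Some(7)); // empty slot received the fresh child
    update_slot(&mut slots, 3, |c| c + 1, 7);
    assert_eq!(slots.0[3], Some(8)); // occupied slot was recursed into
}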
@@ -757,7 +768,7 @@ where let leaf = self.storage.alloc( Stored::New(Node::Leaf(key.to_stored(), value)), ); - children[idx] = Some(leaf.into()); + *children.at_mut(idx) = Some(leaf.into()); } InsertAction::Replace(Node::NibbledBranch( existing_key.to_stored(), @@ -793,7 +804,7 @@ where ); // one of us isn't empty: transmute to branch here - let mut children = empty_children(); + let mut children = empty_children::(); let branch = if L::USE_EXTENSION && existing_key.is_empty() { // always replace since branch isn't leaf. Node::Branch(children, Some(stored_value)) @@ -803,7 +814,7 @@ where existing_key.mid(common + 1).to_stored(), stored_value, ); - children[idx] = Some(self.storage.alloc(Stored::New(new_leaf)).into()); + *children.at_mut(idx) = Some(self.storage.alloc(Stored::New(new_leaf)).into()); if L::USE_EXTENSION { Node::Branch(children, None) @@ -896,8 +907,8 @@ where assert!(!existing_key.is_empty()); let idx = existing_key.at(0) as usize; - let mut children = empty_children(); - children[idx] = if existing_key.len() == 1 { + let mut children = empty_children::(); + *children.at_mut(idx) = if existing_key.len() == 1 { // direct extension, just replace. Some(child_branch) } else { @@ -964,7 +975,7 @@ where fn remove_at( &mut self, handle: NodeHandle>, - key: &mut NibbleFullKey, + key: &mut NibbleFullKey, old_val: &mut Option, ) -> Result, TrieHash, CError> { let stored = match handle { @@ -987,10 +998,10 @@ where /// The removal inspector. fn remove_inspector( &mut self, - node: Node>, - key: &mut NibbleFullKey, + node: Node, L::NodeIndex>, + key: &mut NibbleFullKey, old_val: &mut Option, - ) -> Result>, TrieHash, CError> { + ) -> Result, L::NodeIndex>, TrieHash, CError> { let partial = key.clone(); Ok(match (node, partial.is_empty()) { (Node::Empty, _) => Action::Delete, @@ -1009,7 +1020,7 @@ where }, (Node::Branch(mut children, value), false) => { let idx = partial.at(0) as usize; - if let Some(child) = children[idx].take() { + if let Some(child) = children.take(idx) { #[cfg(feature = "std")] trace!( target: "trie", @@ -1020,7 +1031,7 @@ where key.advance(1); match self.remove_at(child, key, old_val)? { Some((new, changed)) => { - children[idx] = Some(new.into()); + *children.at_mut(idx) = Some(new.into()); let branch = Node::Branch(children, value); match changed { // child was changed, so we were too. @@ -1065,7 +1076,7 @@ where // common == existing_length && common < partial.len() : check children let idx = partial.at(common) as usize; - if let Some(child) = children[idx].take() { + if let Some(child) = children.take(idx) { #[cfg(feature = "std")] trace!( target: "trie", @@ -1076,7 +1087,7 @@ where key.advance(common + 1); match self.remove_at(child, key, old_val)? { Some((new, changed)) => { - children[idx] = Some(new.into()); + *children.at_mut(idx) = Some(new.into()); let branch = Node::NibbledBranch(encoded, children, value); match changed { // child was changed, so we were too. @@ -1117,7 +1128,7 @@ where target: "trie", "restoring leaf wrong partial, partial={:?}, existing={:?}", partial, - NibbleSlice::from_stored(&encoded), + NibbleSlice::::from_stored(&encoded), ); Action::Restore(Node::Leaf(encoded, value)) } @@ -1168,9 +1179,9 @@ where /// - Extension node followed by anything other than a Branch node. 
fn fix( &mut self, - node: Node>, - key: NibbleSlice, - ) -> Result>, TrieHash, CError> { + node: Node, L::NodeIndex>, + key: NibbleSlice, + ) -> Result, L::NodeIndex>, TrieHash, CError> { match node { Node::Branch(mut children, value) => { // if only a single value, transmute to leaf/extension and feed through fixed. @@ -1181,8 +1192,8 @@ where Many, }; let mut used_index = UsedIndex::None; - for i in 0..16 { - match (children[i].is_none(), &used_index) { + for i in 0..L::Nibble::NIBBLE_LENGTH { + match (children.at(i).is_none(), &used_index) { (false, &UsedIndex::None) => used_index = UsedIndex::One(i as u8), (false, &UsedIndex::One(_)) => { used_index = UsedIndex::Many; @@ -1198,8 +1209,8 @@ where (UsedIndex::One(a), None) => { // only one onward node. make an extension. - let new_partial = NibbleSlice::new_offset(&[a], 1).to_stored(); - let child = children[a as usize].take() + let new_partial = NibbleSlice::::new_offset(&[a], 1).to_stored(); + let child = children.take(a as usize) .expect("used_index only set if occupied; qed"); let new_node = Node::Extension(new_partial, child); self.fix(new_node, key) @@ -1208,7 +1219,7 @@ where // make a leaf. #[cfg(feature = "std")] trace!(target: "trie", "fixing: branch -> leaf"); - Ok(Node::Leaf(NibbleSlice::new(&[]).to_stored(), value)) + Ok(Node::Leaf(NibbleSlice::::new(&[]).to_stored(), value)) } (_, value) => { // all is well. @@ -1227,8 +1238,8 @@ where Many, }; let mut used_index = UsedIndex::None; - for i in 0..16 { - match (children[i].is_none(), &used_index) { + for i in 0..L::Nibble::NIBBLE_LENGTH { + match (children.at(i).is_none(), &used_index) { (false, &UsedIndex::None) => used_index = UsedIndex::One(i as u8), (false, &UsedIndex::One(_)) => { used_index = UsedIndex::Many; @@ -1243,17 +1254,18 @@ where panic!("Branch with no subvalues. Something went wrong."), (UsedIndex::One(a), None) => { // only one onward node. 
use child instead - let child = children[a as usize].take() + let child = children.take(a as usize) .expect("used_index only set if occupied; qed"); let mut key2 = key.clone(); - key2.advance((enc_nibble.1.len() * nibble_ops::NIBBLE_PER_BYTE) - enc_nibble.0); + key2.advance((enc_nibble.1.len() * L::Nibble::NIBBLE_PER_BYTE) - enc_nibble.0); let (start, alloc_start, prefix_end) = match key2.left() { - (start, None) => (start, None, Some(nibble_ops::push_at_left(0, a, 0))), - (start, Some(v)) => { + (start, (0, _v)) => (start, None, (1, L::Nibble::push_at_left(0, a, 0))), + (start, (nb, v)) if nb == L::Nibble::LAST_NIBBLE_INDEX => { let mut so: BackingByteVec = start.into(); - so.push(nibble_ops::pad_left(v) | a); - (start, Some(so), None) + so.push(L::Nibble::pad_left(L::Nibble::LAST_NIBBLE_INDEX, v) | a); + (start, Some(so), (0, 0)) }, + (start, (nb, v)) => (start, None, (nb + 1, L::Nibble::push_at_left(nb, a, v))), }; let child_prefix = (alloc_start.as_ref().map(|start| &start[..]).unwrap_or(start), prefix_end); let stored = match child { @@ -1276,11 +1288,11 @@ where match child_node { Node::Leaf(sub_partial, value) => { let mut enc_nibble = enc_nibble; - combine_key( + combine_key::( &mut enc_nibble, - (nibble_ops::NIBBLE_PER_BYTE - 1, &[a][..]), + (L::Nibble::NIBBLE_PER_BYTE - 1, &[a][..]), ); - combine_key( + combine_key::( &mut enc_nibble, (sub_partial.0, &sub_partial.1[..]), ); @@ -1288,11 +1300,11 @@ where }, Node::NibbledBranch(sub_partial, ch_children, ch_value) => { let mut enc_nibble = enc_nibble; - combine_key( + combine_key::( &mut enc_nibble, - (nibble_ops::NIBBLE_PER_BYTE - 1, &[a][..]), + (L::Nibble::NIBBLE_PER_BYTE - 1, &[a][..]), ); - combine_key( + combine_key::( &mut enc_nibble, (sub_partial.0, &sub_partial.1[..]), ); @@ -1320,15 +1332,15 @@ where // recursively, so there might be some prefix from branch. let last = partial.1[partial.1.len() - 1] & (255 >> 4); let mut key2 = key.clone(); - key2.advance((partial.1.len() * nibble_ops::NIBBLE_PER_BYTE) - partial.0 - 1); + key2.advance((partial.1.len() * L::Nibble::NIBBLE_PER_BYTE) - partial.0 - 1); let (start, alloc_start, prefix_end) = match key2.left() { - (start, None) => (start, None, Some(nibble_ops::push_at_left(0, last, 0))), - (start, Some(v)) => { + (start, (0, _v)) => (start, None, (1, L::Nibble::push_at_left(0, last, 0))), + (start, (nb, v)) if nb == L::Nibble::LAST_NIBBLE_INDEX => { let mut so: BackingByteVec = start.into(); - // Complete last byte with `last`. 
- so.push(nibble_ops::pad_left(v) | last); - (start, Some(so), None) + so.push(L::Nibble::pad_left(L::Nibble::LAST_NIBBLE_INDEX, v) | last); + (start, Some(so), (0, 0)) }, + (start, (nb, v)) => (start, None, (nb + 1, L::Nibble::push_at_left(nb, last, v))), }; let child_prefix = (alloc_start.as_ref().map(|start| &start[..]).unwrap_or(start), prefix_end); @@ -1356,7 +1368,7 @@ where } // subpartial let mut partial = partial; - combine_key(&mut partial, (sub_partial.0, &sub_partial.1[..])); + combine_key::(&mut partial, (sub_partial.0, &sub_partial.1[..])); #[cfg(feature = "std")] trace!( target: "trie", @@ -1373,7 +1385,7 @@ where } // subpartial oly let mut partial = partial; - combine_key(&mut partial, (sub_partial.0, &sub_partial.1[..])); + combine_key::(&mut partial, (sub_partial.0, &sub_partial.1[..])); #[cfg(feature = "std")] trace!( target: "trie", @@ -1422,7 +1434,7 @@ where match self.storage.destroy(handle) { Stored::New(node) => { let mut k = NibbleVec::new(); - let encoded_root = node.into_encoded::<_, L::Codec, L::Hash>( + let encoded_root = node.into_encoded::<_, L::Codec, L::Hash, L::Nibble>( |child, o_slice, o_index| { let mov = k.append_optional_slice_and_nibble(o_slice, o_index); let cr = self.commit_child(child, &mut k); @@ -1455,7 +1467,7 @@ where fn commit_child( &mut self, handle: NodeHandle>, - prefix: &mut NibbleVec, + prefix: &mut NibbleVec, ) -> ChildReference> { match handle { NodeHandle::Hash(hash) => ChildReference::Hash(hash), @@ -1466,7 +1478,7 @@ where let encoded = { let commit_child = | node_handle, - o_slice: Option<&NibbleSlice>, + o_slice: Option<&NibbleSlice>, o_index: Option | { let mov = prefix.append_optional_slice_and_nibble(o_slice, o_index); @@ -1474,7 +1486,7 @@ where prefix.drop_lasts(mov); cr }; - node.into_encoded::<_, L::Codec, L::Hash>(commit_child) + node.into_encoded::<_, L::Codec, L::Hash, L::Nibble>(commit_child) }; if encoded.len() >= L::Hash::LENGTH { let hash = self.db.insert(prefix.as_prefix(), &encoded[..]); @@ -1592,14 +1604,14 @@ where } /// combine two NodeKeys -fn combine_key(start: &mut NodeKey, end: (usize, &[u8])) { - debug_assert!(start.0 < nibble_ops::NIBBLE_PER_BYTE); - debug_assert!(end.0 < nibble_ops::NIBBLE_PER_BYTE); - let final_offset = (start.0 + end.0) % nibble_ops::NIBBLE_PER_BYTE; - let _shifted = nibble_ops::shift_key(start, final_offset); +fn combine_key(start: &mut NodeKey, end: (usize, &[u8])) { + debug_assert!(start.0 < N::NIBBLE_PER_BYTE); + debug_assert!(end.0 < N::NIBBLE_PER_BYTE); + let final_offset = (start.0 + end.0) % N::NIBBLE_PER_BYTE; + let _shifted = N::shift_key(start, final_offset); let st = if end.0 > 0 { let sl = start.1.len(); - start.1[sl - 1] |= nibble_ops::pad_right(end.1[0]); + start.1[sl - 1] |= N::pad_right((N::NIBBLE_PER_BYTE - end.0) as u8, end.1[0]); 1 } else { 0 @@ -1617,8 +1629,11 @@ mod tests { use hash_db::{Hasher, HashDB}; use keccak_hasher::KeccakHasher; use reference_trie::{RefTrieDBMutNoExt, RefTrieDBMut, TrieMut, NodeCodec, - ReferenceNodeCodec, reference_trie_root, reference_trie_root_no_extension}; + ReferenceNodeCodec, reference_trie_root, reference_trie_root_no_extension, + Radix16, ChildIndex16, BitMap16, + }; use crate::nibble::BackingByteVec; + use super::NodeHandle; fn populate_trie<'db>( db: &'db mut dyn HashDB, @@ -1663,7 +1678,7 @@ mod tests { } fn reference_hashed_null_node() -> ::Out { - as NodeCodec>::hashed_null_node() + as NodeCodec>::hashed_null_node() } #[test] @@ -2080,7 +2095,7 @@ mod tests { let b: &[u8] = [0x56, 0x78][..].into(); let test_comb = |a: (_, 
&BackingByteVec), b, c| { let mut a = (a.0, a.1.clone()); - super::combine_key(&mut a, b); + super::combine_key::(&mut a, b); assert_eq!((a.0, &a.1[..]), c); }; test_comb((0, &a), (0, &b), (0, &[0x12, 0x34, 0x56, 0x78][..])); @@ -2092,8 +2107,7 @@ mod tests { #[test] fn nice_debug_for_node() { use super::Node; - let e: Node = Node::Leaf((1, vec![1, 2, 3].into()), vec![4, 5, 6]); + let e: Node>> = Node::Leaf((1, vec![1, 2, 3].into()), vec![4, 5, 6]); assert_eq!(format!("{:?}", e), "Leaf((1, 010203), 040506)"); } - }
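The `key2.left()` match arms inside `fix` further above build the child prefix from whole bytes plus a `(nibble_count, left_aligned_value)` pair for a trailing partial byte, the same pair shape stored in `death_row`. A worked radix-16 sketch of appending a single nibble to such a prefix, using toy helpers rather than the crate's `push_at_left`/`pad_left`:

// Sketch (radix 16 only): appending one nibble to a prefix either opens a new
// partial byte or completes the pending one, mirroring the match arms in `fix`.
type PartialByte = (u8, u8); // (number of pending nibbles, left-aligned bits)

fn append_nibble(bytes: &mut Vec<u8>, partial: PartialByte, nibble: u8) -> PartialByte {
    let (pending, value) = partial;
    match pending {
        // No pending nibble: the new nibble becomes the high half of a new
        // partial byte, e.g. nibble 0x3 -> (1, 0x30).
        0 => (1, nibble << 4),
        // One pending nibble (the last slot at radix 16): the new nibble
        // completes the byte, so push it and clear the pair.
        1 => {
            bytes.push(value | nibble);
            (0, 0)
        }
        _ => unreachable!("radix 16 keeps at most one pending nibble"),
    }
}

fn main() {
    let mut bytes = vec![0x12u8];
    let partial = append_nibble(&mut bytes, (0, 0), 0x3);
    assert_eq!(partial, (1, 0x30));
    let partial = append_nibble(&mut bytes, partial, 0x4);
    assert_eq!((&bytes[..], partial), (&[0x12u8, 0x34][..], (0, 0)));
}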