From 5c4cdcb7212eb9dd65e0f7490b901b2acfed8176 Mon Sep 17 00:00:00 2001 From: mriise Date: Mon, 24 Aug 2020 01:52:02 -0700 Subject: [PATCH 01/18] Revert "(cargo-release) version 0.11.3" This reverts commit eebdb2c9a815bf069ed9f9f8517194aa0bb9451c. --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index c4cc3ca3..10821803 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ name = "multihash" description = "Implementation of the multihash format" repository = "https://github.com/multiformats/rust-multihash" keywords = ["multihash", "ipfs"] -version = "0.11.3" +version = "0.11.2" authors = ["dignifiedquire "] license = "MIT" readme = "README.md" From 18f51a26985123ad7146dea2c37f89d894199209 Mon Sep 17 00:00:00 2001 From: mriise Date: Sat, 27 Mar 2021 22:36:27 -0700 Subject: [PATCH 02/18] lazy move to const generics --- Cargo.toml | 3 +- src/arb.rs | 4 +- src/hasher.rs | 48 ++++-------- src/hasher_impl.rs | 171 ++++++++++++++++++++++-------------------- src/lib.rs | 3 +- src/multihash.rs | 41 +++++----- src/multihash_impl.rs | 6 +- 7 files changed, 129 insertions(+), 147 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d8d2fe3b..3f0b78a9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ derive = ["multihash-derive"] arb = ["quickcheck", "rand"] secure-hashes = ["blake2b", "blake2s", "blake3", "sha2", "sha3"] scale-codec = ["parity-scale-codec"] -serde-codec = ["serde", "generic-array/serde"] +serde-codec = ["serde"] blake2b = ["blake2b_simd"] blake2s = ["blake2s_simd"] @@ -32,7 +32,6 @@ sha3 = ["digest", "sha-3"] strobe = ["strobe-rs"] [dependencies] -generic-array = "0.14.4" parity-scale-codec = { version = "1.3.5", optional = true, default-features = false, features = ["derive"] } quickcheck = { version = "0.9.2", optional = true } rand = { version = "0.7.3", optional = true } diff --git a/src/arb.rs b/src/arb.rs index 8196bb6b..b17d4953 100644 --- a/src/arb.rs +++ b/src/arb.rs @@ -4,10 +4,10 @@ use rand::{ Rng, }; -use crate::{MultihashGeneric, U64}; +use crate::{MultihashGeneric}; /// Generates a random valid multihash. -impl Arbitrary for MultihashGeneric { +impl Arbitrary for MultihashGeneric<64> { fn arbitrary(g: &mut G) -> Self { // In real world lower multihash codes are more likely to happen, hence distribute them // with bias towards smaller values. diff --git a/src/hasher.rs b/src/hasher.rs index dd224488..098ed08c 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -1,25 +1,13 @@ use crate::error::Error; use core::fmt::Debug; -use generic_array::typenum::marker_traits::Unsigned; -use generic_array::{ArrayLength, GenericArray}; - -/// Size marker trait. -pub trait Size: - ArrayLength + Debug + Default + Eq + core::hash::Hash + Send + Sync + 'static -{ -} - -impl + Debug + Default + Eq + core::hash::Hash + Send + Sync + 'static> Size - for T -{ -} +use std::usize; /// Stack allocated digest trait. -pub trait Digest: +pub trait Digest: AsRef<[u8]> + AsMut<[u8]> - + From> - + Into> + + From<[u8; S]> + + Into<[u8; S]> + Clone + core::hash::Hash + Debug @@ -31,15 +19,15 @@ pub trait Digest: { /// Size of the digest. fn size(&self) -> u8 { - S::to_u8() + S as u8 } /// Wraps the digest bytes. 
fn wrap(digest: &[u8]) -> Result { - if digest.len() != S::to_usize() { + if digest.len() != S { return Err(Error::InvalidSize(digest.len() as _)); } - let mut array = GenericArray::default(); + let mut array = [0; S]; let len = digest.len().min(array.len()); array[..len].copy_from_slice(&digest[..len]); Ok(array.into()) @@ -56,22 +44,20 @@ pub trait Digest: use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S::to_u64() || size > u8::max_value() as u64 { + if size > S as u64 || size > u8::max_value() as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = GenericArray::default(); + let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; Ok(Self::from(digest)) } } /// Trait implemented by a hash function implementation. -pub trait StatefulHasher: Default + Send + Sync { - /// The maximum Digest size for that hasher (it is stack allocated). - type Size: Size; +pub trait StatefulHasher: Default + Send + Sync { /// The Digest type to distinguish the output of different `Hasher` implementations. - type Digest: Digest; + type Digest: Digest; /// Consume input and update internal state. fn update(&mut self, input: &[u8]); @@ -106,16 +92,13 @@ pub trait StatefulHasher: Default + Send + Sync { /// [Multihashes]: https://github.com/multiformats/multihash /// [associated type]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#specifying-placeholder-types-in-trait-definitions-with-associated-types /// [`MultihashDigest`]: crate::MultihashDigest -pub trait Hasher: Default + Send + Sync { - /// The maximum Digest size for that hasher (it is stack allocated). - type Size: Size; - +pub trait Hasher: Default + Send + Sync { /// The Digest type to distinguish the output of different `Hasher` implementations. - type Digest: Digest; + type Digest: Digest; /// Returns the allocated size of the digest. fn size() -> u8 { - Self::Size::to_u8() + S as u8 } /// Hashes the given `input` data and returns its hash digest. @@ -124,8 +107,7 @@ pub trait Hasher: Default + Send + Sync { Self: Sized; } -impl Hasher for T { - type Size = T::Size; +impl, const S: usize> Hasher for T{ type Digest = T::Digest; fn digest(input: &[u8]) -> Self::Digest { diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index 422e6b54..752137b5 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -1,35 +1,38 @@ use crate::error::Error; -use crate::hasher::{Digest, Size, StatefulHasher}; +use crate::hasher::{Digest, StatefulHasher}; use core::convert::TryFrom; -use generic_array::GenericArray; macro_rules! derive_digest { ($name:ident) => { /// Multihash digest. - #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] - pub struct $name(GenericArray); + #[derive(Clone, Debug, Eq, Hash, PartialEq)] + pub struct $name([u8; S]); - impl Copy for $name where S::ArrayType: Copy {} + impl Default for $name { + fn default() -> Self { + [0u8; S].into() + } + } - impl AsRef<[u8]> for $name { + impl AsRef<[u8]> for $name { fn as_ref(&self) -> &[u8] { &self.0 } } - impl AsMut<[u8]> for $name { + impl AsMut<[u8]> for $name { fn as_mut(&mut self) -> &mut [u8] { &mut self.0 } } - impl From> for $name { - fn from(array: GenericArray) -> Self { + impl From<[u8; S]> for $name { + fn from(array: [u8; S]) -> Self { Self(array) } } - impl From<$name> for GenericArray { + impl From<$name> for [u8; S] { fn from(digest: $name) -> Self { digest.0 } @@ -38,7 +41,7 @@ macro_rules! derive_digest { /// Convert slice to `Digest`. 
/// /// It errors when the length of the slice does not match the size of the `Digest`. - impl TryFrom<&[u8]> for $name { + impl TryFrom<&[u8]> for $name { type Error = Error; fn try_from(slice: &[u8]) -> Result { @@ -46,14 +49,14 @@ macro_rules! derive_digest { } } - impl Digest for $name {} + impl Digest for $name {} }; } macro_rules! derive_write { ($name:ident) => { #[cfg(feature = "std")] - impl std::io::Write for $name { + impl std::io::Write for $name { fn write(&mut self, buf: &[u8]) -> std::io::Result { self.update(buf); Ok(buf.len()) @@ -73,25 +76,22 @@ macro_rules! derive_hasher_blake { /// Multihash hasher. #[derive(Debug)] - pub struct $name { - _marker: PhantomData, + pub struct $name { state: $module::State, } - impl Default for $name { + impl Default for $name { fn default() -> Self { let mut params = $module::Params::new(); - params.hash_length(S::to_usize()); + params.hash_length(S); Self { - _marker: PhantomData, state: params.to_state(), } } } - impl StatefulHasher for $name { - type Size = S; - type Digest = $digest; + impl StatefulHasher for $name { + type Digest = $digest; fn update(&mut self, input: &[u8]) { self.state.update(input); @@ -99,7 +99,9 @@ macro_rules! derive_hasher_blake { fn finalize(&self) -> Self::Digest { let digest = self.state.finalize(); - GenericArray::clone_from_slice(digest.as_bytes()).into() + let mut array = [0; S]; + array.clone_from_slice(digest.as_bytes()); + array.into() } fn reset(&mut self) { @@ -115,63 +117,54 @@ macro_rules! derive_hasher_blake { #[cfg(feature = "blake2b")] pub mod blake2b { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::{U32, U64}; derive_hasher_blake!(blake2b_simd, Blake2bHasher, Blake2bDigest); /// 256 bit blake2b hasher. - pub type Blake2b256 = Blake2bHasher; + pub type Blake2b256 = Blake2bHasher<32>; /// 512 bit blake2b hasher. - pub type Blake2b512 = Blake2bHasher; + pub type Blake2b512 = Blake2bHasher<64>; } #[cfg(feature = "blake2s")] pub mod blake2s { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::{U16, U32}; derive_hasher_blake!(blake2s_simd, Blake2sHasher, Blake2sDigest); /// 256 bit blake2b hasher. - pub type Blake2s128 = Blake2sHasher; + pub type Blake2s128 = Blake2sHasher<16>; /// 512 bit blake2b hasher. - pub type Blake2s256 = Blake2sHasher; + pub type Blake2s256 = Blake2sHasher<32>; } #[cfg(feature = "blake3")] pub mod blake3 { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::U32; // derive_hasher_blake!(blake3, Blake3Hasher, Blake3Digest); derive_digest!(Blake3Digest); /// Multihash hasher. #[derive(Debug)] - pub struct Blake3Hasher { - _marker: PhantomData, + pub struct Blake3Hasher { hasher: ::blake3::Hasher, } - impl Default for Blake3Hasher { + impl Default for Blake3Hasher { fn default() -> Self { let hasher = ::blake3::Hasher::new(); Self { - _marker: PhantomData, hasher, } } } - impl StatefulHasher for Blake3Hasher { - type Size = S; - type Digest = Blake3Digest; + impl StatefulHasher for Blake3Hasher { + type Digest = Blake3Digest; fn update(&mut self, input: &[u8]) { self.hasher.update(input); @@ -179,7 +172,9 @@ pub mod blake3 { fn finalize(&self) -> Self::Digest { let digest = self.hasher.finalize(); //default is 32 bytes anyway - GenericArray::clone_from_slice(digest.as_bytes()).into() + let mut array = [0; S]; + array.clone_from_slice(digest.as_bytes()); + array.into() } fn reset(&mut self) { @@ -190,21 +185,20 @@ pub mod blake3 { derive_write!(Blake3Hasher); /// blake3-256 hasher. 
- pub type Blake3_256 = Blake3Hasher; + pub type Blake3_256 = Blake3Hasher<32>; } #[cfg(feature = "digest")] macro_rules! derive_hasher_sha { - ($module:ty, $name:ident, $size:ty, $digest:ident) => { + ($module:ty, $name:ident, $size:expr, $digest:ident) => { /// Multihash hasher. #[derive(Debug, Default)] pub struct $name { state: $module, } - impl $crate::hasher::StatefulHasher for $name { - type Size = $size; - type Digest = $digest; + impl $crate::hasher::StatefulHasher for $name { + type Digest = $digest; fn update(&mut self, input: &[u8]) { use digest::Digest; @@ -213,7 +207,11 @@ macro_rules! derive_hasher_sha { fn finalize(&self) -> Self::Digest { use digest::Digest; - Self::Digest::from(self.state.clone().finalize()) + // TODO: this extra array seems excessive to convert from a generic array + let a = self.state.clone().finalize().as_slice(); + let array = [0; S]; + array.copy_from_slice(a); + Self::Digest::from(array) } fn reset(&mut self) { @@ -248,65 +246,68 @@ pub mod sha1 { #[cfg(feature = "sha2")] pub mod sha2 { use super::*; - use generic_array::typenum::{U32, U64}; derive_digest!(Sha2Digest); - derive_hasher_sha!(sha_2::Sha256, Sha2_256, U32, Sha2Digest); - derive_hasher_sha!(sha_2::Sha512, Sha2_512, U64, Sha2Digest); + derive_hasher_sha!(sha_2::Sha256, Sha2_256, 32, Sha2Digest); + derive_hasher_sha!(sha_2::Sha512, Sha2_512, 64, Sha2Digest); } #[cfg(feature = "sha3")] pub mod sha3 { use super::*; - use generic_array::typenum::{U28, U32, U48, U64}; derive_digest!(Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_224, Sha3_224, U28, Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_256, Sha3_256, U32, Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_384, Sha3_384, U48, Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_512, Sha3_512, U64, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_224, Sha3_224, 28, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_256, Sha3_256, 32, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_384, Sha3_384, 48, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_512, Sha3_512, 64, Sha3Digest); derive_digest!(KeccakDigest); - derive_hasher_sha!(sha_3::Keccak224, Keccak224, U28, KeccakDigest); - derive_hasher_sha!(sha_3::Keccak256, Keccak256, U32, KeccakDigest); - derive_hasher_sha!(sha_3::Keccak384, Keccak384, U48, KeccakDigest); - derive_hasher_sha!(sha_3::Keccak512, Keccak512, U64, KeccakDigest); + derive_hasher_sha!(sha_3::Keccak224, Keccak224, 28, KeccakDigest); + derive_hasher_sha!(sha_3::Keccak256, Keccak256, 32, KeccakDigest); + derive_hasher_sha!(sha_3::Keccak384, Keccak384, 48, KeccakDigest); + derive_hasher_sha!(sha_3::Keccak512, Keccak512, 64, KeccakDigest); } pub mod identity { use super::*; use crate::error::Error; - use generic_array::typenum::U32; /// Multihash digest. 
- #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] - pub struct IdentityDigest(u8, GenericArray); + #[derive(Clone, Debug, Eq, Hash, PartialEq)] + pub struct IdentityDigest(u8, [u8; S]); - impl AsRef<[u8]> for IdentityDigest { + impl Default for IdentityDigest { + fn default() -> Self { + Self {0: 0, 1: [0u8; S]} + } + } + + impl AsRef<[u8]> for IdentityDigest { fn as_ref(&self) -> &[u8] { &self.1[..self.0 as usize] } } - impl AsMut<[u8]> for IdentityDigest { + impl AsMut<[u8]> for IdentityDigest { fn as_mut(&mut self) -> &mut [u8] { &mut self.1[..self.0 as usize] } } - impl From> for IdentityDigest { - fn from(array: GenericArray) -> Self { + impl From<[u8; S]> for IdentityDigest { + fn from(array: [u8; S]) -> Self { Self(array.len() as u8, array) } } - impl From> for GenericArray { + impl From> for [u8; S] { fn from(digest: IdentityDigest) -> Self { digest.1 } } - impl Digest for IdentityDigest { + impl Digest for IdentityDigest { fn size(&self) -> u8 { self.0 } @@ -314,10 +315,10 @@ pub mod identity { // A custom implementation is needed as an identity hash value might be shorter than the // allocated Digest. fn wrap(digest: &[u8]) -> Result { - if digest.len() > S::to_usize() { + if digest.len() > S { return Err(Error::InvalidSize(digest.len() as _)); } - let mut array = GenericArray::default(); + let mut array = [0; S]; let len = digest.len().min(array.len()); array[..len].copy_from_slice(&digest[..len]); Ok(Self(len as u8, array)) @@ -333,10 +334,10 @@ pub mod identity { use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S::to_u64() || size > u8::max_value() as u64 { + if size > S as u64|| size > u8::max_value() as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = GenericArray::default(); + let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; Ok(Self(size as u8, digest)) } @@ -347,15 +348,21 @@ pub mod identity { /// # Panics /// /// Panics if the input is bigger than the maximum size. - #[derive(Debug, Default)] - pub struct IdentityHasher { - bytes: GenericArray, + #[derive(Debug)] + pub struct IdentityHasher { + bytes: [u8; S], i: usize, } - impl StatefulHasher for IdentityHasher { - type Size = S; - type Digest = IdentityDigest; + impl Default for IdentityHasher { + fn default() -> Self { + Self {i: 0, bytes: [0u8; S]} + } + } + + + impl StatefulHasher for IdentityHasher { + type Digest = IdentityDigest; fn update(&mut self, input: &[u8]) { let start = self.i.min(self.bytes.len()); @@ -369,7 +376,7 @@ pub mod identity { } fn reset(&mut self) { - self.bytes = Default::default(); + self.bytes = [0; S]; self.i = 0; } } @@ -381,7 +388,7 @@ pub mod identity { /// # Panics /// /// Panics if the input is bigger than 32 bytes. - pub type Identity256 = IdentityHasher; + pub type Identity256 = IdentityHasher<32>; } pub mod unknown { @@ -399,13 +406,13 @@ pub mod strobe { derive_digest!(StrobeDigest); /// Strobe hasher. 
- pub struct StrobeHasher { + pub struct StrobeHasher { _marker: PhantomData, strobe: Strobe, initialized: bool, } - impl Default for StrobeHasher { + impl Default for StrobeHasher { fn default() -> Self { Self { _marker: PhantomData, @@ -415,7 +422,7 @@ pub mod strobe { } } - impl StatefulHasher for StrobeHasher { + impl StatefulHasher for StrobeHasher { type Size = S; type Digest = StrobeDigest; diff --git a/src/lib.rs b/src/lib.rs index 828f9ff0..abb39cbb 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -62,9 +62,8 @@ mod multihash; mod multihash_impl; pub use crate::error::{Error, Result}; -pub use crate::hasher::{Digest, Hasher, Size, StatefulHasher}; +pub use crate::hasher::{Digest, Hasher, StatefulHasher}; pub use crate::multihash::{Multihash as MultihashGeneric, MultihashDigest}; -pub use generic_array::typenum::{self, U128, U16, U20, U28, U32, U48, U64}; #[cfg(feature = "derive")] pub use multihash_derive as derive; diff --git a/src/multihash.rs b/src/multihash.rs index 50339710..da21fb95 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -1,22 +1,19 @@ -use crate::hasher::{Digest, Size}; +use crate::hasher::{Digest}; use crate::Error; use core::convert::TryFrom; #[cfg(feature = "std")] use core::convert::TryInto; use core::fmt::Debug; -use generic_array::{ArrayLength, GenericArray}; +use std::usize; /// Trait that implements hashing. /// /// It is usually implemented by a custom code table enum that derives the [`Multihash` derive]. /// /// [`Multihash` derive]: crate::derive -pub trait MultihashDigest: +pub trait MultihashDigest: TryFrom + Into + Send + Sync + Unpin + Copy + Eq + Debug + 'static { - /// The maximum size a hash will allocate. - type AllocSize: Size; - /// Calculate the hash of some input data. /// /// # Example @@ -28,7 +25,7 @@ pub trait MultihashDigest: /// let hash = Code::Sha3_256.digest(b"Hello world!"); /// println!("{:02x?}", hash); /// ``` - fn digest(&self, input: &[u8]) -> Multihash; + fn digest(&self, input: &[u8]) -> Multihash; /// Create a multihash from an existing [`Digest`]. /// @@ -43,10 +40,9 @@ pub trait MultihashDigest: /// println!("{:02x?}", hash); /// ``` #[allow(clippy::needless_lifetimes)] - fn multihash_from_digest<'a, S, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D>(digest: &'a D) -> Multihash where - S: Size, - D: Digest, + D: Digest, Self: From<&'a D>; } @@ -74,26 +70,26 @@ pub trait MultihashDigest: #[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))] #[cfg_attr(feature = "serde-codec", derive(serde::Serialize))] #[cfg_attr(feature = "serde-codec", serde(bound = "S: Size"))] -#[derive(Clone, Debug, Default, Eq, Ord, PartialEq, PartialOrd)] -pub struct Multihash { +#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] +pub struct Multihash { /// The code of the Multihash. code: u64, /// The actual size of the digest in bytes (not the allocated size). size: u8, /// The digest. - digest: GenericArray, + digest: [u8; S], } -impl Copy for Multihash where >::ArrayType: Copy {} +impl Copy for Multihash {} -impl Multihash { +impl Multihash { /// Wraps the digest in a multihash. 
pub fn wrap(code: u64, input_digest: &[u8]) -> Result { - if input_digest.len() > S::to_usize() { + if input_digest.len() > S { return Err(Error::InvalidSize(input_digest.len() as _)); } let size = input_digest.len(); - let mut digest = GenericArray::default(); + let mut digest = [0; S]; digest[..size].copy_from_slice(input_digest); Ok(Self { code, @@ -165,7 +161,7 @@ impl Multihash { // Don't hash the whole allocated space, but just the actual digest #[allow(clippy::derive_hash_xor_eq)] -impl core::hash::Hash for Multihash { +impl core::hash::Hash for Multihash { fn hash(&self, state: &mut T) { self.code.hash(state); self.digest().hash(state); @@ -173,7 +169,7 @@ impl core::hash::Hash for Multihash { } #[cfg(feature = "std")] -impl From> for Vec { +impl From> for Vec { fn from(multihash: Multihash) -> Self { multihash.to_bytes() } @@ -266,21 +262,20 @@ where /// /// Currently the maximum size for a digest is 255 bytes. #[cfg(feature = "std")] -pub fn read_multihash(mut r: R) -> Result<(u64, u8, GenericArray), Error> +pub fn read_multihash(mut r: R) -> Result<(u64, u8, [u8; S]), Error> where R: std::io::Read, - S: Size, { use unsigned_varint::io::read_u64; let code = read_u64(&mut r)?; let size = read_u64(&mut r)?; - if size > S::to_u64() || size > u8::MAX as u64 { + if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = GenericArray::default(); + let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; Ok((code, size as u8, digest)) } diff --git a/src/multihash_impl.rs b/src/multihash_impl.rs index cd31a172..98ab80dd 100644 --- a/src/multihash_impl.rs +++ b/src/multihash_impl.rs @@ -7,15 +7,15 @@ use multihash_derive::Multihash; /// /// [`Multihash` derive]: crate::derive #[derive(Copy, Clone, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = crate::U64)] +#[mh(alloc_size = U64)] pub enum Code { /// SHA-256 (32-byte hash size) #[cfg(feature = "sha2")] - #[mh(code = 0x12, hasher = crate::Sha2_256, digest = crate::Sha2Digest)] + #[mh(code = 0x12, hasher = crate::Sha2_256, digest = crate::Sha2Digest<32>)] Sha2_256, /// SHA-512 (64-byte hash size) #[cfg(feature = "sha2")] - #[mh(code = 0x13, hasher = crate::Sha2_512, digest = crate::Sha2Digest)] + #[mh(code = 0x13, hasher = crate::Sha2_512, digest = crate::Sha2Digest<64>)] Sha2_512, /// SHA3-224 (28-byte hash size) #[cfg(feature = "sha3")] From daa059e65847f008fef9dbb33b75c8d5d4daddff Mon Sep 17 00:00:00 2001 From: mriise Date: Tue, 30 Mar 2021 11:18:10 -0700 Subject: [PATCH 03/18] remove std::usize --- src/hasher.rs | 2 +- src/multihash.rs | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/hasher.rs b/src/hasher.rs index 098ed08c..402733f8 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -1,6 +1,6 @@ use crate::error::Error; use core::fmt::Debug; -use std::usize; + /// Stack allocated digest trait. pub trait Digest: diff --git a/src/multihash.rs b/src/multihash.rs index da21fb95..cc10ccf8 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -4,7 +4,6 @@ use core::convert::TryFrom; #[cfg(feature = "std")] use core::convert::TryInto; use core::fmt::Debug; -use std::usize; /// Trait that implements hashing. 
/// From ba1a6a92d0492f1e5c6b990eb2e40eea912af012 Mon Sep 17 00:00:00 2001 From: mriise Date: Tue, 30 Mar 2021 11:49:24 -0700 Subject: [PATCH 04/18] use usize for maximum hash size instead of u8 --- src/hasher.rs | 10 +++++----- src/hasher_impl.rs | 12 ++++++------ 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/hasher.rs b/src/hasher.rs index 402733f8..6e851271 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -17,9 +17,9 @@ pub trait Digest: + Sync + 'static { - /// Size of the digest. - fn size(&self) -> u8 { - S as u8 + /// Size of the digest. Maximum for Some of the Blake family is 2^64-1 bytes + fn size(&self) -> usize { + S } /// Wraps the digest bytes. @@ -97,8 +97,8 @@ pub trait Hasher: Default + Send + Sync { type Digest: Digest; /// Returns the allocated size of the digest. - fn size() -> u8 { - S as u8 + fn size() -> usize { + S } /// Hashes the given `input` data and returns its hash digest. diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index 752137b5..bd404bba 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -275,7 +275,7 @@ pub mod identity { /// Multihash digest. #[derive(Clone, Debug, Eq, Hash, PartialEq)] - pub struct IdentityDigest(u8, [u8; S]); + pub struct IdentityDigest(usize, [u8; S]); impl Default for IdentityDigest { fn default() -> Self { @@ -297,7 +297,7 @@ pub mod identity { impl From<[u8; S]> for IdentityDigest { fn from(array: [u8; S]) -> Self { - Self(array.len() as u8, array) + Self(array.len(), array) } } @@ -308,7 +308,7 @@ pub mod identity { } impl Digest for IdentityDigest { - fn size(&self) -> u8 { + fn size(&self) -> usize { self.0 } @@ -321,7 +321,7 @@ pub mod identity { let mut array = [0; S]; let len = digest.len().min(array.len()); array[..len].copy_from_slice(&digest[..len]); - Ok(Self(len as u8, array)) + Ok(Self(len, array)) } // A custom implementation is needed as an identity hash also stores the actual size of @@ -339,7 +339,7 @@ pub mod identity { } let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; - Ok(Self(size as u8, digest)) + Ok(Self(size as usize, digest)) } } @@ -372,7 +372,7 @@ pub mod identity { } fn finalize(&self) -> Self::Digest { - IdentityDigest(self.i as u8, self.bytes.clone()) + IdentityDigest(self.i, self.bytes.clone()) } fn reset(&mut self) { From 2e193e56ce605cefcf7921118b837e17ca030297 Mon Sep 17 00:00:00 2001 From: Volker Mische Date: Tue, 20 Apr 2021 10:03:08 +0200 Subject: [PATCH 05/18] fix: make derive macro work --- derive/src/multihash.rs | 91 +++++++++++++++-------------------------- src/multihash_impl.rs | 30 +++++++------- 2 files changed, 49 insertions(+), 72 deletions(-) diff --git a/derive/src/multihash.rs b/derive/src/multihash.rs index 45720ab1..2e35d3f5 100644 --- a/derive/src/multihash.rs +++ b/derive/src/multihash.rs @@ -43,7 +43,7 @@ impl Parse for MhAttr { /// Attributes of the top-level derive. #[derive(Debug)] enum DeriveAttr { - AllocSize(utils::Attr), + AllocSize(utils::Attr), NoAllocSizeErrors(kw::no_alloc_size_errors), } @@ -161,7 +161,7 @@ impl<'a> From<&'a VariantInfo<'a>> for Hash { /// Parse top-level enum [#mh()] attributes. /// /// Returns the `alloc_size` and whether errors regarding to `alloc_size` should be reported or not. 
-fn parse_code_enum_attrs(ast: &syn::DeriveInput) -> (syn::Type, bool) { +fn parse_code_enum_attrs(ast: &syn::DeriveInput) -> (syn::LitInt, bool) { let mut alloc_size = None; let mut no_alloc_size_errors = false; @@ -226,33 +226,12 @@ fn error_code_duplicates(hashes: &[Hash]) { #[derive(Debug)] struct ParseError(proc_macro2::Span); -/// Parse a path containing a `typenum` unsigned integer (e.g. `U64`) into a u64 -fn parse_unsigned_typenum(typenum_path: &syn::Type) -> Result { - match typenum_path { - syn::Type::Path(type_path) => match type_path.path.segments.last() { - Some(path_segment) => { - let typenum_ident = &path_segment.ident; - let typenum = typenum_ident.to_string(); - match typenum.as_str().split_at(1) { - ("U", byte_size) => byte_size - .parse::() - .map_err(|_| ParseError(typenum_ident.span())), - _ => Err(ParseError(typenum_ident.span())), - } - } - None => Err(ParseError(type_path.path.span())), - }, - _ => Err(ParseError(typenum_path.span())), - } -} - /// Returns the max size as u64. /// -/// Emits an error if the `#mh(alloc_size)` attribute doesn't contain a valid unsigned integer -/// `typenum`. -fn parse_alloc_size_attribute(alloc_size: &syn::Type) -> u64 { - parse_unsigned_typenum(&alloc_size).unwrap_or_else(|_| { - let msg = "`alloc_size` attribute must be a `typenum`, e.g. #[mh(alloc_size = U64)]"; +/// Emits an error if the `#mh(alloc_size)` attribute doesn't contain a valid unsigned integer. +fn parse_alloc_size_attribute(alloc_size: &syn::LitInt) -> u64 { + alloc_size.base10_parse().unwrap_or_else(|_| { + let msg = "`alloc_size` attribute must be an integer, e.g. #[mh(alloc_size = 64)]"; #[cfg(test)] panic!(msg); #[cfg(not(test))] @@ -261,38 +240,39 @@ fn parse_alloc_size_attribute(alloc_size: &syn::Type) -> u64 { } /// Return a warning/error if the specified alloc_size is smaller than the biggest digest -fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::Type) { +fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::LitInt) { let expected_alloc_size = parse_alloc_size_attribute(expected_alloc_size_type); let maybe_error: Result<(), ParseError> = hashes .iter() .try_for_each(|hash| { - // The digest type must have a size parameter of the shape `U`, else we error. + // The digest type must have an integer as size parameter, else we error. 
match hash.digest.segments.last() { Some(path_segment) => match &path_segment.arguments { syn::PathArguments::AngleBracketed(arguments) => match arguments.args.last() { - Some(syn::GenericArgument::Type(path)) => { - match parse_unsigned_typenum(&path) { - Ok(max_digest_size) => { - if max_digest_size > expected_alloc_size { - let msg = format!("The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (U{})", - max_digest_size); - #[cfg(test)] - panic!(msg); - #[cfg(not(test))] - { - let digest = &hash.digest.to_token_stream().to_string().replace(" ", ""); - let line = &hash.digest.span().start().line; - proc_macro_error::emit_error!( - &expected_alloc_size_type, msg; - note = "the bigger digest is `{}` at line {}", digest, line; - ); - } - } - Ok(()) - }, - Err(err) => Err(err), - } + Some(syn::GenericArgument::Const(syn::Expr::Lit(expr_lit))) => match &expr_lit.lit { + syn::Lit::Int(lit_int) => match lit_int.base10_parse::() { + Ok(max_digest_size) => { + if max_digest_size > expected_alloc_size { + let msg = format!("The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size ({})", + max_digest_size); + #[cfg(test)] + panic!(msg); + #[cfg(not(test))] + { + let digest = &hash.digest.to_token_stream().to_string().replace(" ", ""); + let line = &hash.digest.span().start().line; + proc_macro_error::emit_error!( + &expected_alloc_size_type, msg; + note = "the bigger digest is `{}` at line {}", digest, line; + ); + } + } + Ok(()) + }, + _ => Err(ParseError(lit_int.span())), + }, + _ => Err(ParseError(expr_lit.span())), }, _ => Err(ParseError(arguments.args.span())), }, @@ -338,9 +318,7 @@ pub fn multihash(s: Structure) -> TokenStream { /// A Multihash with the same allocated size as the Multihashes produces by this derive. 
pub type Multihash = #mh_crate::MultihashGeneric::<#alloc_size>; - impl #mh_crate::MultihashDigest for #code_enum { - type AllocSize = #alloc_size; - + impl #mh_crate::MultihashDigest<#alloc_size> for #code_enum { fn digest(&self, input: &[u8]) -> Multihash { use #mh_crate::Hasher; match self { @@ -349,10 +327,9 @@ pub fn multihash(s: Structure) -> TokenStream { } } - fn multihash_from_digest<'a, S, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D>(digest: &'a D) -> Multihash where - S: #mh_crate::Size, - D: #mh_crate::Digest, + D: #mh_crate::Digest<#alloc_size>, Self: From<&'a D>, { let code = Self::from(&digest); diff --git a/src/multihash_impl.rs b/src/multihash_impl.rs index 98ab80dd..3de778de 100644 --- a/src/multihash_impl.rs +++ b/src/multihash_impl.rs @@ -7,7 +7,7 @@ use multihash_derive::Multihash; /// /// [`Multihash` derive]: crate::derive #[derive(Copy, Clone, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = U64)] +#[mh(alloc_size = 64)] pub enum Code { /// SHA-256 (32-byte hash size) #[cfg(feature = "sha2")] @@ -19,61 +19,61 @@ pub enum Code { Sha2_512, /// SHA3-224 (28-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x17, hasher = crate::Sha3_224, digest = crate::Sha3Digest)] + #[mh(code = 0x17, hasher = crate::Sha3_224, digest = crate::Sha3Digest<28>)] Sha3_224, /// SHA3-256 (32-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x16, hasher = crate::Sha3_256, digest = crate::Sha3Digest)] + #[mh(code = 0x16, hasher = crate::Sha3_256, digest = crate::Sha3Digest<32>)] Sha3_256, /// SHA3-384 (48-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x15, hasher = crate::Sha3_384, digest = crate::Sha3Digest)] + #[mh(code = 0x15, hasher = crate::Sha3_384, digest = crate::Sha3Digest<48>)] Sha3_384, /// SHA3-512 (64-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x14, hasher = crate::Sha3_512, digest = crate::Sha3Digest)] + #[mh(code = 0x14, hasher = crate::Sha3_512, digest = crate::Sha3Digest<64>)] Sha3_512, /// Keccak-224 (28-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1a, hasher = crate::Keccak224, digest = crate::KeccakDigest)] + #[mh(code = 0x1a, hasher = crate::Keccak224, digest = crate::KeccakDigest<28>)] Keccak224, /// Keccak-256 (32-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1b, hasher = crate::Keccak256, digest = crate::KeccakDigest)] + #[mh(code = 0x1b, hasher = crate::Keccak256, digest = crate::KeccakDigest<32>)] Keccak256, /// Keccak-384 (48-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1c, hasher = crate::Keccak384, digest = crate::KeccakDigest)] + #[mh(code = 0x1c, hasher = crate::Keccak384, digest = crate::KeccakDigest<48>)] Keccak384, /// Keccak-512 (64-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1d, hasher = crate::Keccak512, digest = crate::KeccakDigest)] + #[mh(code = 0x1d, hasher = crate::Keccak512, digest = crate::KeccakDigest<64>)] Keccak512, /// BLAKE2b-256 (32-byte hash size) #[cfg(feature = "blake2b")] - #[mh(code = 0xb220, hasher = crate::Blake2b256, digest = crate::Blake2bDigest)] + #[mh(code = 0xb220, hasher = crate::Blake2b256, digest = crate::Blake2bDigest<32>)] Blake2b256, /// BLAKE2b-512 (64-byte hash size) #[cfg(feature = "blake2b")] - #[mh(code = 0xb240, hasher = crate::Blake2b512, digest = crate::Blake2bDigest)] + #[mh(code = 0xb240, hasher = crate::Blake2b512, digest = crate::Blake2bDigest<64>)] Blake2b512, /// BLAKE2s-128 (16-byte hash size) #[cfg(feature = "blake2s")] - #[mh(code = 0xb250, hasher = crate::Blake2s128, digest = crate::Blake2sDigest)] + 
#[mh(code = 0xb250, hasher = crate::Blake2s128, digest = crate::Blake2sDigest<16>)] Blake2s128, /// BLAKE2s-256 (32-byte hash size) #[cfg(feature = "blake2s")] - #[mh(code = 0xb260, hasher = crate::Blake2s256, digest = crate::Blake2sDigest)] + #[mh(code = 0xb260, hasher = crate::Blake2s256, digest = crate::Blake2sDigest<32>)] Blake2s256, /// BLAKE3-256 (32-byte hash size) #[cfg(feature = "blake3")] - #[mh(code = 0x1e, hasher = crate::Blake3_256, digest = crate::Blake3Digest)] + #[mh(code = 0x1e, hasher = crate::Blake3_256, digest = crate::Blake3Digest<32>)] Blake3_256, // The following hashes are not cryptographically secure hashes and are not enabled by default /// Identity hash (max. 64 bytes) #[cfg(feature = "identity")] - #[mh(code = 0x00, hasher = crate::IdentityHasher::, digest = crate::IdentityDigest)] + #[mh(code = 0x00, hasher = crate::IdentityHasher::, digest = crate::IdentityDigest<64>)] Identity, } From bfed660e24651c3096b5989f5fc90d6d9b20226d Mon Sep 17 00:00:00 2001 From: mriise Date: Fri, 23 Apr 2021 02:19:09 -0700 Subject: [PATCH 06/18] fix sha macro --- src/hasher_impl.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index bd404bba..df59727b 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -197,8 +197,8 @@ macro_rules! derive_hasher_sha { state: $module, } - impl $crate::hasher::StatefulHasher for $name { - type Digest = $digest; + impl $crate::hasher::StatefulHasher<$size> for $name { + type Digest = $digest<$size>; fn update(&mut self, input: &[u8]) { use digest::Digest; @@ -208,9 +208,10 @@ macro_rules! derive_hasher_sha { fn finalize(&self) -> Self::Digest { use digest::Digest; // TODO: this extra array seems excessive to convert from a generic array - let a = self.state.clone().finalize().as_slice(); - let array = [0; S]; - array.copy_from_slice(a); + let a = self.state.clone().finalize(); + let b = a.as_slice(); + let mut array = [0; $size]; + array.copy_from_slice(b); Self::Digest::from(array) } From 224dbe7128b74d99751fca47358b4a4455d7a63e Mon Sep 17 00:00:00 2001 From: mriise Date: Mon, 26 Apr 2021 02:37:11 -0700 Subject: [PATCH 07/18] final changes & cargo clippy/fmt --- Cargo.toml | 3 +- derive/src/multihash.rs | 7 ++-- examples/custom_table.rs | 15 +++---- src/arb.rs | 2 +- src/hasher.rs | 18 +++------ src/hasher_impl.rs | 46 +++++++++------------- src/multihash.rs | 48 +++++++++++++---------- src/multihash_impl.rs | 2 +- tests/lib.rs | 85 +++++++++++++++++++++------------------- 9 files changed, 109 insertions(+), 117 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 3f0b78a9..22acc48b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ derive = ["multihash-derive"] arb = ["quickcheck", "rand"] secure-hashes = ["blake2b", "blake2s", "blake3", "sha2", "sha3"] scale-codec = ["parity-scale-codec"] -serde-codec = ["serde"] +serde-codec = ["serde", "serde-big-array"] blake2b = ["blake2b_simd"] blake2s = ["blake2s_simd"] @@ -36,6 +36,7 @@ parity-scale-codec = { version = "1.3.5", optional = true, default-features = fa quickcheck = { version = "0.9.2", optional = true } rand = { version = "0.7.3", optional = true } serde = { version = "1.0.116", optional = true, default-features = false, features = ["derive"] } +serde-big-array = { version = "0.3.2", optional = true, features = ["const-generics"] } multihash-derive = { version = "^0.7.1", path = "derive", default-features = false, optional = true } unsigned-varint = "0.7.0" diff --git a/derive/src/multihash.rs 
b/derive/src/multihash.rs index 2e35d3f5..4f5a1a9c 100644 --- a/derive/src/multihash.rs +++ b/derive/src/multihash.rs @@ -327,9 +327,9 @@ pub fn multihash(s: Structure) -> TokenStream { } } - fn multihash_from_digest<'a, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D, const S: usize>(digest: &'a D) -> Multihash where - D: #mh_crate::Digest<#alloc_size>, + D: #mh_crate::Digest, Self: From<&'a D>, { let code = Self::from(&digest); @@ -400,9 +400,8 @@ mod tests { } } - fn multihash_from_digest<'a, S, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D, const S: usize>(digest: &'a D) -> Multihash where - S: multihash::Size, D: multihash::Digest, Self: From<&'a D>, { diff --git a/examples/custom_table.rs b/examples/custom_table.rs index 01ae32d0..183f3977 100644 --- a/examples/custom_table.rs +++ b/examples/custom_table.rs @@ -1,19 +1,16 @@ use std::convert::TryFrom; use multihash::derive::Multihash; -use multihash::typenum::{U20, U25, U64}; use multihash::{ - Digest, Error, Hasher, MultihashDigest, MultihashGeneric, Sha2Digest, Sha2_256, Size, - StatefulHasher, + Digest, Error, Hasher, MultihashDigest, MultihashGeneric, Sha2Digest, Sha2_256, StatefulHasher, }; // You can implement a custom hasher. This is a SHA2 256-bit hasher that returns a hash that is // truncated to 160 bits. #[derive(Default, Debug)] pub struct Sha2_256Truncated20(Sha2_256); -impl StatefulHasher for Sha2_256Truncated20 { - type Size = U20; - type Digest = Sha2Digest; +impl StatefulHasher<20> for Sha2_256Truncated20 { + type Digest = Sha2Digest<{ Self::SIZE }>; fn update(&mut self, input: &[u8]) { self.0.update(input) } @@ -28,13 +25,13 @@ impl StatefulHasher for Sha2_256Truncated20 { } #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = U64)] +#[mh(alloc_size = 64)] pub enum Code { /// Example for using a custom hasher which returns truncated hashes - #[mh(code = 0x12, hasher = Sha2_256Truncated20, digest = multihash::Sha2Digest)] + #[mh(code = 0x12, hasher = Sha2_256Truncated20, digest = multihash::Sha2Digest<20>)] Sha2_256Truncated20, /// Example for using a hasher with a bit size that is not exported by default - #[mh(code = 0xb219, hasher = multihash::Blake2bHasher::, digest = multihash::Blake2bDigest)] + #[mh(code = 0xb219, hasher = multihash::Blake2bHasher::<25>, digest = multihash::Blake2bDigest<25>)] Blake2b200, } diff --git a/src/arb.rs b/src/arb.rs index b17d4953..84eb6af9 100644 --- a/src/arb.rs +++ b/src/arb.rs @@ -4,7 +4,7 @@ use rand::{ Rng, }; -use crate::{MultihashGeneric}; +use crate::MultihashGeneric; /// Generates a random valid multihash. impl Arbitrary for MultihashGeneric<64> { diff --git a/src/hasher.rs b/src/hasher.rs index 6e851271..d3c9884a 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -1,7 +1,6 @@ use crate::error::Error; use core::fmt::Debug; - /// Stack allocated digest trait. pub trait Digest: AsRef<[u8]> @@ -18,9 +17,7 @@ pub trait Digest: + 'static { /// Size of the digest. Maximum for Some of the Blake family is 2^64-1 bytes - fn size(&self) -> usize { - S - } + const SIZE: usize = S; /// Wraps the digest bytes. fn wrap(digest: &[u8]) -> Result { @@ -55,7 +52,6 @@ pub trait Digest: /// Trait implemented by a hash function implementation. pub trait StatefulHasher: Default + Send + Sync { - /// The Digest type to distinguish the output of different `Hasher` implementations. 
type Digest: Digest; @@ -92,14 +88,12 @@ pub trait StatefulHasher: Default + Send + Sync { /// [Multihashes]: https://github.com/multiformats/multihash /// [associated type]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#specifying-placeholder-types-in-trait-definitions-with-associated-types /// [`MultihashDigest`]: crate::MultihashDigest -pub trait Hasher: Default + Send + Sync { +pub trait Hasher: Default + Send + Sync { /// The Digest type to distinguish the output of different `Hasher` implementations. - type Digest: Digest; + type Digest: Digest; - /// Returns the allocated size of the digest. - fn size() -> usize { - S - } + ///the allocated size of the digest. + const SIZE: usize = SIZE; /// Hashes the given `input` data and returns its hash digest. fn digest(input: &[u8]) -> Self::Digest @@ -107,7 +101,7 @@ pub trait Hasher: Default + Send + Sync { Self: Sized; } -impl, const S: usize> Hasher for T{ +impl, const SIZE: usize> Hasher for T { type Digest = T::Digest; fn digest(input: &[u8]) -> Self::Digest { diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index df59727b..aa65a7d1 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -157,9 +157,7 @@ pub mod blake3 { fn default() -> Self { let hasher = ::blake3::Hasher::new(); - Self { - hasher, - } + Self { hasher } } } @@ -238,10 +236,9 @@ macro_rules! derive_hasher_sha { #[cfg(feature = "sha1")] pub mod sha1 { use super::*; - use generic_array::typenum::U20; derive_digest!(Sha1Digest); - derive_hasher_sha!(::sha1::Sha1, Sha1, U20, Sha1Digest); + derive_hasher_sha!(::sha1::Sha1, Sha1, 20, Sha1Digest); } #[cfg(feature = "sha2")] @@ -280,7 +277,7 @@ pub mod identity { impl Default for IdentityDigest { fn default() -> Self { - Self {0: 0, 1: [0u8; S]} + Self { 0: 0, 1: [0u8; S] } } } @@ -308,18 +305,16 @@ pub mod identity { } } - impl Digest for IdentityDigest { - fn size(&self) -> usize { - self.0 - } + impl Digest for IdentityDigest { + const SIZE: usize = SIZE; // A custom implementation is needed as an identity hash value might be shorter than the // allocated Digest. fn wrap(digest: &[u8]) -> Result { - if digest.len() > S { + if digest.len() > SIZE { return Err(Error::InvalidSize(digest.len() as _)); } - let mut array = [0; S]; + let mut array = [0; SIZE]; let len = digest.len().min(array.len()); array[..len].copy_from_slice(&digest[..len]); Ok(Self(len, array)) @@ -335,10 +330,10 @@ pub mod identity { use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S as u64|| size > u8::max_value() as u64 { + if size > SIZE as u64 || size > u8::max_value() as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = [0; S]; + let mut digest = [0; SIZE]; r.read_exact(&mut digest[..size as usize])?; Ok(Self(size as usize, digest)) } @@ -357,11 +352,13 @@ pub mod identity { impl Default for IdentityHasher { fn default() -> Self { - Self {i: 0, bytes: [0u8; S]} + Self { + i: 0, + bytes: [0u8; S], + } } } - impl StatefulHasher for IdentityHasher { type Digest = IdentityDigest; @@ -373,7 +370,7 @@ pub mod identity { } fn finalize(&self) -> Self::Digest { - IdentityDigest(self.i, self.bytes.clone()) + IdentityDigest(self.i, self.bytes) } fn reset(&mut self) { @@ -400,15 +397,12 @@ pub mod unknown { #[cfg(feature = "strobe")] pub mod strobe { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::{U32, U64}; use strobe_rs::{SecParam, Strobe}; derive_digest!(StrobeDigest); /// Strobe hasher. 
pub struct StrobeHasher { - _marker: PhantomData, strobe: Strobe, initialized: bool, } @@ -416,16 +410,14 @@ pub mod strobe { impl Default for StrobeHasher { fn default() -> Self { Self { - _marker: PhantomData, strobe: Strobe::new(b"StrobeHash", SecParam::B128), initialized: false, } } } - impl StatefulHasher for StrobeHasher { - type Size = S; - type Digest = StrobeDigest; + impl StatefulHasher for StrobeHasher { + type Digest = StrobeDigest; fn update(&mut self, input: &[u8]) { self.strobe.ad(input, self.initialized); @@ -433,7 +425,7 @@ pub mod strobe { } fn finalize(&self) -> Self::Digest { - let mut hash = GenericArray::default(); + let mut hash = [0; S]; self.strobe.clone().prf(&mut hash, false); Self::Digest::from(hash) } @@ -448,8 +440,8 @@ pub mod strobe { derive_write!(StrobeHasher); /// 256 bit strobe hasher. - pub type Strobe256 = StrobeHasher; + pub type Strobe256 = StrobeHasher<32>; /// 512 bit strobe hasher. - pub type Strobe512 = StrobeHasher; + pub type Strobe512 = StrobeHasher<64>; } diff --git a/src/multihash.rs b/src/multihash.rs index cc10ccf8..9a5556cb 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -1,9 +1,11 @@ -use crate::hasher::{Digest}; +use crate::hasher::Digest; use crate::Error; use core::convert::TryFrom; #[cfg(feature = "std")] use core::convert::TryInto; use core::fmt::Debug; +#[cfg(feature = "serde-codec")] +use serde_big_array::BigArray; /// Trait that implements hashing. /// @@ -39,9 +41,9 @@ pub trait MultihashDigest: /// println!("{:02x?}", hash); /// ``` #[allow(clippy::needless_lifetimes)] - fn multihash_from_digest<'a, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D, const DIGEST_SIZE: usize>(digest: &'a D) -> Multihash where - D: Digest, + D: Digest, Self: From<&'a D>; } @@ -68,7 +70,6 @@ pub trait MultihashDigest: /// ``` #[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))] #[cfg_attr(feature = "serde-codec", derive(serde::Serialize))] -#[cfg_attr(feature = "serde-codec", serde(bound = "S: Size"))] #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] pub struct Multihash { /// The code of the Multihash. @@ -76,11 +77,22 @@ pub struct Multihash { /// The actual size of the digest in bytes (not the allocated size). size: u8, /// The digest. + #[cfg_attr(feature = "serde-codec", serde(with = "BigArray"))] digest: [u8; S], } impl Copy for Multihash {} +impl Default for Multihash { + fn default() -> Self { + Self { + code: 0, + size: 0, + digest: [0; SIZE], + } + } +} + impl Multihash { /// Wraps the digest in a multihash. 
pub fn wrap(code: u64, input_digest: &[u8]) -> Result { @@ -175,7 +187,7 @@ impl From> for Vec { } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::Encode for Multihash { +impl parity_scale_codec::Encode for Multihash<32> { fn encode_to(&self, dest: &mut EncOut) { let mut digest = [0; 32]; digest.copy_from_slice(&self.digest); @@ -186,26 +198,23 @@ impl parity_scale_codec::Encode for Multihash { } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::EncodeLike for Multihash {} +impl parity_scale_codec::EncodeLike for Multihash<32> {} #[cfg(feature = "scale-codec")] -impl parity_scale_codec::Decode for Multihash { +impl parity_scale_codec::Decode for Multihash<32> { fn decode( input: &mut DecIn, ) -> Result { Ok(Multihash { code: parity_scale_codec::Decode::decode(input)?, size: parity_scale_codec::Decode::decode(input)?, - digest: { - let digest = <[u8; 32]>::decode(input)?; - GenericArray::clone_from_slice(&digest) - }, + digest: <[u8; 32]>::decode(input)?, }) } } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::Encode for Multihash { +impl parity_scale_codec::Encode for Multihash<64> { fn encode_to(&self, dest: &mut EncOut) { let mut digest = [0; 64]; digest.copy_from_slice(&self.digest); @@ -216,20 +225,17 @@ impl parity_scale_codec::Encode for Multihash { } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::EncodeLike for Multihash {} +impl parity_scale_codec::EncodeLike for Multihash<64> {} #[cfg(feature = "scale-codec")] -impl parity_scale_codec::Decode for Multihash { +impl parity_scale_codec::Decode for Multihash<64> { fn decode( input: &mut DecIn, ) -> Result { Ok(Multihash { code: parity_scale_codec::Decode::decode(input)?, size: parity_scale_codec::Decode::decode(input)?, - digest: { - let digest = <[u8; 64]>::decode(input)?; - GenericArray::clone_from_slice(&digest) - }, + digest: <[u8; 64]>::decode(input)?, }) } } @@ -298,16 +304,16 @@ mod tests { fn test_scale() { use parity_scale_codec::{Decode, Encode}; - let mh = Multihash::::default(); + let mh = Multihash::<32>::default(); let bytes = mh.encode(); - let mh2: Multihash = Decode::decode(&mut &bytes[..]).unwrap(); + let mh2: Multihash<32> = Decode::decode(&mut &bytes[..]).unwrap(); assert_eq!(mh, mh2); } #[test] #[cfg(feature = "serde-codec")] fn test_serde() { - let mh = Multihash::::default(); + let mh = Multihash::<32>::default(); let bytes = serde_json::to_string(&mh).unwrap(); let mh2 = serde_json::from_str(&bytes).unwrap(); assert_eq!(mh, mh2); diff --git a/src/multihash_impl.rs b/src/multihash_impl.rs index 3de778de..073ce102 100644 --- a/src/multihash_impl.rs +++ b/src/multihash_impl.rs @@ -73,7 +73,7 @@ pub enum Code { // The following hashes are not cryptographically secure hashes and are not enabled by default /// Identity hash (max. 
64 bytes) #[cfg(feature = "identity")] - #[mh(code = 0x00, hasher = crate::IdentityHasher::, digest = crate::IdentityDigest<64>)] + #[mh(code = 0x00, hasher = crate::IdentityHasher::<64>, digest = crate::IdentityDigest<64>)] Identity, } diff --git a/tests/lib.rs b/tests/lib.rs index 56665012..8265b410 100644 --- a/tests/lib.rs +++ b/tests/lib.rs @@ -5,50 +5,49 @@ use multihash::{ Blake2sDigest, Blake3Digest, Blake3_256, Digest, Error, Hasher, Identity256, IdentityDigest, Keccak224, Keccak256, Keccak384, Keccak512, KeccakDigest, MultihashDigest, MultihashGeneric, Sha1, Sha1Digest, Sha2Digest, Sha2_256, Sha2_512, Sha3Digest, Sha3_224, Sha3_256, Sha3_384, - Sha3_512, Size, StatefulHasher, Strobe256, Strobe512, StrobeDigest, U16, U20, U28, U32, U48, - U64, + Sha3_512, StatefulHasher, Strobe256, Strobe512, StrobeDigest, }; #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = U64)] +#[mh(alloc_size = 64)] pub enum Code { - #[mh(code = 0x00, hasher = Identity256, digest = IdentityDigest)] + #[mh(code = 0x00, hasher = Identity256, digest = IdentityDigest<32>)] Identity, - #[mh(code = 0x11, hasher = Sha1, digest = Sha1Digest)] + #[mh(code = 0x11, hasher = Sha1, digest = Sha1Digest<20>)] Sha1, - #[mh(code = 0x12, hasher = Sha2_256, digest = Sha2Digest)] + #[mh(code = 0x12, hasher = Sha2_256, digest = Sha2Digest<32>)] Sha2_256, - #[mh(code = 0x13, hasher = Sha2_512, digest = Sha2Digest)] + #[mh(code = 0x13, hasher = Sha2_512, digest = Sha2Digest<64>)] Sha2_512, - #[mh(code = 0x17, hasher = Sha3_224, digest = Sha3Digest)] + #[mh(code = 0x17, hasher = Sha3_224, digest = Sha3Digest<28>)] Sha3_224, - #[mh(code = 0x16, hasher = Sha3_256, digest = Sha3Digest)] + #[mh(code = 0x16, hasher = Sha3_256, digest = Sha3Digest<32>)] Sha3_256, - #[mh(code = 0x15, hasher = Sha3_384, digest = Sha3Digest)] + #[mh(code = 0x15, hasher = Sha3_384, digest = Sha3Digest<48>)] Sha3_384, - #[mh(code = 0x14, hasher = Sha3_512, digest = Sha3Digest)] + #[mh(code = 0x14, hasher = Sha3_512, digest = Sha3Digest<64>)] Sha3_512, - #[mh(code = 0x1a, hasher = Keccak224, digest = KeccakDigest)] + #[mh(code = 0x1a, hasher = Keccak224, digest = KeccakDigest<28>)] Keccak224, - #[mh(code = 0x1b, hasher = Keccak256, digest = KeccakDigest)] + #[mh(code = 0x1b, hasher = Keccak256, digest = KeccakDigest<32>)] Keccak256, - #[mh(code = 0x1c, hasher = Keccak384, digest = KeccakDigest)] + #[mh(code = 0x1c, hasher = Keccak384, digest = KeccakDigest<48>)] Keccak384, - #[mh(code = 0x1d, hasher = Keccak512, digest = KeccakDigest)] + #[mh(code = 0x1d, hasher = Keccak512, digest = KeccakDigest<64>)] Keccak512, - #[mh(code = 0xb220, hasher = Blake2b256, digest = Blake2bDigest)] + #[mh(code = 0xb220, hasher = Blake2b256, digest = Blake2bDigest<32>)] Blake2b256, - #[mh(code = 0xb240, hasher = Blake2b512, digest = Blake2bDigest)] + #[mh(code = 0xb240, hasher = Blake2b512, digest = Blake2bDigest<64>)] Blake2b512, - #[mh(code = 0xb250, hasher = Blake2s128, digest = Blake2sDigest)] + #[mh(code = 0xb250, hasher = Blake2s128, digest = Blake2sDigest<16>)] Blake2s128, - #[mh(code = 0xb260, hasher = Blake2s256, digest = Blake2sDigest)] + #[mh(code = 0xb260, hasher = Blake2s256, digest = Blake2sDigest<32>)] Blake2s256, - #[mh(code = 0x1e, hasher = Blake3_256, digest = Blake3Digest)] + #[mh(code = 0x1e, hasher = Blake3_256, digest = Blake3Digest<32>)] Blake3_256, - #[mh(code = 0x3312e7, hasher = Strobe256, digest = StrobeDigest)] + #[mh(code = 0x3312e7, hasher = Strobe256, digest = StrobeDigest<16>)] Strobe256, - #[mh(code = 0x3312e8, hasher = 
Strobe512, digest = StrobeDigest)] + #[mh(code = 0x3312e8, hasher = Strobe512, digest = StrobeDigest<32>)] Strobe512, } @@ -204,9 +203,9 @@ fn assert_roundtrip() { } /// Testing the public interface of `Multihash` and coversions to it -fn multihash_methods(code: Code, prefix: &str, digest_str: &str) +fn multihash_methods(code: Code, prefix: &str, digest_str: &str) where - H: StatefulHasher, + H: StatefulHasher, Code: for<'a> From<&'a H::Digest>, { let digest = hex::decode(digest_str).unwrap(); @@ -236,77 +235,81 @@ where #[test] fn test_multihash_methods() { - multihash_methods::(Code::Identity, "000b", "68656c6c6f20776f726c64"); - multihash_methods::( + multihash_methods::( + Code::Identity, + "000b", + "68656c6c6f20776f726c64", + ); + multihash_methods::( Code::Sha1, "1114", "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", ); - multihash_methods::( + multihash_methods::( Code::Sha2_256, "1220", "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9", ); - multihash_methods::( + multihash_methods::( Code::Sha2_512, "1340", "309ecc489c12d6eb4cc40f50c902f2b4d0ed77ee511a7c7a9bcd3ca86d4cd86f989dd35bc5ff499670da34255b45b0cfd830e81f605dcf7dc5542e93ae9cd76f"); - multihash_methods::( + multihash_methods::( Code::Sha3_224, "171C", "dfb7f18c77e928bb56faeb2da27291bd790bc1045cde45f3210bb6c5", ); - multihash_methods::( + multihash_methods::( Code::Sha3_256, "1620", "644bcc7e564373040999aac89e7622f3ca71fba1d972fd94a31c3bfbf24e3938", ); - multihash_methods::( + multihash_methods::( Code::Sha3_384, "1530", "83bff28dde1b1bf5810071c6643c08e5b05bdb836effd70b403ea8ea0a634dc4997eb1053aa3593f590f9c63630dd90b"); - multihash_methods::( + multihash_methods::( Code::Sha3_512, "1440", "840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a"); - multihash_methods::( + multihash_methods::( Code::Keccak224, "1A1C", "25f3ecfebabe99686282f57f5c9e1f18244cfee2813d33f955aae568", ); - multihash_methods::( + multihash_methods::( Code::Keccak256, "1B20", "47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad", ); - multihash_methods::( + multihash_methods::( Code::Keccak384, "1C30", "65fc99339a2a40e99d3c40d695b22f278853ca0f925cde4254bcae5e22ece47e6441f91b6568425adc9d95b0072eb49f"); - multihash_methods::( + multihash_methods::( Code::Keccak512, "1D40", "3ee2b40047b8060f68c67242175660f4174d0af5c01d47168ec20ed619b0b7c42181f40aa1046f39e2ef9efc6910782a998e0013d172458957957fac9405b67d"); - multihash_methods::( + multihash_methods::( Code::Blake2b512, "c0e40240", "021ced8799296ceca557832ab941a50b4a11f83478cf141f51f933f653ab9fbcc05a037cddbed06e309bf334942c4e58cdf1a46e237911ccd7fcf9787cbc7fd0"); - multihash_methods::( + multihash_methods::( Code::Blake2s256, "e0e40220", "9aec6806794561107e594b1f6a8a6b0c92a0cba9acf5e5e93cca06f781813b0b", ); - multihash_methods::( + multihash_methods::( Code::Blake2b256, "a0e40220", "256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610", ); - multihash_methods::( + multihash_methods::( Code::Blake2s128, "d0e40210", "37deae0226c30da2ab424a7b8ee14e83", ); - multihash_methods::( + multihash_methods::( Code::Blake3_256, "1e20", "d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24", From f040069f860076cb47c4da815c5e15dee940fcdd Mon Sep 17 00:00:00 2001 From: mriise Date: Mon, 26 Apr 2021 03:07:31 -0700 Subject: [PATCH 08/18] update proc macro messages and tests --- derive/src/lib.rs | 12 ++++---- derive/src/multihash.rs | 61 ++++++++++++++++++++--------------------- 2 files changed, 36 
insertions(+), 37 deletions(-) diff --git a/derive/src/lib.rs b/derive/src/lib.rs index f616551a..84189842 100644 --- a/derive/src/lib.rs +++ b/derive/src/lib.rs @@ -8,8 +8,8 @@ //! //! If you set `#mh(alloc_size = …)` to a too low value, you will get compiler errors. Please note //! the the sizes are checked only on a syntactic level and *not* on the type level. This means -//! that digest need to have a size generic, which is a valid `typenum`, for example `U32` or -//! `generic_array::typenum::U64`. +//! that digest need to have a size const generic, which is a valid `usize`, for example `32` or +//! `64`. //! //! You can disable those compiler errors with setting the `no_alloc_size_errors` attribute. This //! can be useful if you e.g. have specified type aliases for your hash digests and you are sure @@ -19,14 +19,14 @@ //! //! ``` //! use multihash::derive::Multihash; -//! use multihash::{MultihashDigest, U32, U64}; +//! use multihash::MultihashDigest; //! //! #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -//! #[mh(alloc_size = U64)] +//! #[mh(alloc_size = 64)] //! pub enum Code { -//! #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] +//! #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] //! Foo, -//! #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest)] +//! #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest<64>)] //! Bar, //! } //! diff --git a/derive/src/multihash.rs b/derive/src/multihash.rs index 4f5a1a9c..986bd620 100644 --- a/derive/src/multihash.rs +++ b/derive/src/multihash.rs @@ -143,7 +143,7 @@ impl<'a> From<&'a VariantInfo<'a>> for Hash { proc_macro_error::abort!(ident, msg); }); let digest = digest.unwrap_or_else(|| { - let msg = "Missing digest atttibute: e.g. #[mh(digest = multihash::Sha2Digest)]"; + let msg = "Missing digest atttibute: e.g. #[mh(digest = multihash::Sha2Digest<32>)]"; #[cfg(test)] panic!(msg); #[cfg(not(test))] @@ -181,7 +181,7 @@ fn parse_code_enum_attrs(ast: &syn::DeriveInput) -> (syn::LitInt, bool) { match alloc_size { Some(alloc_size) => (alloc_size, no_alloc_size_errors), None => { - let msg = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = U64)]"; + let msg = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = 64)]"; #[cfg(test)] panic!(msg); #[cfg(not(test))] @@ -283,7 +283,7 @@ fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::LitInt) { }); if let Err(_error) = maybe_error { - let msg = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`"; + let msg = "Invalid byte size. It must be a unsigned integer typenum, e.g. `32`"; #[cfg(test)] panic!(msg); #[cfg(not(test))] @@ -369,21 +369,20 @@ mod tests { fn test_multihash_derive() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { - #[mh(code = multihash::IDENTITY, hasher = multihash::Identity256, digest = multihash::IdentityDigest)] + #[mh(code = multihash::IDENTITY, hasher = multihash::Identity256, digest = multihash::IdentityDigest<32>)] Identity256, /// Multihash array for hash function. - #[mh(code = 0x38b64f, hasher = multihash::Strobe256, digest = multihash::StrobeDigest)] + #[mh(code = 0x38b64f, hasher = multihash::Strobe256, digest = multihash::StrobeDigest<32>)] Strobe256, } }; let expected = quote! { /// A Multihash with the same allocated size as the Multihashes produces by this derive. 
- pub type Multihash = multihash::MultihashGeneric::; + pub type Multihash = multihash::MultihashGeneric::<32>; - impl multihash::MultihashDigest for Code { - type AllocSize = U32; + impl multihash::MultihashDigest<32> for Code { fn digest(&self, input: &[u8]) -> Multihash { use multihash::Hasher; @@ -433,13 +432,13 @@ mod tests { } } - impl From<&multihash::IdentityDigest > for Code { - fn from(digest: &multihash::IdentityDigest) -> Self { + impl From<&multihash::IdentityDigest<32> > for Code { + fn from(digest: &multihash::IdentityDigest<32>) -> Self { Self::Identity256 } } - impl From<&multihash::StrobeDigest > for Code { - fn from(digest: &multihash::StrobeDigest) -> Self { + impl From<&multihash::StrobeDigest<32> > for Code { + fn from(digest: &multihash::StrobeDigest<32>) -> Self { Self::Strobe256 } } @@ -457,11 +456,11 @@ mod tests { fn test_multihash_error_code_duplicates() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U64)] + #[mh(alloc_size = 64)] pub enum Multihash { - #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, - #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, } }; @@ -475,11 +474,11 @@ mod tests { fn test_multihash_error_code_duplicates_numbers() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, } }; @@ -490,13 +489,13 @@ mod tests { #[test] #[should_panic( - expected = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = U64)]" + expected = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = 64)]" )] fn test_multihash_error_no_alloc_size() { let input = quote! { #[derive(Clone, Multihash)] pub enum Code { - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Sha2_256, } }; @@ -507,14 +506,14 @@ mod tests { #[test] #[should_panic( - expected = "The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (U32)" + expected = "The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (32)" )] fn test_multihash_error_too_small_alloc_size() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U16)] + #[mh(alloc_size = 16)] pub enum Code { - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Sha2_256, } }; @@ -525,12 +524,12 @@ mod tests { #[test] #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" + expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `32`" )] fn test_multihash_error_digest_invalid_size_type() { let input = quote! 
{ #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] Sha2_256, @@ -543,12 +542,12 @@ mod tests { #[test] #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" + expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `32`" )] fn test_multihash_error_digest_invalid_size_type2() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<_>)] Sha2_256, @@ -561,12 +560,12 @@ mod tests { #[test] #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" + expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `32`" )] fn test_multihash_error_digest_without_typenum() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = Sha2_256Digest)] Sha2_256, @@ -582,7 +581,7 @@ mod tests { fn test_multihash_error_digest_without_typenum_no_alloc_size_errors() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32, no_alloc_size_errors)] + #[mh(alloc_size = 32, no_alloc_size_errors)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = Sha2_256Digest)] Sha2_256, From 0354a47f672e737ee73c3747022f0207adaddff0 Mon Sep 17 00:00:00 2001 From: mriise Date: Mon, 26 Apr 2021 21:42:28 -0700 Subject: [PATCH 09/18] fix syntax and wordings --- README.md | 12 +++++++----- derive/src/multihash.rs | 12 ++++++------ src/hasher.rs | 10 +++++----- src/hasher_impl.rs | 12 ++++++------ src/multihash.rs | 8 ++++---- 5 files changed, 28 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index c784720d..b919132f 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,8 @@ multihash = "*" Then run `cargo build`. +MSRV 1.51.0 due to use of const generics + ## Usage ```rust @@ -51,14 +53,14 @@ You can derive your own application specific code table: ```rust use multihash::derive::Multihash; -use multihash::{MultihashCode, U32, U64}; +use multihash::MultihashCode; #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = U64)] +#[mh(alloc_size = 64)] pub enum Code { - #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Foo, - #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest)] + #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest<64>)] Bar, } @@ -75,7 +77,7 @@ fn main() { * `SHA2-512` * `SHA3`/`Keccak` * `Blake2b-256`/`Blake2b-512`/`Blake2s-128`/`Blake2s-256` -* `Blake3` +* `Blake3`(256 only) * `Strobe` ## Maintainers diff --git a/derive/src/multihash.rs b/derive/src/multihash.rs index 986bd620..a5653518 100644 --- a/derive/src/multihash.rs +++ b/derive/src/multihash.rs @@ -283,7 +283,7 @@ fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::LitInt) { }); if let Err(_error) = maybe_error { - let msg = "Invalid byte size. It must be a unsigned integer typenum, e.g. `32`"; + let msg = "Invalid byte size. It must be a unsigned integer, e.g. `32`"; #[cfg(test)] panic!(msg); #[cfg(not(test))] @@ -524,7 +524,7 @@ mod tests { #[test] #[should_panic( - expected = "Invalid byte size. 
It must be a unsigned integer typenum, e.g. `32`" + expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`" )] fn test_multihash_error_digest_invalid_size_type() { let input = quote! { @@ -542,7 +542,7 @@ mod tests { #[test] #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `32`" + expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`" )] fn test_multihash_error_digest_invalid_size_type2() { let input = quote! { @@ -560,9 +560,9 @@ mod tests { #[test] #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `32`" + expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`" )] - fn test_multihash_error_digest_without_typenum() { + fn test_multihash_error_digest_without_size() { let input = quote! { #[derive(Clone, Multihash)] #[mh(alloc_size = 32)] @@ -578,7 +578,7 @@ mod tests { // This one does not panic, die to `no_alloc_size_errors` #[test] - fn test_multihash_error_digest_without_typenum_no_alloc_size_errors() { + fn test_multihash_error_digest_without_size_no_alloc_size_errors() { let input = quote! { #[derive(Clone, Multihash)] #[mh(alloc_size = 32, no_alloc_size_errors)] diff --git a/src/hasher.rs b/src/hasher.rs index d3c9884a..ad0b7018 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -51,9 +51,9 @@ pub trait Digest: } /// Trait implemented by a hash function implementation. -pub trait StatefulHasher: Default + Send + Sync { +pub trait StatefulHasher: Default + Send + Sync { /// The Digest type to distinguish the output of different `Hasher` implementations. - type Digest: Digest; + type Digest: Digest; /// Consume input and update internal state. fn update(&mut self, input: &[u8]); @@ -88,12 +88,12 @@ pub trait StatefulHasher: Default + Send + Sync { /// [Multihashes]: https://github.com/multiformats/multihash /// [associated type]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#specifying-placeholder-types-in-trait-definitions-with-associated-types /// [`MultihashDigest`]: crate::MultihashDigest -pub trait Hasher: Default + Send + Sync { +pub trait Hasher: Default + Send + Sync { /// The Digest type to distinguish the output of different `Hasher` implementations. - type Digest: Digest; + type Digest: Digest; ///the allocated size of the digest. - const SIZE: usize = SIZE; + const SIZE: usize = S; /// Hashes the given `input` data and returns its hash digest. fn digest(input: &[u8]) -> Self::Digest diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index aa65a7d1..fe9e2002 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -305,16 +305,16 @@ pub mod identity { } } - impl Digest for IdentityDigest { - const SIZE: usize = SIZE; + impl Digest for IdentityDigest { + const SIZE: usize = S; // A custom implementation is needed as an identity hash value might be shorter than the // allocated Digest. 
fn wrap(digest: &[u8]) -> Result { - if digest.len() > SIZE { + if digest.len() > S { return Err(Error::InvalidSize(digest.len() as _)); } - let mut array = [0; SIZE]; + let mut array = [0; S]; let len = digest.len().min(array.len()); array[..len].copy_from_slice(&digest[..len]); Ok(Self(len, array)) @@ -330,10 +330,10 @@ pub mod identity { use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > SIZE as u64 || size > u8::max_value() as u64 { + if size > S as u64 || size > u8::max_value() as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = [0; SIZE]; + let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; Ok(Self(size as usize, digest)) } diff --git a/src/multihash.rs b/src/multihash.rs index 9a5556cb..a65ebcca 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -71,24 +71,24 @@ pub trait MultihashDigest: #[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))] #[cfg_attr(feature = "serde-codec", derive(serde::Serialize))] #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] -pub struct Multihash { +pub struct Multihash { /// The code of the Multihash. code: u64, /// The actual size of the digest in bytes (not the allocated size). size: u8, /// The digest. #[cfg_attr(feature = "serde-codec", serde(with = "BigArray"))] - digest: [u8; S], + digest: [u8; SIZE], } impl Copy for Multihash {} -impl Default for Multihash { +impl Default for Multihash { fn default() -> Self { Self { code: 0, size: 0, - digest: [0; SIZE], + digest: [0; S], } } } From 616bc452da6a043307dad9cb4e719a58ae983d5c Mon Sep 17 00:00:00 2001 From: mriise Date: Mon, 26 Apr 2021 22:49:51 -0700 Subject: [PATCH 10/18] cargo fmt" --- derive/src/multihash.rs | 12 +++--------- src/lib.rs | 2 +- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/derive/src/multihash.rs b/derive/src/multihash.rs index a5653518..2875730f 100644 --- a/derive/src/multihash.rs +++ b/derive/src/multihash.rs @@ -523,9 +523,7 @@ mod tests { } #[test] - #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`" - )] + #[should_panic(expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`")] fn test_multihash_error_digest_invalid_size_type() { let input = quote! { #[derive(Clone, Multihash)] @@ -541,9 +539,7 @@ mod tests { } #[test] - #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`" - )] + #[should_panic(expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`")] fn test_multihash_error_digest_invalid_size_type2() { let input = quote! { #[derive(Clone, Multihash)] @@ -559,9 +555,7 @@ mod tests { } #[test] - #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`" - )] + #[should_panic(expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`")] fn test_multihash_error_digest_without_size() { let input = quote! { #[derive(Clone, Multihash)] diff --git a/src/lib.rs b/src/lib.rs index abb39cbb..87cb1395 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -49,7 +49,7 @@ //! [Serde]: https://serde.rs //! 
[SCALE Codec]: https://github.com/paritytech/parity-scale-codec -#![deny(missing_docs)] +#![deny(missing_docs, warnings, unsafe_code)] #![cfg_attr(not(feature = "std"), no_std)] #[cfg(any(test, feature = "arb"))] From 78bc734decd0b12bba2bf502dae67377115bdb02 Mon Sep 17 00:00:00 2001 From: mriise Date: Wed, 28 Apr 2021 00:49:20 -0700 Subject: [PATCH 11/18] make clippy happy, deny unsafe --- derive/src/multihash.rs | 4 ++-- src/lib.rs | 2 +- src/multihash.rs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/derive/src/multihash.rs b/derive/src/multihash.rs index 0d5ea8e2..6e687ed1 100644 --- a/derive/src/multihash.rs +++ b/derive/src/multihash.rs @@ -152,8 +152,8 @@ impl<'a> From<&'a VariantInfo<'a>> for Hash { Self { ident, code, - digest, hasher, + digest, } } } @@ -257,7 +257,7 @@ fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::LitInt) { let msg = format!("The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size ({})", max_digest_size); #[cfg(test)] - panic!(msg); + panic!("{}", msg); #[cfg(not(test))] { let digest = &hash.digest.to_token_stream().to_string().replace(" ", ""); diff --git a/src/lib.rs b/src/lib.rs index 87cb1395..619b98b1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -49,7 +49,7 @@ //! [Serde]: https://serde.rs //! [SCALE Codec]: https://github.com/paritytech/parity-scale-codec -#![deny(missing_docs, warnings, unsafe_code)] +#![deny(missing_docs, unsafe_code)] #![cfg_attr(not(feature = "std"), no_std)] #[cfg(any(test, feature = "arb"))] diff --git a/src/multihash.rs b/src/multihash.rs index a65ebcca..e8808ca3 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -162,7 +162,7 @@ impl Multihash { /// Returns the bytes of a multihash. #[cfg(feature = "std")] - pub fn to_bytes(&self) -> Vec { + pub fn to_bytes(self) -> Vec { let mut bytes = Vec::with_capacity(self.size().into()); self.write(&mut bytes) .expect("writing to a vec should never fail"); From 415d110953af57af81bbaef1058c9049469eb935 Mon Sep 17 00:00:00 2001 From: mriise Date: Tue, 8 Jun 2021 00:51:28 -0700 Subject: [PATCH 12/18] rename SIZE to S --- src/hasher.rs | 2 +- src/multihash.rs | 10 +++++----- tests/lib.rs | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/hasher.rs b/src/hasher.rs index ad0b7018..628f0d05 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -101,7 +101,7 @@ pub trait Hasher: Default + Send + Sync { Self: Sized; } -impl, const SIZE: usize> Hasher for T { +impl, const S: usize> Hasher for T { type Digest = T::Digest; fn digest(input: &[u8]) -> Self::Digest { diff --git a/src/multihash.rs b/src/multihash.rs index e8808ca3..b7cdf65d 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -12,7 +12,7 @@ use serde_big_array::BigArray; /// It is usually implemented by a custom code table enum that derives the [`Multihash` derive]. /// /// [`Multihash` derive]: crate::derive -pub trait MultihashDigest: +pub trait MultihashDigest: TryFrom + Into + Send + Sync + Unpin + Copy + Eq + Debug + 'static { /// Calculate the hash of some input data. @@ -26,7 +26,7 @@ pub trait MultihashDigest: /// let hash = Code::Sha3_256.digest(b"Hello world!"); /// println!("{:02x?}", hash); /// ``` - fn digest(&self, input: &[u8]) -> Multihash; + fn digest(&self, input: &[u8]) -> Multihash; /// Create a multihash from an existing [`Digest`]. 
/// @@ -41,7 +41,7 @@ pub trait MultihashDigest: /// println!("{:02x?}", hash); /// ``` #[allow(clippy::needless_lifetimes)] - fn multihash_from_digest<'a, D, const DIGEST_SIZE: usize>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D, const DIGEST_SIZE: usize>(digest: &'a D) -> Multihash where D: Digest, Self: From<&'a D>; @@ -71,14 +71,14 @@ pub trait MultihashDigest: #[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))] #[cfg_attr(feature = "serde-codec", derive(serde::Serialize))] #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] -pub struct Multihash { +pub struct Multihash { /// The code of the Multihash. code: u64, /// The actual size of the digest in bytes (not the allocated size). size: u8, /// The digest. #[cfg_attr(feature = "serde-codec", serde(with = "BigArray"))] - digest: [u8; SIZE], + digest: [u8; S], } impl Copy for Multihash {} diff --git a/tests/lib.rs b/tests/lib.rs index 8265b410..0b63209c 100644 --- a/tests/lib.rs +++ b/tests/lib.rs @@ -203,9 +203,9 @@ fn assert_roundtrip() { } /// Testing the public interface of `Multihash` and coversions to it -fn multihash_methods(code: Code, prefix: &str, digest_str: &str) +fn multihash_methods(code: Code, prefix: &str, digest_str: &str) where - H: StatefulHasher, + H: StatefulHasher, Code: for<'a> From<&'a H::Digest>, { let digest = hex::decode(digest_str).unwrap(); From a9d5c0822da6b69b04689763ae2373176add4908 Mon Sep 17 00:00:00 2001 From: mriise Date: Tue, 8 Jun 2021 01:12:51 -0700 Subject: [PATCH 13/18] update parity codec --- Cargo.toml | 2 +- src/multihash.rs | 44 +++++++++----------------------------------- 2 files changed, 10 insertions(+), 36 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 22acc48b..d49dd492 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ sha3 = ["digest", "sha-3"] strobe = ["strobe-rs"] [dependencies] -parity-scale-codec = { version = "1.3.5", optional = true, default-features = false, features = ["derive"] } +parity-scale-codec = { version = "2.1.1", default-features = false, features = ["derive"], optional = true } quickcheck = { version = "0.9.2", optional = true } rand = { version = "0.7.3", optional = true } serde = { version = "1.0.116", optional = true, default-features = false, features = ["derive"] } diff --git a/src/multihash.rs b/src/multihash.rs index b7cdf65d..2310440c 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -187,58 +187,32 @@ impl From> for Vec { } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::Encode for Multihash<32> { - fn encode_to(&self, dest: &mut EncOut) { - let mut digest = [0; 32]; +impl parity_scale_codec::Encode for Multihash { + fn encode_to(&self, dest: &mut EncOut) { + let mut digest = [0; S]; digest.copy_from_slice(&self.digest); - dest.push(&self.code); - dest.push(&self.size); - dest.push(&digest); + self.code.encode_to(dest); + self.size.encode_to(dest); + digest.encode_to(dest); } } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::EncodeLike for Multihash<32> {} +impl parity_scale_codec::EncodeLike for Multihash {} #[cfg(feature = "scale-codec")] -impl parity_scale_codec::Decode for Multihash<32> { +impl parity_scale_codec::Decode for Multihash { fn decode( input: &mut DecIn, ) -> Result { Ok(Multihash { code: parity_scale_codec::Decode::decode(input)?, size: parity_scale_codec::Decode::decode(input)?, - digest: <[u8; 32]>::decode(input)?, + digest: <[u8; S]>::decode(input)?, }) } } -#[cfg(feature = "scale-codec")] -impl parity_scale_codec::Encode for Multihash<64> { - fn encode_to(&self, 
dest: &mut EncOut) { - let mut digest = [0; 64]; - digest.copy_from_slice(&self.digest); - dest.push(&self.code); - dest.push(&self.size); - dest.push(&digest); - } -} - -#[cfg(feature = "scale-codec")] -impl parity_scale_codec::EncodeLike for Multihash<64> {} - -#[cfg(feature = "scale-codec")] -impl parity_scale_codec::Decode for Multihash<64> { - fn decode( - input: &mut DecIn, - ) -> Result { - Ok(Multihash { - code: parity_scale_codec::Decode::decode(input)?, - size: parity_scale_codec::Decode::decode(input)?, - digest: <[u8; 64]>::decode(input)?, - }) - } -} /// Writes the multihash to a byte stream. #[cfg(feature = "std")] From 5cea7ed168265c3fb2ae9a7babce54567581b7f4 Mon Sep 17 00:00:00 2001 From: mriise Date: Thu, 10 Jun 2021 21:06:21 -0700 Subject: [PATCH 14/18] rewrite sha finalize() --- src/hasher_impl.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index fe9e2002..05676059 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -205,12 +205,10 @@ macro_rules! derive_hasher_sha { fn finalize(&self) -> Self::Digest { use digest::Digest; - // TODO: this extra array seems excessive to convert from a generic array - let a = self.state.clone().finalize(); - let b = a.as_slice(); + let digest = self.state.clone().finalize(); let mut array = [0; $size]; - array.copy_from_slice(b); - Self::Digest::from(array) + array.copy_from_slice(digest.as_slice()); + array.into() } fn reset(&mut self) { From 65f31276e86a5e8d3cf1e789940ce8cb8c533641 Mon Sep 17 00:00:00 2001 From: mriise Date: Thu, 10 Jun 2021 21:09:22 -0700 Subject: [PATCH 15/18] raise limit of varint to u16, use associated const --- src/hasher.rs | 2 +- src/hasher_impl.rs | 2 +- src/multihash.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/hasher.rs b/src/hasher.rs index 628f0d05..5d96ac79 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -41,7 +41,7 @@ pub trait Digest: use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S as u64 || size > u8::max_value() as u64 { + if size > S as u64 || size > u16::MAX as u64 { return Err(Error::InvalidSize(size)); } let mut digest = [0; S]; diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index 05676059..f259e0f6 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -328,7 +328,7 @@ pub mod identity { use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S as u64 || size > u8::max_value() as u64 { + if size > S as u64 || size > u16::MAX as u64 { return Err(Error::InvalidSize(size)); } let mut digest = [0; S]; diff --git a/src/multihash.rs b/src/multihash.rs index 93a311a1..9be60450 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -249,7 +249,7 @@ where let code = read_u64(&mut r)?; let size = read_u64(&mut r)?; - if size > S as u64 || size > u8::MAX as u64 { + if size > S as u64 || size > u16::MAX as u64 { return Err(Error::InvalidSize(size)); } From 7adfd4be281504b6504cccc7d37847388646ede5 Mon Sep 17 00:00:00 2001 From: mriise Date: Thu, 10 Jun 2021 21:29:04 -0700 Subject: [PATCH 16/18] derive copy for Multihash & Digest --- src/hasher_impl.rs | 2 +- src/multihash.rs | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index f259e0f6..7f8b2aa5 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -5,7 +5,7 @@ use core::convert::TryFrom; macro_rules! derive_digest { ($name:ident) => { /// Multihash digest. 
- #[derive(Clone, Debug, Eq, Hash, PartialEq)] + #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub struct $name([u8; S]); impl Default for $name { diff --git a/src/multihash.rs b/src/multihash.rs index 9be60450..ecec939c 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -70,7 +70,7 @@ pub trait MultihashDigest: /// ``` #[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))] #[cfg_attr(feature = "serde-codec", derive(serde::Serialize))] -#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] +#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] pub struct Multihash { /// The code of the Multihash. code: u64, @@ -81,8 +81,6 @@ pub struct Multihash { digest: [u8; S], } -impl Copy for Multihash {} - impl Default for Multihash { fn default() -> Self { Self { From 9162fe3da6786f5fe2af4a84270d601b88bb0261 Mon Sep 17 00:00:00 2001 From: mriise Date: Mon, 14 Jun 2021 03:20:04 -0700 Subject: [PATCH 17/18] revert max size --- src/hasher.rs | 2 +- src/hasher_impl.rs | 2 +- src/multihash.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/hasher.rs b/src/hasher.rs index 5d96ac79..63c73827 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -41,7 +41,7 @@ pub trait Digest: use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S as u64 || size > u16::MAX as u64 { + if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } let mut digest = [0; S]; diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index 7f8b2aa5..8fb44400 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -328,7 +328,7 @@ pub mod identity { use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S as u64 || size > u16::MAX as u64 { + if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } let mut digest = [0; S]; diff --git a/src/multihash.rs b/src/multihash.rs index ecec939c..e7ffffab 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -247,7 +247,7 @@ where let code = read_u64(&mut r)?; let size = read_u64(&mut r)?; - if size > S as u64 || size > u16::MAX as u64 { + if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } From 750f5be2bf20aaef006ae4bfdddf634952531240 Mon Sep 17 00:00:00 2001 From: mriise Date: Mon, 14 Jun 2021 03:23:25 -0700 Subject: [PATCH 18/18] dont update README in this pr" --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5666b90e..b0835557 100644 --- a/README.md +++ b/README.md @@ -76,7 +76,7 @@ fn main() { * `SHA2-512` * `SHA3`/`Keccak` * `Blake2b-256`/`Blake2b-512`/`Blake2s-128`/`Blake2s-256` -* `Blake3`(256 only) +* `Blake3` * `Strobe` ## Maintainers
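
---

As a quick, self-contained illustration of the const-generics pattern this series adopts (requiring Rust 1.51.0, per the README note above), the sketch below mirrors the length check that `Digest::wrap` performs in `src/hasher.rs` after the migration: a plain `[u8; S]` replaces `GenericArray<u8, S>`, and `S as u8` replaces `S::to_u8()`. This is only a sketch of the pattern, not the crate's code; the names used here (`ExampleDigest`, its `wrap`, the `usize` error type) are illustrative and not part of the crate's public API.

```rust
/// Minimal sketch of the const-generics pattern used throughout this series:
/// a fixed-size, stack-allocated digest whose size is a `const` parameter
/// instead of a `typenum` marker type.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
struct ExampleDigest<const S: usize>([u8; S]);

impl<const S: usize> ExampleDigest<S> {
    /// Wraps a byte slice, mirroring the length check done by `Digest::wrap`
    /// in `src/hasher.rs` (exact length required; the fixed-size array stands
    /// in for the removed `GenericArray`).
    fn wrap(digest: &[u8]) -> Result<Self, usize> {
        if digest.len() != S {
            return Err(digest.len());
        }
        let mut array = [0u8; S];
        array.copy_from_slice(digest);
        Ok(Self(array))
    }

    /// The allocated size is now just the const parameter, replacing the
    /// `S::to_u8()` call on the old `Unsigned` size type.
    fn size(&self) -> u8 {
        S as u8
    }
}

fn main() {
    // A 32-byte digest, the size previously written as `U32`.
    let bytes = [0xab_u8; 32];
    let digest = ExampleDigest::<32>::wrap(&bytes).unwrap();
    assert_eq!(digest.size(), 32);

    // Wrapping a slice of the wrong length fails, as before.
    assert!(ExampleDigest::<32>::wrap(&[0u8; 16]).is_err());
}
```

The same `const S: usize` parameter also flows through `StatefulHasher<S>`, `Hasher<S>` and `Multihash<S>` in the later patches, which is what allows the size-specific SCALE-codec impls for `Multihash<32>` and `Multihash<64>` to be collapsed into a single generic impl in patch 13.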