From b6626dac9261a70109535a9a9c78f9598433431c Mon Sep 17 00:00:00 2001
From: m6xwzzz
Date: Sat, 4 Oct 2025 09:32:03 +0800
Subject: [PATCH] chore: Fix typos

Rename the misspelled `is_lexographically_largest` flag to
`is_lexicographically_largest` throughout the BLS12-381 serialization
helpers (g1.rs, g2.rs, util.rs), and correct "infaillible" to
"infallible" in two expect messages in serialize/src/lib.rs.

---
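[Reviewer note, ignored by `git am`: the snippet below is a standalone
sketch of the flag-byte convention that `EncodingFlags` implements; the
bit positions are read off the `encode_flags`/`get_flags` bodies in the
util.rs hunks of this patch, but the snippet itself is not repository
code.]

    fn main() {
        // Byte 0 of a serialized BLS12-381 point carries three flags:
        //   bit 7: point is compressed
        //   bit 6: point is at infinity
        //   bit 5: y is the lexicographically largest of {y, -y}
        let mut byte0 = 0u8;
        byte0 |= 1 << 7; // compressed
        byte0 |= 1 << 5; // lexicographically largest y
        assert_eq!(byte0, 0b1010_0000);

        // Decoding mirrors EncodingFlags::get_flags: the sort bit is
        // only valid on a compressed, non-infinity encoding.
        let is_compressed = (byte0 >> 7) & 1 == 1;
        let is_infinity = (byte0 >> 6) & 1 == 1;
        let is_lexicographically_largest = (byte0 >> 5) & 1 == 1;
        assert!(!(is_lexicographically_largest && (!is_compressed || is_infinity)));
    }
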
 curves/bls12_381/src/curves/g1.rs   |  2 +-
 curves/bls12_381/src/curves/g2.rs   |  2 +-
 curves/bls12_381/src/curves/util.rs | 14 +++++++-------
 serialize/src/lib.rs                |  4 ++--
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/curves/bls12_381/src/curves/g1.rs b/curves/bls12_381/src/curves/g1.rs
index d4e378732..77c5c97a6 100644
--- a/curves/bls12_381/src/curves/g1.rs
+++ b/curves/bls12_381/src/curves/g1.rs
@@ -120,7 +120,7 @@ impl SWCurveConfig for Config {
         let encoding = EncodingFlags {
             is_compressed: compress == ark_serialize::Compress::Yes,
             is_infinity: item.is_zero(),
-            is_lexographically_largest: item.y > -item.y,
+            is_lexicographically_largest: item.y > -item.y,
         };
         let mut p = *item;
         if encoding.is_infinity {
diff --git a/curves/bls12_381/src/curves/g2.rs b/curves/bls12_381/src/curves/g2.rs
index 134a3e289..c5e437a39 100644
--- a/curves/bls12_381/src/curves/g2.rs
+++ b/curves/bls12_381/src/curves/g2.rs
@@ -147,7 +147,7 @@ impl SWCurveConfig for Config {
         let encoding = EncodingFlags {
             is_compressed: compress == ark_serialize::Compress::Yes,
             is_infinity: item.is_zero(),
-            is_lexographically_largest: item.y > -item.y,
+            is_lexicographically_largest: item.y > -item.y,
         };
         let mut p = *item;
         if encoding.is_infinity {
diff --git a/curves/bls12_381/src/curves/util.rs b/curves/bls12_381/src/curves/util.rs
index 5e35e5d24..9d14ba29b 100644
--- a/curves/bls12_381/src/curves/util.rs
+++ b/curves/bls12_381/src/curves/util.rs
@@ -10,7 +10,7 @@ pub const G2_SERIALIZED_SIZE: usize = 96;
 pub struct EncodingFlags {
     pub is_compressed: bool,
     pub is_infinity: bool,
-    pub is_lexographically_largest: bool,
+    pub is_lexicographically_largest: bool,
 }
 
 impl EncodingFlags {
@@ -22,16 +22,16 @@ impl EncodingFlags {
 
         let is_compressed = compression_flag_set == 1;
         let is_infinity = infinity_flag_set == 1;
-        let is_lexographically_largest = sort_flag_set == 1;
+        let is_lexicographically_largest = sort_flag_set == 1;
 
-        if is_lexographically_largest && (!is_compressed || is_infinity) {
+        if is_lexicographically_largest && (!is_compressed || is_infinity) {
             return Err(SerializationError::InvalidData);
         }
 
         Ok(Self {
             is_compressed,
             is_infinity,
-            is_lexographically_largest,
+            is_lexicographically_largest,
         })
     }
 
@@ -45,7 +45,7 @@ impl EncodingFlags {
             bytes[0] |= 1 << 6;
         }
 
-        if self.is_compressed && !self.is_infinity && self.is_lexographically_largest {
+        if self.is_compressed && !self.is_infinity && self.is_lexicographically_largest {
             bytes[0] |= 1 << 5;
         }
     }
@@ -130,7 +130,7 @@ pub(crate) fn read_g1_compressed(
     }
 
     let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
-    let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
+    let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexicographically_largest)
         .ok_or(SerializationError::InvalidData)?;
 
     Ok(p)
@@ -202,7 +202,7 @@ pub(crate) fn read_g2_compressed(
     let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
 
     let x = Fq2::new(xc0, xc1);
-    let p = G2Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
+    let p = G2Affine::get_point_from_x_unchecked(x, flags.is_lexicographically_largest)
         .ok_or(SerializationError::InvalidData)?;
 
     Ok(p)
diff --git a/serialize/src/lib.rs b/serialize/src/lib.rs
index 87db8b94b..3376dbb06 100644
--- a/serialize/src/lib.rs
+++ b/serialize/src/lib.rs
@@ -268,14 +268,14 @@ pub trait CanonicalSerializeHashExt: CanonicalSerialize {
     fn hash<H: Digest>(&self) -> GenericArray<u8, <H as OutputSizeUser>::OutputSize> {
         let mut hasher = H::new();
         self.serialize_compressed(HashMarshaller(&mut hasher))
-            .expect("HashMarshaller::flush should be infaillible!");
+            .expect("HashMarshaller::flush should be infallible!");
         hasher.finalize()
     }
 
     fn hash_uncompressed<H: Digest>(&self) -> GenericArray<u8, <H as OutputSizeUser>::OutputSize> {
         let mut hasher = H::new();
         self.serialize_uncompressed(HashMarshaller(&mut hasher))
-            .expect("HashMarshaller::flush should be infaillible!");
+            .expect("HashMarshaller::flush should be infallible!");
         hasher.finalize()
     }
 }
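
[Reviewer note, not part of the patch: a hypothetical usage sketch of the
`CanonicalSerializeHashExt::hash` method touched above, assuming the
`ark_serialize` and `sha2` (digest 0.10) crates; `digest_of` and the
choice of hash are illustrative, not repository code.]

    use ark_serialize::CanonicalSerializeHashExt;
    use sha2::Sha256;

    // Hash any canonically serializable value: hash() writes the
    // compressed encoding into the hasher via HashMarshaller, whose
    // Write impl cannot fail -- hence the "infallible" expect message
    // fixed above.
    fn digest_of<T: CanonicalSerializeHashExt>(value: &T) -> Vec<u8> {
        value.hash::<Sha256>().to_vec()
    }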