From 1b63afc50997388207fb2f68d05e52f5f214e851 Mon Sep 17 00:00:00 2001
From: moana
Date: Mon, 16 Oct 2023 18:44:24 +0200
Subject: [PATCH] Fix clippy warnings

Resolves #774
---
 CHANGELOG.md                       |  2 +
 src/commitment_scheme/kzg10/key.rs |  2 +-
 src/commitment_scheme/kzg10/srs.rs |  2 +-
 src/composer.rs                    | 24 +++++-----
 src/composer/compiler.rs           |  6 +--
 src/composer/compiler/compress.rs  | 74 ++++++++++++------------------
 src/composer/prover.rs             |  5 +-
 src/fft/evaluations.rs             |  5 +-
 src/fft/polynomial.rs              | 11 ++---
 src/runtime.rs                     |  7 +++
 10 files changed, 66 insertions(+), 72 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 18724bd0..901fdad6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Changed
 
 - Update `criterion` dev-dependency to 0.5
+- Fix clippy warnings [#774]
 
 ## [0.16.0] - 2023-10-11
 
@@ -515,6 +516,7 @@ is necessary since `rkyv/validation` was required as a bound.
 
 - Proof system module.
 
+[#774]: https://github.com/dusk-network/plonk/issues/774
 [#763]: https://github.com/dusk-network/plonk/issues/763
 [#760]: https://github.com/dusk-network/plonk/issues/760
 [#752]: https://github.com/dusk-network/plonk/pull/752
diff --git a/src/commitment_scheme/kzg10/key.rs b/src/commitment_scheme/kzg10/key.rs
index fa1d2054..cebe989b 100644
--- a/src/commitment_scheme/kzg10/key.rs
+++ b/src/commitment_scheme/kzg10/key.rs
@@ -117,7 +117,7 @@ impl CommitKey {
     pub fn from_slice(bytes: &[u8]) -> Result<CommitKey, Error> {
         let powers_of_g = bytes
             .chunks(G1Affine::SIZE)
-            .map(|chunk| G1Affine::from_slice(chunk))
+            .map(G1Affine::from_slice)
             .collect::<Result<Vec<G1Affine>, dusk_bytes::Error>>()?;
 
         Ok(CommitKey { powers_of_g })
diff --git a/src/commitment_scheme/kzg10/srs.rs b/src/commitment_scheme/kzg10/srs.rs
index c9a56aea..55a18595 100644
--- a/src/commitment_scheme/kzg10/srs.rs
+++ b/src/commitment_scheme/kzg10/srs.rs
@@ -65,7 +65,7 @@ impl PublicParameters {
         }
 
         // we update the degree to match the required one (n + 6)
-        max_degree = max_degree + Self::ADDED_BLINDING_DEGREE;
+        max_degree += Self::ADDED_BLINDING_DEGREE;
 
         // Generate the secret scalar x
         let x = BlsScalar::random(&mut rng);
diff --git a/src/composer.rs b/src/composer.rs
index bc7bd340..9a227e4e 100644
--- a/src/composer.rs
+++ b/src/composer.rs
@@ -56,7 +56,7 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {
     ///
     /// This shouldn't be used directly; instead, use [`Self::initialized`]
     #[deprecated(
-        since = "13.0",
+        since = "0.13.0",
         note = "this function is meant for internal use. call `initialized` instead"
     )]
     fn uninitialized() -> Self;
@@ -66,14 +66,14 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {
 
     /// Allocate a witness value into the composer and return its index.
     #[deprecated(
-        since = "13.0",
+        since = "0.13.0",
         note = "this function is meant for internal use. call `append_witness` instead"
     )]
     fn append_witness_internal(&mut self, witness: BlsScalar) -> Witness;
 
     /// Append a new width-4 poly gate/constraint.
     #[deprecated(
-        since = "13.0",
+        since = "0.13.0",
         note = "this function is meant for internal use. call `append_custom_gate` instead"
     )]
     fn append_custom_gate_internal(&mut self, constraint: Constraint);
@@ -364,10 +364,10 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {
             .left(wnaf_round.x_beta)
             .right(wnaf_round.y_beta)
             .constant(wnaf_round.xy_beta)
-            .a(wnaf_round.acc_x.into())
-            .b(wnaf_round.acc_y.into())
-            .o(wnaf_round.xy_alpha.into())
-            .d(wnaf_round.accumulated_bit.into());
+            .a(wnaf_round.acc_x)
+            .b(wnaf_round.acc_y)
+            .o(wnaf_round.xy_alpha)
+            .d(wnaf_round.accumulated_bit);
 
         self.append_custom_gate(constraint)
     }
@@ -989,16 +989,18 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {
 
         // last constraint is zeroed as it is reserved for the genesis quad or
         // padding
-        constraints.last_mut().map(|c| *c = Constraint::new());
+        if let Some(c) = constraints.last_mut() {
+            *c = Constraint::new();
+        }
 
         // the accumulators count is a function to the number of quads. hence,
         // this optional gate will not cause different circuits depending on the
         // witness because this computation is bound to the constant bits count
         // alone.
         if let Some(accumulator) = accumulators.last() {
-            constraints
-                .last_mut()
-                .map(|c| c.set_witness(WiredWitness::D, *accumulator));
+            if let Some(c) = constraints.last_mut() {
+                c.set_witness(WiredWitness::D, *accumulator);
+            }
         }
 
         constraints
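The composer and SRS changes above are mechanical clippy fixes: compound assignment instead of `x = x + y` (`assign_op_pattern`), passing a function path instead of a trivial closure (`redundant_closure`), dropping `.into()` calls that convert a value to its own type (`useless_conversion`), and using `if let` rather than `Option::map` for side effects (`option_map_unit_fn`). A minimal, self-contained sketch of the same lints, using invented values that are not taken from this crate:

fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    // assign_op_pattern: write `total += n` instead of `total = total + n`.
    let mut total = 0;
    for n in 1..=4 {
        total += n;
    }

    // redundant_closure: `.map(double)` instead of `.map(|x| double(x))`.
    let doubled: Vec<i32> = (1..=4).map(double).collect();

    // option_map_unit_fn: use `if let` for the side effect instead of `.map(..)`.
    let mut last = doubled.last().copied();
    if let Some(v) = last.as_mut() {
        *v += total;
    }

    println!("total = {total}, doubled = {doubled:?}, last = {last:?}");
}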
diff --git a/src/composer/compiler.rs b/src/composer/compiler.rs
index 82c1c09d..ac3eef37 100644
--- a/src/composer/compiler.rs
+++ b/src/composer/compiler.rs
@@ -65,7 +65,7 @@ impl Compiler {
     where
         C: Circuit,
     {
-        compress::CompressedCircuit::from_circuit::<C>(compress::Version::V2)
+        compress::CompressedCircuit::from_circuit::<C>(true)
     }
 
     /// Generates a [Prover] and [Verifier] from a buffer created by
@@ -91,7 +91,7 @@ impl Compiler {
         let (commit, opening) = pp.trim(n)?;
 
         let (prover, verifier) =
-            Self::preprocess(label, commit, opening, &builder)?;
+            Self::preprocess(label, commit, opening, builder)?;
 
         Ok((prover, verifier))
     }
@@ -405,7 +405,7 @@ impl Compiler {
             label.clone(),
             prover_key,
             commit_key,
-            verifier_key.clone(),
+            verifier_key,
             size,
             constraints,
         );
diff --git a/src/composer/compiler/compress.rs b/src/composer/compiler/compress.rs
index 1e1892ef..a3e0e5e3 100644
--- a/src/composer/compiler/compress.rs
+++ b/src/composer/compiler/compress.rs
@@ -45,46 +45,33 @@ pub struct CompressedPolynomial {
     pub q_variable_group_add: usize,
 }
 
-#[derive(
-    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, MsgPacker,
-)]
-pub enum Version {
-    V1,
-    V2,
-}
-
-impl Version {
-    pub fn into_scalars(self) -> HashMap<BlsScalar, usize> {
-        match self {
-            Version::V1 => {
-                [BlsScalar::zero(), BlsScalar::one(), -BlsScalar::one()]
-                    .into_iter()
-                    .enumerate()
-                    .map(|(i, s)| (s, i))
-                    .collect()
-            }
-            Version::V2 => {
-                let mut scalars = Self::V1.into_scalars();
-                // assert we don't override a previously inserted constant
-                for s in hades::constants() {
-                    let len = scalars.len();
-                    scalars.entry(s).or_insert(len);
-                }
-                for r in hades::mds() {
-                    for s in r {
-                        let len = scalars.len();
-                        scalars.entry(s).or_insert(len);
-                    }
-                }
-                scalars
+fn scalar_map(hades_optimization: bool) -> HashMap<BlsScalar, usize> {
+    let mut scalars: HashMap<BlsScalar, usize> = {
+        [BlsScalar::zero(), BlsScalar::one(), -BlsScalar::one()]
+            .into_iter()
+            .enumerate()
+            .map(|(i, s)| (s, i))
+            .collect()
+    };
+    if hades_optimization {
+        // assert we don't override a previously inserted constant
+        for s in hades::constants() {
+            let len = scalars.len();
+            scalars.entry(s).or_insert(len);
+        }
+        for r in hades::mds() {
+            for s in r {
+                let len = scalars.len();
+                scalars.entry(s).or_insert(len);
             }
         }
     }
+    scalars
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, MsgPacker)]
 pub struct CompressedCircuit {
-    version: Version,
+    hades_optimization: bool,
     public_inputs: Vec<usize>,
     witnesses: usize,
     scalars: Vec<[u8; BlsScalar::SIZE]>,
@@ -93,16 +80,16 @@ pub struct CompressedCircuit {
 }
 
 impl CompressedCircuit {
-    pub fn from_circuit<C>(version: Version) -> Result<Vec<u8>, Error>
+    pub fn from_circuit<C>(hades_optimization: bool) -> Result<Vec<u8>, Error>
     where
         C: Circuit,
     {
         let mut builder = Builder::initialized();
         C::default().circuit(&mut builder)?;
-        Ok(Self::from_builder(version, builder))
+        Ok(Self::from_builder(hades_optimization, builder))
     }
 
-    pub fn from_builder(version: Version, builder: Builder) -> Vec<u8> {
+    pub fn from_builder(hades_optimization: bool, builder: Builder) -> Vec<u8> {
         let mut public_inputs: Vec<_> =
             builder.public_inputs.keys().copied().collect();
         public_inputs.sort();
@@ -111,7 +98,7 @@ impl CompressedCircuit {
         let polynomials = builder.constraints;
         let constraints = polynomials.into_iter();
 
-        let mut scalars = version.into_scalars();
+        let mut scalars = scalar_map(hades_optimization);
         let base_scalars_len = scalars.len();
         let mut polynomials = HashMap::new();
         let constraints = constraints
@@ -193,7 +180,7 @@ impl CompressedCircuit {
             .for_each(|(s, i)| scalars[i] = s.to_bytes());
 
         // clear the scalars that can be determiniscally reconstructed from the
-        // version
+        // scalar_map
        let scalars = scalars.split_off(base_scalars_len);
 
         let polynomials_map = polynomials;
@@ -204,7 +191,7 @@ impl CompressedCircuit {
             .for_each(|(p, i)| polynomials[i] = p);
 
         let compressed = Self {
-            version,
+            hades_optimization,
             public_inputs,
             witnesses,
             scalars,
@@ -230,7 +217,7 @@ impl CompressedCircuit {
         let (
             _,
             Self {
-                version,
+                hades_optimization,
                 public_inputs,
                 witnesses,
                 scalars,
@@ -240,10 +227,9 @@ impl CompressedCircuit {
         ) = Self::unpack(&compressed)
             .map_err(|_| Error::InvalidCompressedCircuit)?;
 
-        let version_scalars_map = version.into_scalars();
-        let mut version_scalars =
-            vec![BlsScalar::zero(); version_scalars_map.len()];
-        version_scalars_map
+        let scalar_map = scalar_map(hades_optimization);
+        let mut version_scalars = vec![BlsScalar::zero(); scalar_map.len()];
+        scalar_map
             .into_iter()
             .for_each(|(s, i)| version_scalars[i] = s);
         for s in scalars {
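The compiler changes drop a needless `&builder` borrow and a `verifier_key.clone()` that clippy flags as `needless_borrow` and `redundant_clone`. Replacing the `Version` enum with a `hades_optimization` flag also lets the scalar table live in a free function. The deduplication idiom it relies on, `entry(..).or_insert(len)` with the length captured before the entry borrow, works on any `HashMap`; an illustrative stand-alone version with plain `u64` keys standing in for `BlsScalar` and invented extra values:

use std::collections::HashMap;

// Assign each distinct value a stable, first-seen index.
fn index_map(values: &[u64], with_extras: bool) -> HashMap<u64, usize> {
    let mut indices: HashMap<u64, usize> = HashMap::new();
    for &v in values {
        // `len` is read before `entry` takes the mutable borrow of the map.
        let len = indices.len();
        // Never overrides an index that was inserted earlier.
        indices.entry(v).or_insert(len);
    }
    if with_extras {
        for v in [7, 11, 13] {
            let len = indices.len();
            indices.entry(v).or_insert(len);
        }
    }
    indices
}

fn main() {
    // 0 -> 0, 1 -> 1, 2 -> 2, then the extras get 3, 4 and 5.
    println!("{:?}", index_map(&[0, 1, 1, 2], true));
}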
diff --git a/src/composer/prover.rs b/src/composer/prover.rs
index 079c4026..06b257e0 100644
--- a/src/composer/prover.rs
+++ b/src/composer/prover.rs
@@ -87,7 +87,7 @@ impl Prover {
 
         for i in 0..hiding_degree + 1 {
             let blinding_scalar = BlsScalar::random(&mut *rng);
-            w_vec_inverse[i] = w_vec_inverse[i] - blinding_scalar;
+            w_vec_inverse[i] -= blinding_scalar;
 
             w_vec_inverse.push(blinding_scalar);
         }
@@ -206,8 +206,7 @@ impl Prover {
         let prover_key = ProverKey::from_slice(prover_key)?;
 
         // Safety: checked len
-        let commit_key =
-            unsafe { CommitKey::from_slice_unchecked(&commit_key) };
+        let commit_key = unsafe { CommitKey::from_slice_unchecked(commit_key) };
 
         let verifier_key = VerifierKey::from_slice(verifier_key)?;
 
diff --git a/src/fft/evaluations.rs b/src/fft/evaluations.rs
index 7bb34924..b1838d3f 100644
--- a/src/fft/evaluations.rs
+++ b/src/fft/evaluations.rs
@@ -49,8 +49,7 @@ impl Evaluations {
         bytes.extend(
             self.evals
                 .iter()
-                .map(|scalar| scalar.to_bytes().to_vec())
-                .flatten(),
+                .flat_map(|scalar| scalar.to_bytes().to_vec()),
         );
 
         bytes
@@ -62,7 +61,7 @@ impl Evaluations {
         let domain = EvaluationDomain::from_reader(&mut buffer)?;
         let evals = buffer
             .chunks(BlsScalar::SIZE)
-            .map(|chunk| BlsScalar::from_slice(chunk))
+            .map(BlsScalar::from_slice)
             .collect::<Result<Vec<BlsScalar>, dusk_bytes::Error>>()?;
         Ok(Evaluations::from_vec_and_domain(evals, domain))
     }
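The serialization helpers above collapse `map(..).flatten()` into a single `flat_map` pass (clippy's `map_flatten`) and pass `BlsScalar::from_slice` by path instead of wrapping it in a closure. The same shape on a toy type, with `u32` standing in for the 32-byte scalar and nothing taken from this crate:

// Illustrative only: serialize and deserialize fixed-size integers the way
// the evaluation and polynomial buffers are handled above.
fn to_var_bytes(values: &[u32]) -> Vec<u8> {
    values
        .iter()
        // One pass instead of `.map(..)` followed by `.flatten()`.
        .flat_map(|v| v.to_le_bytes().to_vec())
        .collect()
}

fn from_slice(bytes: &[u8]) -> Option<Vec<u32>> {
    bytes
        .chunks(4)
        // Each 4-byte chunk becomes one integer; a short chunk yields None.
        .map(|chunk| Some(u32::from_le_bytes(chunk.try_into().ok()?)))
        .collect()
}

fn main() {
    let bytes = to_var_bytes(&[1, 2, 3]);
    assert_eq!(from_slice(&bytes), Some(vec![1, 2, 3]));
    println!("{bytes:?}");
}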
diff --git a/src/fft/polynomial.rs b/src/fft/polynomial.rs
index 178dcdf2..f08bc831 100644
--- a/src/fft/polynomial.rs
+++ b/src/fft/polynomial.rs
@@ -136,8 +136,7 @@ impl Polynomial {
     pub fn to_var_bytes(&self) -> Vec<u8> {
         self.coeffs
             .iter()
-            .map(|item| item.to_bytes().to_vec())
-            .flatten()
+            .flat_map(|item| item.to_bytes().to_vec())
             .collect()
     }
 
@@ -145,7 +144,7 @@ impl Polynomial {
     pub fn from_slice(bytes: &[u8]) -> Result<Polynomial, Error> {
         let coeffs = bytes
             .chunks(BlsScalar::SIZE)
-            .map(|chunk| BlsScalar::from_slice(chunk))
+            .map(BlsScalar::from_slice)
             .collect::<Result<Vec<BlsScalar>, dusk_bytes::Error>>()?;
 
         Ok(Polynomial { coeffs })
@@ -198,7 +197,7 @@ impl<'a, 'b> Add<&'a Polynomial> for &'b Polynomial {
     }
 }
 
-impl<'a, 'b> AddAssign<&'a Polynomial> for Polynomial {
+impl<'a> AddAssign<&'a Polynomial> for Polynomial {
     fn add_assign(&mut self, other: &'a Polynomial) {
         if self.is_zero() {
             self.coeffs.truncate(0);
@@ -219,7 +218,7 @@ impl<'a, 'b> AddAssign<&'a Polynomial> for Polynomial {
     }
 }
 
-impl<'a, 'b> AddAssign<(BlsScalar, &'a Polynomial)> for Polynomial {
+impl<'a> AddAssign<(BlsScalar, &'a Polynomial)> for Polynomial {
     fn add_assign(&mut self, (f, other): (BlsScalar, &'a Polynomial)) {
         if self.is_zero() {
             self.coeffs.truncate(0);
@@ -285,7 +284,7 @@ impl<'a, 'b> Sub<&'a Polynomial> for &'b Polynomial {
     }
 }
 
-impl<'a, 'b> SubAssign<&'a Polynomial> for Polynomial {
+impl<'a> SubAssign<&'a Polynomial> for Polynomial {
     #[inline]
     fn sub_assign(&mut self, other: &'a Polynomial) {
         if self.is_zero() {
diff --git a/src/runtime.rs b/src/runtime.rs
index b3c939a0..2861ccab 100644
--- a/src/runtime.rs
+++ b/src/runtime.rs
@@ -15,6 +15,7 @@ use crate::debugger::Debugger;
 
 /// Runtime events
 #[derive(Debug, Clone, Copy)]
+#[allow(clippy::large_enum_variant)]
 pub enum RuntimeEvent {
     /// A witness was appended to the constraint system
     WitnessAppended {
@@ -41,6 +42,12 @@ pub struct Runtime {
     debugger: Debugger,
 }
 
+impl Default for Runtime {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl Runtime {
     /// Create a new PLONK runtime
     #[allow(unused_variables)]
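The `Default` impl forwarding to `Runtime::new` answers clippy's `new_without_default`, and the arithmetic impls drop the `'b` lifetime parameter that was declared but never used. A generic sketch of the `Default`-plus-`new` pattern on an invented type, not code from this crate:

pub struct Counter {
    value: u64,
}

impl Counter {
    /// Create a counter starting at zero.
    pub fn new() -> Self {
        Self { value: 0 }
    }
}

// Forward `Default` to `new` so `Counter::default()` and `#[derive(Default)]`
// on containing types keep working without a second construction path.
impl Default for Counter {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    let c = Counter::default();
    println!("{}", c.value);
}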