diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 0081bbc3..3474831d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,7 +1,7 @@
 name: "Aries-Askar"
 
 env:
-  RUST_VERSION: "1.58.0"
+  RUST_VERSION: "1.60.0"
   CROSS_VERSION: "0.2.4"
 
 on:
diff --git a/Cargo.toml b/Cargo.toml
index d81bec58..1f7f3348 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,13 +5,14 @@ members = ["askar-bbs", "askar-crypto"]
 name = "aries-askar"
 version = "0.2.8-dev.3"
 authors = ["Hyperledger Aries Contributors "]
-edition = "2018"
+edition = "2021"
 description = "Hyperledger Aries Askar secure storage"
 license = "MIT OR Apache-2.0"
 readme = "README.md"
 repository = "https://github.com/hyperledger/aries-askar/"
 categories = ["cryptography", "database"]
 keywords = ["hyperledger", "aries", "ssi", "verifiable", "credentials"]
+rust-version = "1.60"
 
 [lib]
 name = "aries_askar"
@@ -50,7 +51,7 @@ futures-lite = "1.11"
 hex = "0.4"
 hmac = "0.12"
 itertools = "0.10"
-jemallocator = { version = "0.3", optional = true }
+jemallocator = { version = "0.5", optional = true }
 log = { version = "0.4", optional = true }
 num_cpus = { version = "1.0", optional = true }
 once_cell = "1.5"
@@ -63,7 +64,7 @@ serde_json = "1.0"
 sha2 = "0.10"
 tokio = { version = "1.5", features = ["time"] }
 url = { version = "2.1", default-features = false }
-uuid = { version = "0.8", features = ["v4"] }
+uuid = { version = "1.2", features = ["v4"] }
 zeroize = "1.4"
 
 [dependencies.askar-crypto]
@@ -72,7 +73,7 @@ path = "./askar-crypto"
 features = ["all_keys", "any_key", "argon2", "crypto_box", "std"]
 
 [dependencies.sqlx]
-version = "0.5.13"
+version = "0.6.2"
 default-features = false
 features = ["chrono", "runtime-tokio-rustls"]
 optional = true
@@ -80,6 +81,7 @@ optional = true
 [profile.release]
 codegen-units = 1
 lto = true
+panic = "abort"
 
 [[test]]
 name = "backends"
diff --git a/askar-crypto/Cargo.toml b/askar-crypto/Cargo.toml
index 49748c3d..bd60b1b0 100644
--- a/askar-crypto/Cargo.toml
+++ b/askar-crypto/Cargo.toml
@@ -2,13 +2,14 @@
 name = "askar-crypto"
 version = "0.2.5"
 authors = ["Hyperledger Aries Contributors "]
-edition = "2018"
+edition = "2021"
 description = "Hyperledger Aries Askar cryptography"
 license = "MIT OR Apache-2.0"
 readme = "README.md"
 repository = "https://github.com/hyperledger/aries-askar/"
 categories = ["cryptography", "no-std"]
 keywords = ["hyperledger", "aries", "didcomm", "ssi"]
+rust-version = "1.60"
 
 [package.metadata.docs.rs]
 features = ["argon2", "std"]
@@ -31,12 +32,10 @@ std_rng = ["getrandom", "rand/std", "rand/std_rng"]
 
 [dev-dependencies]
 base64 = { version = "0.13", default-features = false, features = ["alloc"] }
-criterion = "0.3"
-# override transitive dependency from criterion to support rust versions older than 1.60
-csv = "=1.1"
+criterion = "0.4"
 hex-literal = "0.3"
 serde_cbor = "0.11"
-serde-json-core = { version = "0.4", default-features = false, features = ["std"] }
+serde-json-core = { version = "0.5", default-features = false, features = ["std"] }
 
 [[bench]]
 name = "enc"
@@ -70,7 +69,7 @@ k256 = { version = "0.10", default-features = false, features = ["arithmetic", "
 p256 = { version = "0.10", default-features = false, features = ["arithmetic", "ecdsa", "ecdh"], optional = true }
 rand = { version = "0.8", default-features = false }
 serde = { version = "1.0", default-features = false, features = ["derive"] }
-serde-json-core = { version = "0.4", default-features = false }
+serde-json-core = { version = "0.5", default-features = false }
 subtle = "2.4"
 sha2 = {
version = "0.10", default-features = false } x25519-dalek = { version = "=1.1", default-features = false, features = ["u64_backend"], optional = true } diff --git a/askar-crypto/benches/enc.rs b/askar-crypto/benches/enc.rs index eb700d11..21c01a1b 100644 --- a/askar-crypto/benches/enc.rs +++ b/askar-crypto/benches/enc.rs @@ -21,88 +21,88 @@ fn criterion_benchmark(c: &mut Criterion) { let mut message = vec![0u8; MSG_SIZE]; fill_random(&mut message[..]); - let message = &message[..]; + let message = message.as_slice(); - c.bench_function(&format!("random nonce"), move |b| { - b.iter(|| AesKey::::random_nonce()) + c.bench_function("random nonce", move |b| { + b.iter(AesKey::::random_nonce) }); - c.bench_function(&format!("aes128gcm encrypt"), move |b| { + c.bench_function("aes128gcm encrypt", move |b| { let key = AesKey::::random().unwrap(); let nonce = AesKey::::random_nonce(); let mut buffer = Vec::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.extend_from_slice(black_box(&message[..])); + buffer.extend_from_slice(black_box(message)); key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); }) }); - c.bench_function(&format!("aes256gcm encrypt"), move |b| { + c.bench_function("aes256gcm encrypt", move |b| { let key = AesKey::::random().unwrap(); let nonce = AesKey::::random_nonce(); let mut buffer = Vec::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.extend_from_slice(black_box(&message[..])); + buffer.extend_from_slice(black_box(message)); key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); }) }); - c.bench_function(&format!("aes128cbc-hs256 encrypt"), move |b| { + c.bench_function("aes128cbc-hs256 encrypt", move |b| { let key = AesKey::::random().unwrap(); let nonce = AesKey::::random_nonce(); let mut buffer = Vec::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.extend_from_slice(black_box(&message[..])); + buffer.extend_from_slice(black_box(message)); key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); }) }); - c.bench_function(&format!("aes256cbc-hs512 encrypt"), move |b| { + c.bench_function("aes256cbc-hs512 encrypt", move |b| { let key = AesKey::::random().unwrap(); let nonce = AesKey::::random_nonce(); let mut buffer = Vec::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.extend_from_slice(black_box(&message[..])); + buffer.extend_from_slice(black_box(message)); key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); }) }); - c.bench_function(&format!("chacha20-poly1305 encrypt"), move |b| { + c.bench_function("chacha20-poly1305 encrypt", move |b| { let key = Chacha20Key::::random().unwrap(); let nonce = Chacha20Key::::random_nonce(); let mut buffer = Vec::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.extend_from_slice(black_box(&message[..])); + buffer.extend_from_slice(black_box(message)); key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); }) }); - c.bench_function(&format!("xchacha20-poly1305 encrypt"), move |b| { + c.bench_function("xchacha20-poly1305 encrypt", move |b| { let key = Chacha20Key::::random().unwrap(); let nonce = Chacha20Key::::random_nonce(); let mut buffer = Vec::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.extend_from_slice(black_box(&message[..])); + buffer.extend_from_slice(black_box(message)); key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); }) }); // test overhead of SecretBytes - c.bench_function(&format!("chacha20-poly1305 encrypt alloc"), move |b| { + c.bench_function("chacha20-poly1305 encrypt alloc", move |b| { let key = 
Chacha20Key::::random().unwrap(); let nonce = Chacha20Key::::random_nonce(); let mut buffer = SecretBytes::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.buffer_write(black_box(&message[..])).unwrap(); + buffer.buffer_write(black_box(message)).unwrap(); key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); }) }); // test overhead of AnyKey - c.bench_function(&format!("chacha20-poly1305 encrypt as any"), move |b| { + c.bench_function("chacha20-poly1305 encrypt as any", move |b| { let key = Box::::random(KeyAlg::Chacha20(Chacha20Types::C20P)).unwrap(); let mut nonce = [0u8; 255]; let nonce_len = key.aead_params().nonce_length; @@ -110,7 +110,7 @@ fn criterion_benchmark(c: &mut Criterion) { let mut buffer = Vec::with_capacity(ALLOC_SIZE); b.iter(|| { buffer.clear(); - buffer.extend_from_slice(black_box(&message[..])); + buffer.extend_from_slice(black_box(message)); key.encrypt_in_place(&mut buffer, &nonce[..nonce_len], &[]) .unwrap(); }) diff --git a/askar-crypto/benches/kdf.rs b/askar-crypto/benches/kdf.rs index 861869f9..bf0dbd4f 100644 --- a/askar-crypto/benches/kdf.rs +++ b/askar-crypto/benches/kdf.rs @@ -18,7 +18,7 @@ fn criterion_benchmark(c: &mut Criterion) { prv_info: &[], }; - c.bench_function(&format!("concat kdf sha256"), move |b| { + c.bench_function("concat kdf sha256", move |b| { b.iter(|| { let mut output = [0u8; 32]; ConcatKDF::::derive_key(black_box(message), black_box(params), &mut output) diff --git a/askar-crypto/src/alg/aes/key_wrap.rs b/askar-crypto/src/alg/aes/key_wrap.rs index ee7d4e16..56de9006 100644 --- a/askar-crypto/src/alg/aes/key_wrap.rs +++ b/askar-crypto/src/alg/aes/key_wrap.rs @@ -1,6 +1,6 @@ //! AES key wrap -use core::{convert::TryInto, marker::PhantomData}; +use core::marker::PhantomData; use aes_core::{Aes128, Aes256}; use block_modes::cipher::{BlockCipher, BlockDecrypt, BlockEncrypt, NewBlockCipher}; @@ -64,10 +64,10 @@ where nonce: &[u8], aad: &[u8], ) -> Result { - if nonce.len() != 0 { + if !nonce.is_empty() { return Err(err_msg!(Unsupported, "Custom nonce not supported")); } - if aad.len() != 0 { + if !aad.is_empty() { return Err(err_msg!(Unsupported, "AAD not supported")); } let mut buf_len = buffer.as_ref().len(); @@ -108,10 +108,10 @@ where nonce: &[u8], aad: &[u8], ) -> Result<(), Error> { - if nonce.len() != 0 { + if !nonce.is_empty() { return Err(err_msg!(Unsupported, "Custom nonce not supported")); } - if aad.len() != 0 { + if !aad.is_empty() { return Err(err_msg!(Unsupported, "AAD not supported")); } if buffer.as_ref().len() % 8 != 0 { diff --git a/askar-crypto/src/alg/aes/mod.rs b/askar-crypto/src/alg/aes/mod.rs index 0d757de6..e4bfc821 100644 --- a/askar-crypto/src/alg/aes/mod.rs +++ b/askar-crypto/src/alg/aes/mod.rs @@ -26,7 +26,7 @@ mod key_wrap; pub use key_wrap::{A128Kw, A256Kw}; /// The 'kty' value of a symmetric key JWK -pub static JWK_KEY_TYPE: &'static str = "oct"; +pub static JWK_KEY_TYPE: &str = "oct"; /// Trait implemented by supported AES authenticated encryption algorithms pub trait AesType: 'static { diff --git a/askar-crypto/src/alg/any.rs b/askar-crypto/src/alg/any.rs index e54c85b8..c4864a15 100644 --- a/askar-crypto/src/alg/any.rs +++ b/askar-crypto/src/alg/any.rs @@ -1,6 +1,4 @@ use alloc::{boxed::Box, sync::Arc}; -#[cfg(feature = "ed25519")] -use core::convert::TryFrom; use core::{ any::{Any, TypeId}, fmt::Debug, @@ -230,12 +228,10 @@ fn generate_any(alg: KeyAlg, rng: impl KeyMaterial) -> Result P256KeyPair::generate(rng).map(R::alloc_key), #[allow(unreachable_patterns)] - _ => { - return 
Err(err_msg!( - Unsupported, - "Unsupported algorithm for key generation" - )) - } + _ => Err(err_msg!( + Unsupported, + "Unsupported algorithm for key generation" + )), } } @@ -267,12 +263,10 @@ fn from_public_bytes_any(alg: KeyAlg, public: &[u8]) -> Result { - return Err(err_msg!( - Unsupported, - "Unsupported algorithm for public key import" - )) - } + _ => Err(err_msg!( + Unsupported, + "Unsupported algorithm for public key import" + )), } } @@ -336,12 +330,10 @@ fn from_secret_bytes_any(alg: KeyAlg, secret: &[u8]) -> Result { - return Err(err_msg!( - Unsupported, - "Unsupported algorithm for secret key import" - )) - } + _ => Err(err_msg!( + Unsupported, + "Unsupported algorithm for secret key import" + )), } } @@ -387,12 +379,10 @@ where Chacha20Key::::from_key_exchange(secret, public).map(R::alloc_key) } #[allow(unreachable_patterns)] - _ => { - return Err(err_msg!( - Unsupported, - "Unsupported algorithm for key exchange" - )); - } + _ => Err(err_msg!( + Unsupported, + "Unsupported algorithm for key exchange" + )), } } @@ -449,12 +439,10 @@ fn from_key_derivation_any( Chacha20Key::::from_key_derivation(derive).map(R::alloc_key) } #[allow(unreachable_patterns)] - _ => { - return Err(err_msg!( - Unsupported, - "Unsupported algorithm for key derivation" - )); - } + _ => Err(err_msg!( + Unsupported, + "Unsupported algorithm for key derivation" + )), } } @@ -486,12 +474,10 @@ fn convert_key_any(key: &AnyKey, alg: KeyAlg) -> Result { ) .map(R::alloc_key)?), #[allow(unreachable_patterns)] - _ => { - return Err(err_msg!( - Unsupported, - "Unsupported key conversion operation" - )) - } + _ => Err(err_msg!( + Unsupported, + "Unsupported key conversion operation" + )), } } @@ -714,7 +700,7 @@ impl KeyExchange for AnyKey { #[allow(unreachable_patterns)] _ => { let _ = out; - return Err(err_msg!(Unsupported, "Unsupported key exchange")); + Err(err_msg!(Unsupported, "Unsupported key exchange")) } } } diff --git a/askar-crypto/src/alg/bls.rs b/askar-crypto/src/alg/bls.rs index df785859..bf05aa5c 100644 --- a/askar-crypto/src/alg/bls.rs +++ b/askar-crypto/src/alg/bls.rs @@ -1,7 +1,6 @@ //! BLS12-381 key support use core::{ - convert::TryInto, fmt::{self, Debug, Formatter}, ops::Add, }; @@ -29,7 +28,7 @@ use crate::{ }; /// The 'kty' value of a BLS key JWK -pub const JWK_KEY_TYPE: &'static str = "OKP"; +pub const JWK_KEY_TYPE: &str = "OKP"; /// A BLS12-381 key pair #[derive(Clone, Zeroize)] @@ -187,24 +186,21 @@ impl FromJwk for BlsKeyPair { ArrayKey::::temp(|pk_arr| { if jwk.x.decode_base64(pk_arr)? != pk_arr.len() { Err(err_msg!(InvalidKeyData)) + } else if jwk.d.is_some() { + ArrayKey::::temp(|sk_arr| { + if jwk.d.decode_base64(sk_arr)? != sk_arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + let result = BlsKeyPair::from_secret_key(BlsSecretKey::from_bytes(sk_arr)?); + result.check_public_bytes(pk_arr)?; + Ok(result) + } + }) } else { - if jwk.d.is_some() { - ArrayKey::::temp(|sk_arr| { - if jwk.d.decode_base64(sk_arr)? 
!= sk_arr.len() { - Err(err_msg!(InvalidKeyData)) - } else { - let result = - BlsKeyPair::from_secret_key(BlsSecretKey::from_bytes(sk_arr)?); - result.check_public_bytes(pk_arr)?; - Ok(result) - } - }) - } else { - Ok(Self { - secret: None, - public: Pk::from_public_bytes(pk_arr)?, - }) - } + Ok(Self { + secret: None, + public: Pk::from_public_bytes(pk_arr)?, + }) } }) } @@ -430,7 +426,7 @@ impl From<&BlsKeyPair> for BlsKeyPair { fn from(kp: &BlsKeyPair) -> Self { BlsKeyPair { secret: kp.secret.clone(), - public: kp.public.0.clone(), + public: kp.public.0, } } } @@ -439,7 +435,7 @@ impl From<&BlsKeyPair> for BlsKeyPair { fn from(kp: &BlsKeyPair) -> Self { BlsKeyPair { secret: kp.secret.clone(), - public: kp.public.1.clone(), + public: kp.public.1, } } } @@ -531,7 +527,7 @@ mod tests { let kp = BlsKeyPair::::from_secret_bytes(&test_pvt[..]).expect("Error creating key"); let jwk = kp.to_jwk_public(None).expect("Error converting key to JWK"); - let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK"); + let jwk = JwkParts::try_from_str(&jwk).expect("Error parsing JWK"); assert_eq!(jwk.kty, JWK_KEY_TYPE); assert_eq!(jwk.crv, G1::JWK_CURVE); assert_eq!( diff --git a/askar-crypto/src/alg/chacha20.rs b/askar-crypto/src/alg/chacha20.rs index 621cd612..56766712 100644 --- a/askar-crypto/src/alg/chacha20.rs +++ b/askar-crypto/src/alg/chacha20.rs @@ -20,7 +20,7 @@ use crate::{ }; /// The 'kty' value of a symmetric key JWK -pub static JWK_KEY_TYPE: &'static str = "oct"; +pub static JWK_KEY_TYPE: &str = "oct"; /// Trait implemented by supported ChaCha20 algorithms pub trait Chacha20Type: 'static { diff --git a/askar-crypto/src/alg/ed25519.rs b/askar-crypto/src/alg/ed25519.rs index c702314f..a5aa5da5 100644 --- a/askar-crypto/src/alg/ed25519.rs +++ b/askar-crypto/src/alg/ed25519.rs @@ -33,9 +33,9 @@ pub const SECRET_KEY_LENGTH: usize = 32; pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; /// The 'kty' value of an Ed25519 JWK -pub static JWK_KEY_TYPE: &'static str = "OKP"; +pub static JWK_KEY_TYPE: &str = "OKP"; /// The 'crv' value of an Ed25519 JWK -pub static JWK_CURVE: &'static str = "Ed25519"; +pub static JWK_CURVE: &str = "Ed25519"; /// An Ed25519 public key or keypair pub struct Ed25519KeyPair { @@ -111,7 +111,7 @@ impl Clone for Ed25519KeyPair { .secret .as_ref() .map(|sk| SecretKey::from_bytes(&sk.as_bytes()[..]).unwrap()), - public: self.public.clone(), + public: self.public, } } } @@ -278,20 +278,18 @@ impl FromJwk for Ed25519KeyPair { ArrayKey::::temp(|pk_arr| { if jwk.x.decode_base64(pk_arr)? != pk_arr.len() { Err(err_msg!(InvalidKeyData)) + } else if jwk.d.is_some() { + ArrayKey::::temp(|sk_arr| { + if jwk.d.decode_base64(sk_arr)? != sk_arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + let kp = Ed25519KeyPair::from_secret_bytes(sk_arr)?; + kp.check_public_bytes(pk_arr)?; + Ok(kp) + } + }) } else { - if jwk.d.is_some() { - ArrayKey::::temp(|sk_arr| { - if jwk.d.decode_base64(sk_arr)? 
!= sk_arr.len() { - Err(err_msg!(InvalidKeyData)) - } else { - let kp = Ed25519KeyPair::from_secret_bytes(sk_arr)?; - kp.check_public_bytes(pk_arr)?; - Ok(kp) - } - }) - } else { - Ed25519KeyPair::from_public_bytes(pk_arr) - } + Ed25519KeyPair::from_public_bytes(pk_arr) } }) } @@ -304,7 +302,7 @@ pub struct Ed25519SigningKey<'p>(ExpandedSecretKey, &'p PublicKey); impl Ed25519SigningKey<'_> { /// Sign a message with the secret key pub fn sign(&self, message: &[u8]) -> [u8; EDDSA_SIGNATURE_LENGTH] { - self.0.sign(message, &self.1).to_bytes() + self.0.sign(message, self.1).to_bytes() } } @@ -368,7 +366,7 @@ mod tests { let jwk = kp .to_jwk_public(None) .expect("Error converting public key to JWK"); - let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK output"); + let jwk = JwkParts::try_from_str(&jwk).expect("Error parsing JWK output"); assert_eq!(jwk.kty, JWK_KEY_TYPE); assert_eq!(jwk.crv, JWK_CURVE); assert_eq!(jwk.x, "11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"); @@ -404,9 +402,9 @@ mod tests { let kp = Ed25519KeyPair::from_keypair_bytes(test_keypair).unwrap(); let sig = &kp.sign(test_msg).unwrap(); assert_eq!(sig, test_sig); - assert_eq!(kp.verify_signature(test_msg, &sig[..]), true); - assert_eq!(kp.verify_signature(b"Not the message", &sig[..]), false); - assert_eq!(kp.verify_signature(test_msg, &[0u8; 64]), false); + assert!(kp.verify_signature(test_msg, &sig[..])); + assert!(!kp.verify_signature(b"Not the message", &sig[..])); + assert!(!kp.verify_signature(test_msg, &[0u8; 64])); } #[test] diff --git a/askar-crypto/src/alg/k256.rs b/askar-crypto/src/alg/k256.rs index ce0e6ab3..f9588657 100644 --- a/askar-crypto/src/alg/k256.rs +++ b/askar-crypto/src/alg/k256.rs @@ -47,9 +47,9 @@ pub const SECRET_KEY_LENGTH: usize = 32; pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; /// The 'kty' value of an elliptic curve key JWK -pub static JWK_KEY_TYPE: &'static str = "EC"; +pub static JWK_KEY_TYPE: &str = "EC"; /// The 'crv' value of a K-256 key JWK -pub static JWK_CURVE: &'static str = "secp256k1"; +pub static JWK_CURVE: &str = "secp256k1"; type FieldSize = elliptic_curve::FieldSize; @@ -119,7 +119,7 @@ impl KeyGen for K256KeyPair { fn generate(mut rng: impl KeyMaterial) -> Result { ArrayKey::::temp(|buf| loop { rng.read_okm(buf); - if let Ok(key) = SecretKey::from_be_bytes(&buf) { + if let Ok(key) = SecretKey::from_be_bytes(buf) { return Ok(Self::from_secret_key(key)); } }) @@ -137,7 +137,7 @@ impl KeySecretBytes for K256KeyPair { if let Some(sk) = self.secret.as_ref() { ArrayKey::::temp(|arr| { ec_common::write_sk(sk, &mut arr[..]); - f(Some(&arr)) + f(Some(arr)) }) } else { f(None) @@ -310,7 +310,7 @@ impl KeyExchange for K256KeyPair { match self.secret.as_ref() { Some(sk) => { let xk = diffie_hellman(sk.to_nonzero_scalar(), other.public.as_affine()); - out.buffer_write(xk.as_bytes())?; + out.buffer_write(xk.as_bytes().as_ref())?; Ok(()) } None => Err(err_msg!(MissingSecretKey)), @@ -343,7 +343,7 @@ mod tests { let sk = K256KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); let jwk = sk.to_jwk_public(None).expect("Error converting key to JWK"); - let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK"); + let jwk = JwkParts::try_from_str(&jwk).expect("Error parsing JWK"); assert_eq!(jwk.kty, JWK_KEY_TYPE); assert_eq!(jwk.crv, JWK_CURVE); assert_eq!(jwk.x, test_pub_b64.0); @@ -381,9 +381,9 @@ mod tests { let kp = K256KeyPair::from_secret_bytes(&test_pvt).unwrap(); let sig = kp.sign(&test_msg[..]).unwrap(); assert_eq!(sig, 
&test_sig[..]); - assert_eq!(kp.verify_signature(&test_msg[..], &sig[..]), true); - assert_eq!(kp.verify_signature(b"Not the message", &sig[..]), false); - assert_eq!(kp.verify_signature(&test_msg[..], &[0u8; 64]), false); + assert!(kp.verify_signature(&test_msg[..], &sig[..])); + assert!(!kp.verify_signature(b"Not the message", &sig[..])); + assert!(!kp.verify_signature(&test_msg[..], &[0u8; 64])); } #[test] diff --git a/askar-crypto/src/alg/mod.rs b/askar-crypto/src/alg/mod.rs index 5bc921b2..ef586f9f 100644 --- a/askar-crypto/src/alg/mod.rs +++ b/askar-crypto/src/alg/mod.rs @@ -184,6 +184,7 @@ impl<'a> NormalizedIter<'a> { impl Iterator for NormalizedIter<'_> { type Item = char; fn next(&mut self) -> Option { + #[allow(clippy::while_let_on_iterator)] while let Some(c) = self.chars.next() { if c != '-' && c != '_' && c != ' ' { return Some(c.to_ascii_lowercase()); @@ -262,10 +263,10 @@ mod tests { #[test] fn cmp_normalize() { - assert_eq!(normalize_alg("Test").unwrap() == "test", true); - assert_eq!(normalize_alg("t-e-s-t").unwrap() == "test", true); - assert_eq!(normalize_alg("--TE__ST--").unwrap() == "test", true); - assert_eq!(normalize_alg("t-e-s-t").unwrap() == "tes", false); - assert_eq!(normalize_alg("t-e-s-t").unwrap() == "testt", false); + assert!(normalize_alg("Test").unwrap() == "test"); + assert!(normalize_alg("t-e-s-t").unwrap() == "test"); + assert!(normalize_alg("--TE__ST--").unwrap() == "test"); + assert!(normalize_alg("t-e-s-t").unwrap() != "tes"); + assert!(normalize_alg("t-e-s-t").unwrap() != "testt"); } } diff --git a/askar-crypto/src/alg/p256.rs b/askar-crypto/src/alg/p256.rs index daa6b743..0ef59219 100644 --- a/askar-crypto/src/alg/p256.rs +++ b/askar-crypto/src/alg/p256.rs @@ -47,9 +47,9 @@ pub const SECRET_KEY_LENGTH: usize = 32; pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; /// The 'kty' value of an elliptic curve key JWK -pub static JWK_KEY_TYPE: &'static str = "EC"; +pub static JWK_KEY_TYPE: &str = "EC"; /// The 'crv' value of a P-256 key JWK -pub static JWK_CURVE: &'static str = "P-256"; +pub static JWK_CURVE: &str = "P-256"; type FieldSize = elliptic_curve::FieldSize; @@ -119,7 +119,7 @@ impl KeyGen for P256KeyPair { fn generate(mut rng: impl KeyMaterial) -> Result { ArrayKey::::temp(|buf| loop { rng.read_okm(buf); - if let Ok(key) = SecretKey::from_be_bytes(&buf) { + if let Ok(key) = SecretKey::from_be_bytes(buf) { return Ok(Self::from_secret_key(key)); } }) @@ -137,7 +137,7 @@ impl KeySecretBytes for P256KeyPair { if let Some(sk) = self.secret.as_ref() { ArrayKey::::temp(|arr| { ec_common::write_sk(sk, &mut arr[..]); - f(Some(&arr)) + f(Some(arr)) }) } else { f(None) @@ -310,7 +310,7 @@ impl KeyExchange for P256KeyPair { match self.secret.as_ref() { Some(sk) => { let xk = diffie_hellman(sk.to_nonzero_scalar(), other.public.as_affine()); - out.buffer_write(xk.as_bytes())?; + out.buffer_write(xk.as_bytes().as_ref())?; Ok(()) } None => Err(err_msg!(MissingSecretKey)), @@ -341,7 +341,7 @@ mod tests { let sk = P256KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); let jwk = sk.to_jwk_public(None).expect("Error converting key to JWK"); - let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK"); + let jwk = JwkParts::try_from_str(&jwk).expect("Error parsing JWK"); assert_eq!(jwk.kty, JWK_KEY_TYPE); assert_eq!(jwk.crv, JWK_CURVE); assert_eq!(jwk.x, test_pub_b64.0); @@ -396,9 +396,9 @@ mod tests { let kp = P256KeyPair::from_secret_bytes(&test_pvt).unwrap(); let sig = kp.sign(&test_msg[..]).unwrap(); 
assert_eq!(sig, &test_sig[..]); - assert_eq!(kp.verify_signature(&test_msg[..], &sig[..]), true); - assert_eq!(kp.verify_signature(b"Not the message", &sig[..]), false); - assert_eq!(kp.verify_signature(&test_msg[..], &[0u8; 64]), false); + assert!(kp.verify_signature(&test_msg[..], &sig[..])); + assert!(!kp.verify_signature(b"Not the message", &sig[..])); + assert!(!kp.verify_signature(&test_msg[..], &[0u8; 64])); } #[test] diff --git a/askar-crypto/src/alg/x25519.rs b/askar-crypto/src/alg/x25519.rs index b6772e0d..768fedc0 100644 --- a/askar-crypto/src/alg/x25519.rs +++ b/askar-crypto/src/alg/x25519.rs @@ -32,9 +32,9 @@ pub const SECRET_KEY_LENGTH: usize = 32; pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; /// The 'kty' value of an X25519 JWK -pub static JWK_KEY_TYPE: &'static str = "OKP"; +pub static JWK_KEY_TYPE: &str = "OKP"; /// The 'crv' value of an X25519 JWK -pub static JWK_CURVE: &'static str = "X25519"; +pub static JWK_CURVE: &str = "X25519"; /// An X25519 public key or keypair #[derive(Clone)] @@ -202,20 +202,18 @@ impl FromJwk for X25519KeyPair { ArrayKey::::temp(|pk_arr| { if jwk.x.decode_base64(pk_arr)? != pk_arr.len() { Err(err_msg!(InvalidKeyData)) + } else if jwk.d.is_some() { + ArrayKey::::temp(|sk_arr| { + if jwk.d.decode_base64(sk_arr)? != sk_arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + let kp = X25519KeyPair::from_secret_bytes(sk_arr)?; + kp.check_public_bytes(pk_arr)?; + Ok(kp) + } + }) } else { - if jwk.d.is_some() { - ArrayKey::::temp(|sk_arr| { - if jwk.d.decode_base64(sk_arr)? != sk_arr.len() { - Err(err_msg!(InvalidKeyData)) - } else { - let kp = X25519KeyPair::from_secret_bytes(sk_arr)?; - kp.check_public_bytes(pk_arr)?; - Ok(kp) - } - }) - } else { - X25519KeyPair::from_public_bytes(pk_arr) - } + X25519KeyPair::from_public_bytes(pk_arr) } }) } @@ -263,7 +261,7 @@ mod tests { let jwk = kp .to_jwk_public(None) .expect("Error converting public key to JWK"); - let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK output"); + let jwk = JwkParts::try_from_str(&jwk).expect("Error parsing JWK output"); assert_eq!(jwk.kty, JWK_KEY_TYPE); assert_eq!(jwk.crv, JWK_CURVE); assert_eq!(jwk.x, "tGskN_ae61DP4DLY31_fjkbvnKqf-ze7kA6Cj2vyQxU"); diff --git a/askar-crypto/src/buffer/array.rs b/askar-crypto/src/buffer/array.rs index 037cb56f..31b9bdec 100644 --- a/askar-crypto/src/buffer/array.rs +++ b/askar-crypto/src/buffer/array.rs @@ -1,5 +1,6 @@ use core::{ fmt::{self, Debug, Formatter}, + hash, marker::{PhantomData, PhantomPinned}, ops::Deref, }; @@ -17,7 +18,7 @@ use crate::{ }; /// A secure representation for fixed-length keys -#[derive(Clone, Hash)] +#[derive(Clone)] #[repr(transparent)] pub struct ArrayKey>( GenericArray, @@ -149,6 +150,12 @@ impl> PartialEq for ArrayKey { } impl> Eq for ArrayKey {} +impl> hash::Hash for ArrayKey { + fn hash(&self, state: &mut H) { + self.0.hash(state); + } +} + impl> Serialize for ArrayKey { fn serialize(&self, serializer: S) -> Result where diff --git a/askar-crypto/src/buffer/hash.rs b/askar-crypto/src/buffer/hash.rs index 0807ed62..c6647ab1 100644 --- a/askar-crypto/src/buffer/hash.rs +++ b/askar-crypto/src/buffer/hash.rs @@ -22,6 +22,12 @@ impl HashBuffer { } } +impl Default for HashBuffer { + fn default() -> Self { + Self::new() + } +} + impl WriteBuffer for HashBuffer { fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { self.0.update(data); diff --git a/askar-crypto/src/buffer/mod.rs b/askar-crypto/src/buffer/mod.rs index faae2496..f9f4f30f 100644 --- 
a/askar-crypto/src/buffer/mod.rs +++ b/askar-crypto/src/buffer/mod.rs @@ -64,7 +64,7 @@ impl WriteBuffer for Vec { #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] impl ResizeBuffer for Vec { fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error> { - self.splice(pos..pos, data.into_iter().cloned()); + self.splice(pos..pos, data.iter().cloned()); Ok(()) } @@ -87,19 +87,19 @@ mod tests { pub(crate) fn test_write_buffer>(mut w: B) { w.buffer_write(b"he").unwrap(); w.buffer_write(b"y").unwrap(); - assert_eq!(&w.as_ref()[..], b"hey"); + assert_eq!(w.as_ref(), b"hey"); } pub(crate) fn test_resize_buffer(mut w: B) { w.buffer_write(b"hello").unwrap(); w.buffer_insert(1, b"world").unwrap(); - assert_eq!(&w.as_ref()[..], b"hworldello"); + assert_eq!(w.as_ref(), b"hworldello"); w.buffer_resize(12).unwrap(); - assert_eq!(&w.as_ref()[..], b"hworldello\0\0"); + assert_eq!(w.as_ref(), b"hworldello\0\0"); w.buffer_resize(6).unwrap(); - assert_eq!(&w.as_ref()[..], b"hworld"); + assert_eq!(w.as_ref(), b"hworld"); w.buffer_insert(1, b"ello").unwrap(); - assert_eq!(&w.as_ref()[..], b"helloworld"); + assert_eq!(w.as_ref(), b"helloworld"); } #[test] diff --git a/askar-crypto/src/buffer/secret.rs b/askar-crypto/src/buffer/secret.rs index 81982882..8ebce8ef 100644 --- a/askar-crypto/src/buffer/secret.rs +++ b/askar-crypto/src/buffer/secret.rs @@ -1,7 +1,7 @@ use alloc::{boxed::Box, string::String, vec::Vec}; use core::{ fmt::{self, Debug, Formatter}, - mem, + hash, mem, ops::{Deref, Range}, }; @@ -13,7 +13,7 @@ use super::{string::MaybeStr, HexRepr, ResizeBuffer, WriteBuffer}; use crate::error::Error; /// A heap-allocated, zeroized byte buffer -#[derive(Clone, Default, Hash, Zeroize)] +#[derive(Clone, Default, Zeroize)] pub struct SecretBytes(Vec); impl SecretBytes { @@ -59,6 +59,12 @@ impl SecretBytes { self.0.len() } + /// Determine if the buffer has zero length + #[inline] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + /// Try to convert the buffer value to a string reference pub fn as_opt_str(&self) -> Option<&str> { core::str::from_utf8(self.0.as_slice()).ok() @@ -205,6 +211,12 @@ impl PartialEq for SecretBytes { } impl Eq for SecretBytes {} +impl hash::Hash for SecretBytes { + fn hash(&self, state: &mut H) { + self.0.hash(state); + } +} + impl From<&[u8]> for SecretBytes { fn from(inner: &[u8]) -> Self { Self(inner.to_vec()) @@ -256,7 +268,7 @@ impl WriteBuffer for SecretBytes { impl ResizeBuffer for SecretBytes { fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error> { - self.splice(pos..pos, data.into_iter().cloned()) + self.splice(pos..pos, data.iter().cloned()) } fn buffer_remove(&mut self, range: Range) -> Result<(), Error> { diff --git a/askar-crypto/src/buffer/string.rs b/askar-crypto/src/buffer/string.rs index 05205727..8a0c744f 100644 --- a/askar-crypto/src/buffer/string.rs +++ b/askar-crypto/src/buffer/string.rs @@ -29,7 +29,7 @@ impl> PartialEq<[u8]> for HexRepr { } } - write!(&mut CmpWrite(other.into_iter()), "{}", self).is_ok() + write!(&mut CmpWrite(other.iter()), "{}", self).is_ok() } } diff --git a/askar-crypto/src/buffer/writer.rs b/askar-crypto/src/buffer/writer.rs index df9d0626..f273dae5 100644 --- a/askar-crypto/src/buffer/writer.rs +++ b/askar-crypto/src/buffer/writer.rs @@ -45,19 +45,23 @@ impl Writer<'_, [u8]> { assert!(range.end >= range.start); let rem_len = range.len(); let ins_len = iter.len(); - if ins_len > rem_len { - let diff = ins_len - rem_len; - if self.pos + diff > self.inner.len() { - return 
Err(err_msg!(ExceededBuffer)); + match ins_len { + _ if ins_len > rem_len => { + let diff = ins_len - rem_len; + if self.pos + diff > self.inner.len() { + return Err(err_msg!(ExceededBuffer)); + } + self.inner + .copy_within((range.end - diff)..self.pos, range.end); + self.pos += diff; } - self.inner - .copy_within((range.end - diff)..self.pos, range.end); - self.pos += diff; - } else if ins_len < rem_len { - let diff = rem_len - ins_len; - self.inner - .copy_within(range.end..self.pos, range.end - diff); - self.pos -= diff; + _ if ins_len < rem_len => { + let diff = rem_len - ins_len; + self.inner + .copy_within(range.end..self.pos, range.end - diff); + self.pos -= diff; + } + _ => {} } for idx in 0..ins_len { self.inner[range.start + idx] = iter.next().unwrap(); @@ -93,7 +97,7 @@ impl WriteBuffer for Writer<'_, [u8]> { impl ResizeBuffer for Writer<'_, [u8]> { fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error> { - self.splice(pos..pos, data.into_iter().cloned()) + self.splice(pos..pos, data.iter().cloned()) } fn buffer_remove(&mut self, range: Range) -> Result<(), Error> { diff --git a/askar-crypto/src/jwk/mod.rs b/askar-crypto/src/jwk/mod.rs index bdf7d485..3373be29 100644 --- a/askar-crypto/src/jwk/mod.rs +++ b/askar-crypto/src/jwk/mod.rs @@ -80,7 +80,7 @@ pub fn write_jwk_thumbprint( pub trait FromJwk: Sized { /// Import the key from a JWK string reference fn from_jwk(jwk: &str) -> Result { - JwkParts::from_str(jwk).and_then(Self::from_jwk_parts) + JwkParts::try_from_str(jwk).and_then(Self::from_jwk_parts) } /// Import the key from a JWK byte slice diff --git a/askar-crypto/src/jwk/ops.rs b/askar-crypto/src/jwk/ops.rs index 3da4c8a0..670df2d2 100644 --- a/askar-crypto/src/jwk/ops.rs +++ b/askar-crypto/src/jwk/ops.rs @@ -67,7 +67,7 @@ impl KeyOps { } /// Parse a key operation from a string reference - pub fn from_str(key: &str) -> Option { + pub fn try_from_str(key: &str) -> Option { match key { "sign" => Some(Self::Sign), "verify" => Some(Self::Verify), @@ -218,7 +218,7 @@ impl<'de> Visitor<'de> for KeyOpsVisitor { { let mut ops = KeyOpsSet::new(); while let Some(op) = seq.next_element()? 
{ - if let Some(op) = KeyOps::from_str(op) { + if let Some(op) = KeyOps::try_from_str(op) { if ops & op { return Err(serde::de::Error::duplicate_field(op.as_str())); } else { @@ -258,8 +258,8 @@ mod tests { #[test] fn invariants() { - assert_eq!(KeyOpsSet::new().is_empty(), true); - assert_eq!(KeyOpsSet::from(KeyOps::Decrypt).is_empty(), false); + assert!(KeyOpsSet::new().is_empty()); + assert!(!KeyOpsSet::from(KeyOps::Decrypt).is_empty()); assert_eq!(KeyOpsSet::new(), KeyOpsSet::new()); assert_ne!(KeyOpsSet::from(KeyOps::Decrypt), KeyOpsSet::new()); assert_ne!(KeyOps::Decrypt, KeyOps::Encrypt); diff --git a/askar-crypto/src/jwk/parts.rs b/askar-crypto/src/jwk/parts.rs index 1fdb2978..d042ee21 100644 --- a/askar-crypto/src/jwk/parts.rs +++ b/askar-crypto/src/jwk/parts.rs @@ -39,7 +39,7 @@ pub struct JwkParts<'a> { impl<'de> JwkParts<'de> { /// Parse a JWK from a string reference - pub fn from_str(jwk: &'de str) -> Result { + pub fn try_from_str(jwk: &'de str) -> Result { let (parts, _read) = serde_json_core::from_str(jwk).map_err(err_map!(Invalid, "Error parsing JWK"))?; Ok(parts) @@ -67,7 +67,7 @@ impl OptAttr<'_> { self.0.is_some() } - pub fn to_option(&self) -> Option<&str> { + pub fn as_opt_str(&self) -> Option<&str> { self.0 } @@ -211,26 +211,26 @@ impl Serialize for JwkParts<'_> { S: Serializer, { let mut map = serializer.serialize_map(None)?; - if let Some(alg) = self.alg.to_option() { + if let Some(alg) = self.alg.as_opt_str() { map.serialize_entry("alg", alg)?; } - if let Some(crv) = self.crv.to_option() { + if let Some(crv) = self.crv.as_opt_str() { map.serialize_entry("crv", crv)?; } - if let Some(d) = self.d.to_option() { + if let Some(d) = self.d.as_opt_str() { map.serialize_entry("d", d)?; } - if let Some(k) = self.k.to_option() { + if let Some(k) = self.k.as_opt_str() { map.serialize_entry("k", k)?; } - if let Some(kid) = self.kid.to_option() { + if let Some(kid) = self.kid.as_opt_str() { map.serialize_entry("kid", kid)?; } map.serialize_entry("kty", self.kty)?; - if let Some(x) = self.x.to_option() { + if let Some(x) = self.x.as_opt_str() { map.serialize_entry("x", x)?; } - if let Some(y) = self.y.to_option() { + if let Some(y) = self.y.as_opt_str() { map.serialize_entry("y", y)?; } if let Some(ops) = self.key_ops { @@ -254,7 +254,7 @@ mod tests { "key_ops": ["sign", "verify"], "kid": "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ" }"#; - let parts = JwkParts::from_str(jwk).unwrap(); + let parts = JwkParts::try_from_str(jwk).unwrap(); assert_eq!(parts.kty, "OKP"); assert_eq!( parts.kid, diff --git a/askar-crypto/src/kdf/concat.rs b/askar-crypto/src/kdf/concat.rs index 7a1707ea..1051c316 100644 --- a/askar-crypto/src/kdf/concat.rs +++ b/askar-crypto/src/kdf/concat.rs @@ -106,6 +106,12 @@ impl ConcatKDFHash { } } +impl Default for ConcatKDFHash { + fn default() -> Self { + Self::new() + } +} + impl WriteBuffer for ConcatKDFHash { fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { self.hasher.update(data); diff --git a/askar-crypto/src/kdf/ecdh_1pu.rs b/askar-crypto/src/kdf/ecdh_1pu.rs index 0b0fff80..fd71024a 100644 --- a/askar-crypto/src/kdf/ecdh_1pu.rs +++ b/askar-crypto/src/kdf/ecdh_1pu.rs @@ -27,6 +27,7 @@ pub struct Ecdh1PU<'d, Key: KeyExchange + ?Sized> { impl<'d, Key: KeyExchange + ?Sized> Ecdh1PU<'d, Key> { /// Create a new KDF instance + #[allow(clippy::too_many_arguments)] pub fn new( ephem_key: &'d Key, send_key: &'d Key, @@ -80,7 +81,7 @@ impl KeyDerivation for Ecdh1PU<'_, Key> { pub_w.buffer_write(&((output_len as u32) * 8).to_be_bytes())?; // output 
length in bits if !self.cc_tag.is_empty() { pub_w.buffer_write(&(self.cc_tag.len() as u32).to_be_bytes())?; - pub_w.buffer_write(&self.cc_tag)?; + pub_w.buffer_write(self.cc_tag)?; } kdf.hash_params(ConcatKDFParams { diff --git a/src/backend/any.rs b/src/backend/any.rs index 1d084c6c..b09065fe 100644 --- a/src/backend/any.rs +++ b/src/backend/any.rs @@ -86,13 +86,13 @@ impl Backend for AnyBackend { #[cfg(feature = "postgres")] Self::Postgres(store) => { let session = store.session(profile, transaction)?; - Ok(AnyQueryBackend::PostgresSession(session)) + Ok(AnyQueryBackend::PostgresSession(Box::new(session))) } #[cfg(feature = "sqlite")] Self::Sqlite(store) => { let session = store.session(profile, transaction)?; - Ok(AnyQueryBackend::SqliteSession(session)) + Ok(AnyQueryBackend::SqliteSession(Box::new(session))) } _ => unreachable!(), @@ -117,11 +117,11 @@ impl Backend for AnyBackend { pub enum AnyQueryBackend { /// A PostgreSQL store session #[cfg(feature = "postgres")] - PostgresSession(::Session), + PostgresSession(Box<::Session>), /// A Sqlite store session #[cfg(feature = "sqlite")] - SqliteSession(::Session), + SqliteSession(Box<::Session>), #[allow(unused)] #[doc(hidden)] diff --git a/src/backend/db_utils.rs b/src/backend/db_utils.rs index 12a4feee..ef4dd5b2 100644 --- a/src/backend/db_utils.rs +++ b/src/backend/db_utils.rs @@ -155,7 +155,7 @@ impl DbSession { } } -impl<'q, DB: ExtDatabase> Drop for DbSession { +impl Drop for DbSession { fn drop(&mut self) { if self.txn_depth > 0 { self.txn_depth = 0; @@ -436,7 +436,7 @@ pub fn replace_arg_placeholders( '$' => Some((start_offs + 2, index)), '0'..='9' => { let mut end_offs = start_offs + 2; - while let Some(c) = iter.next() { + for c in iter { if ('0'..='9').contains(&c) { end_offs += 1; } else { @@ -528,7 +528,7 @@ pub fn decrypt_scan_entry( let tags = key.decrypt_entry_tags( decode_tags(enc_entry.tags).map_err(|_| err_msg!(Unexpected, "Error decoding tags"))?, )?; - Ok(Entry::new(category.to_string(), name, value, tags)) + Ok(Entry::new(category, name, value, tags)) } pub fn expiry_timestamp(expire_ms: i64) -> Result { @@ -537,6 +537,7 @@ pub fn expiry_timestamp(expire_ms: i64) -> Result { .ok_or_else(|| err_msg!(Unexpected, "Invalid expiry timestamp")) } +#[allow(clippy::type_complexity)] pub fn encode_tag_filter( tag_filter: Option, key: &ProfileKey, @@ -545,8 +546,8 @@ pub fn encode_tag_filter( if let Some(tag_filter) = tag_filter { let tag_query = tag_query(tag_filter.query)?; let mut enc = TagSqlEncoder::new( - |name| Ok(key.encrypt_tag_name(ProfileKey::prepare_input(name.as_bytes()))?), - |value| Ok(key.encrypt_tag_value(ProfileKey::prepare_input(value.as_bytes()))?), + |name| key.encrypt_tag_name(ProfileKey::prepare_input(name.as_bytes())), + |value| key.encrypt_tag_value(ProfileKey::prepare_input(value.as_bytes())), ); if let Some(filter) = enc.encode_query(&tag_query)? 
{ let filter = replace_arg_placeholders::(&filter, (offset as i64) + 1); @@ -605,9 +606,9 @@ where Ok(query) } -pub fn init_keys<'a>( +pub fn init_keys( method: StoreKeyMethod, - pass_key: PassKey<'a>, + pass_key: PassKey<'_>, ) -> Result<(ProfileKey, Vec, StoreKey, String), Error> { if method == StoreKeyMethod::RawKey && pass_key.is_empty() { // disallow random key for a new database diff --git a/src/backend/postgres/mod.rs b/src/backend/postgres/mod.rs index 8a8019f7..e087f344 100644 --- a/src/backend/postgres/mod.rs +++ b/src/backend/postgres/mod.rs @@ -31,43 +31,42 @@ use crate::{ storage::{EncEntryTag, Entry, EntryKind, EntryOperation, EntryTag, Scan, TagFilter}, }; -const COUNT_QUERY: &'static str = "SELECT COUNT(*) FROM items i +const COUNT_QUERY: &str = "SELECT COUNT(*) FROM items i WHERE profile_id = $1 AND kind = $2 AND category = $3 AND (expiry IS NULL OR expiry > CURRENT_TIMESTAMP)"; -const DELETE_QUERY: &'static str = "DELETE FROM items +const DELETE_QUERY: &str = "DELETE FROM items WHERE profile_id = $1 AND kind = $2 AND category = $3 AND name = $4"; -const FETCH_QUERY: &'static str = "SELECT id, value, +const FETCH_QUERY: &str = "SELECT id, value, (SELECT ARRAY_TO_STRING(ARRAY_AGG(it.plaintext || ':' || ENCODE(it.name, 'hex') || ':' || ENCODE(it.value, 'hex')), ',') FROM items_tags it WHERE it.item_id = i.id) tags FROM items i WHERE profile_id = $1 AND kind = $2 AND category = $3 AND name = $4 AND (expiry IS NULL OR expiry > CURRENT_TIMESTAMP)"; -const FETCH_QUERY_UPDATE: &'static str = "SELECT id, value, +const FETCH_QUERY_UPDATE: &str = "SELECT id, value, (SELECT ARRAY_TO_STRING(ARRAY_AGG(it.plaintext || ':' || ENCODE(it.name, 'hex') || ':' || ENCODE(it.value, 'hex')), ',') FROM items_tags it WHERE it.item_id = i.id) tags FROM items i WHERE profile_id = $1 AND kind = $2 AND category = $3 AND name = $4 AND (expiry IS NULL OR expiry > CURRENT_TIMESTAMP) FOR NO KEY UPDATE"; -const INSERT_QUERY: &'static str = - "INSERT INTO items (profile_id, kind, category, name, value, expiry) +const INSERT_QUERY: &str = "INSERT INTO items (profile_id, kind, category, name, value, expiry) VALUES ($1, $2, $3, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id"; -const UPDATE_QUERY: &'static str = "UPDATE items SET value=$5, expiry=$6 +const UPDATE_QUERY: &str = "UPDATE items SET value=$5, expiry=$6 WHERE profile_id=$1 AND kind=$2 AND category=$3 AND name=$4 RETURNING id"; -const SCAN_QUERY: &'static str = "SELECT id, name, value, +const SCAN_QUERY: &str = "SELECT id, name, value, (SELECT ARRAY_TO_STRING(ARRAY_AGG(it.plaintext || ':' || ENCODE(it.name, 'hex') || ':' || ENCODE(it.value, 'hex')), ',') FROM items_tags it WHERE it.item_id = i.id) tags FROM items i WHERE profile_id = $1 AND kind = $2 AND category = $3 AND (expiry IS NULL OR expiry > CURRENT_TIMESTAMP)"; -const DELETE_ALL_QUERY: &'static str = "DELETE FROM items i +const DELETE_ALL_QUERY: &str = "DELETE FROM items i WHERE i.profile_id = $1 AND i.kind = $2 AND i.category = $3"; -const TAG_INSERT_QUERY: &'static str = "INSERT INTO items_tags +const TAG_INSERT_QUERY: &str = "INSERT INTO items_tags (item_id, name, value, plaintext) VALUES ($1, $2, $3, $4)"; -const TAG_DELETE_QUERY: &'static str = "DELETE FROM items_tags +const TAG_DELETE_QUERY: &str = "DELETE FROM items_tags WHERE item_id=$1"; mod provision; @@ -384,12 +383,8 @@ impl QueryBackend for DbSession { ); pin!(scan); let mut enc_rows = vec![]; - loop { - if let Some(rows) = scan.try_next().await? 
{ - enc_rows.extend(rows) - } else { - break; - } + while let Some(rows) = scan.try_next().await? { + enc_rows.extend(rows) } unblock(move || decrypt_scan_batch(category, enc_rows, &key)).await }) @@ -496,7 +491,7 @@ impl QueryBackend for DbSession { }) .await?; let mut active = acquire_session(&mut *self).await?; - Ok(perform_remove(&mut active, kind, &enc_category, &enc_name, false).await?) + perform_remove(&mut active, kind, &enc_category, &enc_name, false).await }), } } @@ -546,9 +541,9 @@ async fn acquire_key( } } -async fn acquire_session<'q>( - session: &'q mut DbSession, -) -> Result, Error> { +async fn acquire_session( + session: &'_ mut DbSession, +) -> Result, Error> { session.make_active(&resolve_profile_key).await } @@ -559,24 +554,23 @@ async fn resolve_profile_key( ) -> Result<(ProfileId, Arc), Error> { if let Some((pid, key)) = cache.get_profile(profile.as_str()).await { Ok((pid, key)) + } else if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=$1") + .bind(profile.as_str()) + .fetch_optional(conn) + .await? + { + let pid = row.try_get(0)?; + let key = Arc::new(cache.load_key(row.try_get(1)?).await?); + cache.add_profile(profile, pid, key.clone()).await; + Ok((pid, key)) } else { - if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=$1") - .bind(profile.as_str()) - .fetch_optional(conn) - .await? - { - let pid = row.try_get(0)?; - let key = Arc::new(cache.load_key(row.try_get(1)?).await?); - cache.add_profile(profile, pid, key.clone()).await; - Ok((pid, key)) - } else { - Err(err_msg!(NotFound, "Profile not found")) - } + Err(err_msg!(NotFound, "Profile not found")) } } -async fn perform_insert<'q>( - active: &mut DbSessionTxn<'q, Postgres>, +#[allow(clippy::too_many_arguments)] +async fn perform_insert( + active: &mut DbSessionTxn<'_, Postgres>, kind: EntryKind, enc_category: &[u8], enc_name: &[u8], @@ -651,8 +645,9 @@ async fn perform_remove<'q>( } } -fn perform_scan<'q>( - mut active: DbSessionRef<'q, Postgres>, +#[allow(clippy::too_many_arguments)] +fn perform_scan( + mut active: DbSessionRef<'_, Postgres>, profile_id: ProfileId, key: Arc, kind: EntryKind, @@ -661,7 +656,7 @@ fn perform_scan<'q>( offset: Option, limit: Option, for_update: bool, -) -> impl Stream, Error>> + 'q { +) -> impl Stream, Error>> + '_ { try_stream! { let mut params = QueryParams::new(); params.push(profile_id); @@ -696,7 +691,6 @@ fn perform_scan<'q>( } } drop(rows); - drop(acquired); drop(active); if batch.len() > 0 { diff --git a/src/backend/postgres/provision.rs b/src/backend/postgres/provision.rs index d9d51732..b66ce9d4 100644 --- a/src/backend/postgres/provision.rs +++ b/src/backend/postgres/provision.rs @@ -119,7 +119,7 @@ impl PostgresStoreOptions { conn_opts.log_slow_statements(log::LevelFilter::Debug, Default::default()); } PgPoolOptions::default() - .connect_timeout(self.connect_timeout) + .acquire_timeout(self.connect_timeout) .idle_timeout(self.idle_timeout) .max_connections(self.max_connections) .min_connections(self.min_connections) @@ -180,28 +180,26 @@ impl PostgresStoreOptions { if recreate { // remove expected tables reset_db(&mut *txn).await?; - } else { - if sqlx::query_scalar::<_, i64>( - "SELECT COUNT(*) FROM information_schema.tables + } else if sqlx::query_scalar::<_, i64>( + "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='public' AND table_name='config'", + ) + .fetch_one(&mut txn) + .await? 
+ == 1 + { + // proceed to open, will fail if the version doesn't match + return open_db( + conn_pool, + Some(method), + pass_key, + profile, + self.host, + self.name, ) - .fetch_one(&mut txn) - .await? - == 1 - { - // proceed to open, will fail if the version doesn't match - return open_db( - conn_pool, - Some(method), - pass_key, - profile, - self.host, - self.name, - ) - .await; - } - // no 'config' table, assume empty database + .await; } + // else: no 'config' table, assume empty database let (profile_key, enc_profile_key, store_key, store_key_ref) = unblock({ let pass_key = pass_key.into_owned(); diff --git a/src/backend/sqlite/mod.rs b/src/backend/sqlite/mod.rs index 381b8db6..01384fab 100644 --- a/src/backend/sqlite/mod.rs +++ b/src/backend/sqlite/mod.rs @@ -33,33 +33,32 @@ use crate::{ mod provision; pub use provision::SqliteStoreOptions; -const COUNT_QUERY: &'static str = "SELECT COUNT(*) FROM items i +const COUNT_QUERY: &str = "SELECT COUNT(*) FROM items i WHERE profile_id = ?1 AND kind = ?2 AND category = ?3 AND (expiry IS NULL OR expiry > DATETIME('now'))"; -const DELETE_QUERY: &'static str = "DELETE FROM items +const DELETE_QUERY: &str = "DELETE FROM items WHERE profile_id = ?1 AND kind = ?2 AND category = ?3 AND name = ?4"; -const FETCH_QUERY: &'static str = "SELECT i.id, i.value, +const FETCH_QUERY: &str = "SELECT i.id, i.value, (SELECT GROUP_CONCAT(it.plaintext || ':' || HEX(it.name) || ':' || HEX(it.value)) FROM items_tags it WHERE it.item_id = i.id) AS tags FROM items i WHERE i.profile_id = ?1 AND i.kind = ?2 AND i.category = ?3 AND i.name = ?4 AND (i.expiry IS NULL OR i.expiry > DATETIME('now'))"; -const INSERT_QUERY: &'static str = +const INSERT_QUERY: &str = "INSERT OR IGNORE INTO items (profile_id, kind, category, name, value, expiry) VALUES (?1, ?2, ?3, ?4, ?5, ?6)"; -const UPDATE_QUERY: &'static str = - "UPDATE items SET value=?5, expiry=?6 WHERE profile_id=?1 AND kind=?2 +const UPDATE_QUERY: &str = "UPDATE items SET value=?5, expiry=?6 WHERE profile_id=?1 AND kind=?2 AND category=?3 AND name=?4 RETURNING id"; -const SCAN_QUERY: &'static str = "SELECT i.id, i.name, i.value, +const SCAN_QUERY: &str = "SELECT i.id, i.name, i.value, (SELECT GROUP_CONCAT(it.plaintext || ':' || HEX(it.name) || ':' || HEX(it.value)) FROM items_tags it WHERE it.item_id = i.id) AS tags FROM items i WHERE i.profile_id = ?1 AND i.kind = ?2 AND i.category = ?3 AND (i.expiry IS NULL OR i.expiry > DATETIME('now'))"; -const DELETE_ALL_QUERY: &'static str = "DELETE FROM items AS i +const DELETE_ALL_QUERY: &str = "DELETE FROM items AS i WHERE i.profile_id = ?1 AND i.kind = ?2 AND i.category = ?3"; -const TAG_INSERT_QUERY: &'static str = "INSERT INTO items_tags +const TAG_INSERT_QUERY: &str = "INSERT INTO items_tags (item_id, name, value, plaintext) VALUES (?1, ?2, ?3, ?4)"; -const TAG_DELETE_QUERY: &'static str = "DELETE FROM items_tags +const TAG_DELETE_QUERY: &str = "DELETE FROM items_tags WHERE item_id=?1"; /// A Sqlite database store @@ -359,12 +358,8 @@ impl QueryBackend for DbSession { ); pin!(scan); let mut enc_rows = vec![]; - loop { - if let Some(rows) = scan.try_next().await? { - enc_rows.extend(rows) - } else { - break; - } + while let Some(rows) = scan.try_next().await? { + enc_rows.extend(rows) } unblock(move || decrypt_scan_batch(category, enc_rows, &key)).await }) @@ -466,7 +461,7 @@ impl QueryBackend for DbSession { }) .await?; let mut active = acquire_session(&mut *self).await?; - Ok(perform_remove(&mut active, kind, &enc_category, &enc_name, false).await?) 
+ perform_remove(&mut active, kind, &enc_category, &enc_name, false).await }), } } @@ -508,9 +503,9 @@ async fn acquire_key( } } -async fn acquire_session<'q>( - session: &'q mut DbSession, -) -> Result, Error> { +async fn acquire_session( + session: &mut DbSession, +) -> Result, Error> { session.make_active(&resolve_profile_key).await } @@ -521,24 +516,23 @@ async fn resolve_profile_key( ) -> Result<(ProfileId, Arc), Error> { if let Some((pid, key)) = cache.get_profile(profile.as_str()).await { Ok((pid, key)) + } else if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=?1") + .bind(profile.as_str()) + .fetch_optional(conn) + .await? + { + let pid = row.try_get(0)?; + let key = Arc::new(cache.load_key(row.try_get(1)?).await?); + cache.add_profile(profile, pid, key.clone()).await; + Ok((pid, key)) } else { - if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=?1") - .bind(profile.as_str()) - .fetch_optional(conn) - .await? - { - let pid = row.try_get(0)?; - let key = Arc::new(cache.load_key(row.try_get(1)?).await?); - cache.add_profile(profile, pid, key.clone()).await; - Ok((pid, key)) - } else { - Err(err_msg!(NotFound, "Profile not found")) - } + Err(err_msg!(NotFound, "Profile not found")) } } -async fn perform_insert<'q>( - active: &mut DbSessionTxn<'q, Sqlite>, +#[allow(clippy::too_many_arguments)] +async fn perform_insert( + active: &mut DbSessionTxn<'_, Sqlite>, kind: EntryKind, enc_category: &[u8], enc_name: &[u8], @@ -616,8 +610,9 @@ async fn perform_remove<'q>( } } -fn perform_scan<'q>( - mut active: DbSessionRef<'q, Sqlite>, +#[allow(clippy::too_many_arguments)] +fn perform_scan( + mut active: DbSessionRef<'_, Sqlite>, profile_id: ProfileId, key: Arc, kind: EntryKind, @@ -625,7 +620,7 @@ fn perform_scan<'q>( tag_filter: Option, offset: Option, limit: Option, -) -> impl Stream, Error>> + 'q { +) -> impl Stream, Error>> + '_ { try_stream! { let mut params = QueryParams::new(); params.push(profile_id); @@ -657,7 +652,6 @@ fn perform_scan<'q>( } } drop(rows); - drop(acquired); drop(active); if !batch.is_empty() { diff --git a/src/backend/sqlite/provision.rs b/src/backend/sqlite/provision.rs index d57b9e2b..6d5ffa27 100644 --- a/src/backend/sqlite/provision.rs +++ b/src/backend/sqlite/provision.rs @@ -154,25 +154,24 @@ impl SqliteStoreOptions { } let conn_pool = self.pool(true).await?; - if !recreate { - if sqlx::query_scalar::<_, i64>( + if !recreate + && sqlx::query_scalar::<_, i64>( "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='config'", ) .fetch_one(&conn_pool) .await? == 1 - { - return open_db( - conn_pool, - Some(method), - pass_key, - profile, - self.path.to_string(), - ) - .await; - } - // no 'config' table, assume empty database + { + return open_db( + conn_pool, + Some(method), + pass_key, + profile, + self.path.to_string(), + ) + .await; } + // else: no 'config' table, assume empty database let default_profile = profile .map(str::to_string) @@ -209,7 +208,7 @@ impl SqliteStoreOptions { } Err(err) => Err(err.into()), }?; - Ok(open_db(conn_pool, method, pass_key, profile, self.path.to_string()).await?) 
+ open_db(conn_pool, method, pass_key, profile, self.path.to_string()).await } /// Remove the Sqlite store defined by these configuration options @@ -223,15 +222,15 @@ impl SqliteStoreOptions { /// Default options for an in-memory Sqlite store pub fn in_memory() -> Self { - let mut opts = Options::default(); - opts.host = Cow::Borrowed(":memory:"); - Self::new(opts).unwrap() + Self::from_path(":memory:") } /// Default options for a given Sqlite database path pub fn from_path(path: &str) -> Self { - let mut opts = Options::default(); - opts.host = Cow::Borrowed(path); + let opts = Options { + host: Cow::Borrowed(path), + ..Default::default() + }; Self::new(opts).unwrap() } } diff --git a/src/backend/types.rs b/src/backend/types.rs index d397a529..4ede8fee 100644 --- a/src/backend/types.rs +++ b/src/backend/types.rs @@ -108,6 +108,7 @@ pub trait QueryBackend: Send { ) -> BoxFuture<'q, Result>; /// Insert or replace a record in the store + #[allow(clippy::too_many_arguments)] fn update<'q>( &'q mut self, kind: EntryKind, diff --git a/src/error.rs b/src/error.rs index b20aa9f7..781233a0 100644 --- a/src/error.rs +++ b/src/error.rs @@ -81,7 +81,7 @@ impl Error { /// Accessor for the error message pub fn message(&self) -> Option<&str> { - self.message.as_ref().map(String::as_str) + self.message.as_deref() } pub(crate) fn with_cause>>( diff --git a/src/ffi/secret.rs b/src/ffi/secret.rs index e24b0fb7..0452f8c5 100644 --- a/src/ffi/secret.rs +++ b/src/ffi/secret.rs @@ -1,4 +1,4 @@ -use std::{convert::TryFrom, mem, ptr}; +use std::{mem, ptr}; use crate::{crypto::buffer::SecretBytes, kms::Encrypted}; diff --git a/src/ffi/store.rs b/src/ffi/store.rs index 240411f8..ebddc91f 100644 --- a/src/ffi/store.rs +++ b/src/ffi/store.rs @@ -28,9 +28,9 @@ new_sequence_handle!(ScanHandle, FFI_SCAN_COUNTER); static FFI_STORES: Lazy>>> = Lazy::new(|| RwLock::new(BTreeMap::new())); static FFI_SESSIONS: Lazy> = - Lazy::new(|| StoreResourceMap::new()); + Lazy::new(StoreResourceMap::new); static FFI_SCANS: Lazy>> = - Lazy::new(|| StoreResourceMap::new()); + Lazy::new(StoreResourceMap::new); impl StoreHandle { pub async fn create(value: AnyStore) -> Self { @@ -63,6 +63,7 @@ impl StoreHandle { } struct StoreResourceMap { + #[allow(clippy::type_complexity)] map: RwLock>)>>, } @@ -177,7 +178,7 @@ pub extern "C" fn askar_store_provision( let store = spec_uri.provision_backend( key_method, pass_key, - profile.as_ref().map(String::as_str), + profile.as_deref(), recreate != 0 ).await?; Ok(StoreHandle::create(store).await) @@ -221,7 +222,7 @@ pub extern "C" fn askar_store_open( let store = spec_uri.open_backend( key_method, pass_key, - profile.as_ref().map(String::as_str) + profile.as_deref() ).await?; Ok(StoreHandle::create(store).await) }.await; @@ -333,7 +334,7 @@ pub extern "C" fn askar_store_remove_profile( spawn_ok(async move { let result = async { let store = handle.load().await?; - Ok(store.remove_profile(profile).await?) 
+ store.remove_profile(profile).await }.await; cb.resolve(result); }); @@ -730,7 +731,7 @@ pub extern "C" fn askar_session_update( spawn_ok(async move { let result = async { let mut session = FFI_SESSIONS.borrow(handle).await?; - let result = session.update(operation, &category, &name, Some(value.as_slice()), tags.as_ref().map(Vec::as_slice), expiry_ms).await; + let result = session.update(operation, &category, &name, Some(value.as_slice()), tags.as_deref(), expiry_ms).await; result }.await; cb.resolve(result); @@ -785,8 +786,8 @@ pub extern "C" fn askar_session_insert_key( let result = session.insert_key( name.as_str(), &key, - metadata.as_ref().map(String::as_str), - tags.as_ref().map(Vec::as_slice), + metadata.as_deref(), + tags.as_deref(), expiry_ms, ).await; result @@ -871,8 +872,8 @@ pub extern "C" fn askar_session_fetch_all_keys( let result = async { let mut session = FFI_SESSIONS.borrow(handle).await?; let result = session.fetch_all_keys( - alg.as_ref().map(String::as_str), - thumbprint.as_ref().map(String::as_str), + alg.as_deref(), + thumbprint.as_deref(), tag_filter, limit, for_update != 0 @@ -928,8 +929,8 @@ pub extern "C" fn askar_session_update_key( let mut session = FFI_SESSIONS.borrow(handle).await?; let result = session.update_key( &name, - metadata.as_ref().map(String::as_str), - tags.as_ref().map(Vec::as_slice), + metadata.as_deref(), + tags.as_deref(), expiry_ms, ).await; diff --git a/src/kms/enc.rs b/src/kms/enc.rs index 148ade25..2a706589 100644 --- a/src/kms/enc.rs +++ b/src/kms/enc.rs @@ -61,6 +61,7 @@ pub struct ToDecrypt<'d> { impl<'d> ToDecrypt<'d> { /// Accessor for the combined length + #[allow(clippy::len_without_is_empty)] #[inline] pub fn len(&self) -> usize { self.ciphertext.len() + self.tag.len() diff --git a/src/kms/entry.rs b/src/kms/entry.rs index 070d587d..16f6b17c 100644 --- a/src/kms/entry.rs +++ b/src/kms/entry.rs @@ -29,9 +29,8 @@ impl KeyParams { } pub(crate) fn from_slice(params: &[u8]) -> Result { - let result = serde_cbor::from_slice(params) - .map_err(|e| err_msg!(Unexpected, "Error deserializing key params: {}", e)); - result + serde_cbor::from_slice(params) + .map_err(|e| err_msg!(Unexpected, "Error deserializing key params: {}", e)) } } diff --git a/src/kms/envelope.rs b/src/kms/envelope.rs index 640866bb..095c2517 100644 --- a/src/kms/envelope.rs +++ b/src/kms/envelope.rs @@ -75,6 +75,7 @@ pub fn crypto_box_seal_open( } /// Derive an ECDH-1PU shared key for authenticated encryption +#[allow(clippy::too_many_arguments)] pub fn derive_key_ecdh_1pu( key_alg: KeyAlg, ephem_key: &LocalKey, diff --git a/src/kms/local_key.rs b/src/kms/local_key.rs index 4d9fd1cc..d25f5ef0 100644 --- a/src/kms/local_key.rs +++ b/src/kms/local_key.rs @@ -146,12 +146,12 @@ impl LocalKey { /// Get the set of indexed JWK thumbprints for this key or keypair pub fn to_jwk_thumbprints(&self) -> Result, Error> { if self.inner.algorithm() == KeyAlg::Bls12_381(BlsCurves::G1G2) { - return Ok(vec![ + Ok(vec![ self.inner .to_jwk_thumbprint(Some(KeyAlg::Bls12_381(BlsCurves::G1)))?, self.inner .to_jwk_thumbprint(Some(KeyAlg::Bls12_381(BlsCurves::G2)))?, - ]); + ]) } else { Ok(vec![self.inner.to_jwk_thumbprint(None)?]) } @@ -189,8 +189,7 @@ impl LocalKey { if nonce_len == 0 { return Ok(Vec::new()); } - let mut buf = Vec::with_capacity(nonce_len); - buf.resize(nonce_len, 0u8); + let mut buf = vec![0; nonce_len]; fill_random(&mut buf); Ok(buf) } diff --git a/src/protect/kdf/argon2.rs b/src/protect/kdf/argon2.rs index 5788d703..08c5055c 100644 --- a/src/protect/kdf/argon2.rs +++ 
b/src/protect/kdf/argon2.rs @@ -12,8 +12,8 @@ use crate::{ pub use crate::crypto::kdf::argon2::SaltSize; -pub const LEVEL_INTERACTIVE: &'static str = "13:int"; -pub const LEVEL_MODERATE: &'static str = "13:mod"; +pub const LEVEL_INTERACTIVE: &str = "13:int"; +pub const LEVEL_MODERATE: &str = "13:mod"; /// Argon2i derivation methods #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] diff --git a/src/protect/kdf/mod.rs b/src/protect/kdf/mod.rs index 96ffe7ac..0d36265b 100644 --- a/src/protect/kdf/mod.rs +++ b/src/protect/kdf/mod.rs @@ -9,7 +9,7 @@ mod argon2; pub use self::argon2::Level as Argon2Level; use self::argon2::SaltSize as Argon2Salt; -pub const METHOD_ARGON2I: &'static str = "argon2i"; +pub const METHOD_ARGON2I: &str = "argon2i"; /// Supported KDF methods for generating or referencing a store key #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] @@ -19,36 +19,31 @@ pub enum KdfMethod { } impl KdfMethod { - pub(crate) fn from_str(method: &str) -> Option<(Self, String)> { + pub(crate) fn decode(method: &str) -> Result<(Self, String), Error> { let mut method_and_detail = method.splitn(3, ':'); let prefix = method_and_detail.next(); - if prefix != Some(PREFIX_KDF) { - return None; - } - let method = method_and_detail.next().unwrap_or_default(); - let mut level_and_detail = method_and_detail.next().unwrap_or_default().splitn(2, '?'); - let level = level_and_detail.next().unwrap_or_default(); - let detail = level_and_detail.next().unwrap_or_default(); - match method { - METHOD_ARGON2I => { + if prefix == Some(PREFIX_KDF) { + let method = method_and_detail.next().unwrap_or_default(); + let mut level_and_detail = method_and_detail.next().unwrap_or_default().splitn(2, '?'); + let level = level_and_detail.next().unwrap_or_default(); + let detail = level_and_detail.next().unwrap_or_default(); + if method == METHOD_ARGON2I { if let Some(level) = Argon2Level::from_str(level) { - Some(( + return Ok(( Self::Argon2i(level), if detail.is_empty() { "".to_owned() } else { format!("?{}", detail) }, - )) - } else { - None + )); } } - _ => None, } + Err(err_msg!(Unsupported, "Invalid key derivation method")) } - pub(crate) fn to_string(&self, detail: Option<&str>) -> String { + pub(crate) fn encode(&self, detail: Option<&str>) -> String { match self { Self::Argon2i(level) => format!( "{}:{}:{}{}", @@ -66,7 +61,7 @@ impl KdfMethod { let salt = level.generate_salt(); let key = level.derive_key(password.as_bytes(), salt.as_ref())?; let detail = format!("?salt={}", salt.as_hex()); - Ok((key.into(), detail)) + Ok((key, detail)) } } } @@ -76,7 +71,7 @@ impl KdfMethod { Self::Argon2i(level) => { let salt = parse_salt::(detail)?; let key = level.derive_key(password.as_bytes(), salt.as_ref())?; - Ok(key.into()) + Ok(key) } } } diff --git a/src/protect/mod.rs b/src/protect/mod.rs index e3cf6ce9..c4f5a632 100644 --- a/src/protect/mod.rs +++ b/src/protect/mod.rs @@ -44,7 +44,7 @@ impl KeyCache { let data = store_key .unwrap_data(ciphertext) .map_err(err_map!(Encryption, "Error decrypting profile key"))?; - Ok(ProfileKey::from_slice(data.as_ref())?) 
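// Editor's note: an illustrative sketch, not part of the patch, of the shape
// of the from_str -> decode change above: a parser that used to return Option
// now returns Result, so the caller no longer re-attaches an error message.
// The error type and strings below are simplified stand-ins for the crate's
// own Error type and err_msg! macro.
#[derive(Debug)]
struct ParseError(&'static str);

fn decode_method(uri: &str) -> Result<(String, String), ParseError> {
    let mut parts = uri.splitn(3, ':');
    if parts.next() != Some("kdf") {
        return Err(ParseError("Invalid key derivation method"));
    }
    let method = parts.next().unwrap_or_default().to_string();
    let detail = parts.next().unwrap_or_default().to_string();
    Ok((method, detail))
}

fn main() {
    assert!(decode_method("kdf:argon2i:13:mod").is_ok());
    assert_eq!(decode_method("raw").unwrap_err().0, "Invalid key derivation method");
}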
+ ProfileKey::from_slice(data.as_ref()) }) .await } @@ -112,13 +112,13 @@ impl EntryEncryptor for NullEncryptor { .into_iter() .map(|tag| match tag { EntryTag::Encrypted(name, value) => EncEntryTag { - name: name.into_bytes().into(), - value: value.into_bytes().into(), + name: name.into_bytes(), + value: value.into_bytes(), plaintext: false, }, EntryTag::Plaintext(name, value) => EncEntryTag { - name: name.into_bytes().into(), - value: value.into_bytes().into(), + name: name.into_bytes(), + value: value.into_bytes(), plaintext: true, }, }) @@ -126,10 +126,10 @@ impl EntryEncryptor for NullEncryptor { } fn decrypt_entry_category(&self, enc_category: Vec) -> Result { - Ok(String::from_utf8(enc_category).map_err(err_map!(Encryption))?) + String::from_utf8(enc_category).map_err(err_map!(Encryption)) } fn decrypt_entry_name(&self, enc_name: Vec) -> Result { - Ok(String::from_utf8(enc_name).map_err(err_map!(Encryption))?) + String::from_utf8(enc_name).map_err(err_map!(Encryption)) } fn decrypt_entry_value( &self, @@ -140,7 +140,7 @@ impl EntryEncryptor for NullEncryptor { Ok(enc_value.into()) } fn decrypt_entry_tags(&self, enc_tags: Vec) -> Result, Error> { - Ok(enc_tags.into_iter().try_fold(vec![], |mut acc, tag| { + enc_tags.into_iter().try_fold(vec![], |mut acc, tag| { let name = String::from_utf8(tag.name).map_err(err_map!(Encryption))?; let value = String::from_utf8(tag.value).map_err(err_map!(Encryption))?; acc.push(if tag.plaintext { @@ -149,6 +149,6 @@ impl EntryEncryptor for NullEncryptor { EntryTag::Encrypted(name, value) }); Result::<_, Error>::Ok(acc) - })?) + }) } } diff --git a/src/protect/pass_key.rs b/src/protect/pass_key.rs index 83901d6c..ad1a4aef 100644 --- a/src/protect/pass_key.rs +++ b/src/protect/pass_key.rs @@ -8,7 +8,7 @@ use std::{ }; /// A possibly-empty password or key used to derive a store key -#[derive(Clone)] +#[derive(Clone, Default)] pub struct PassKey<'a>(Option>); impl PassKey<'_> { @@ -47,12 +47,6 @@ impl Debug for PassKey<'_> { } } -impl Default for PassKey<'_> { - fn default() -> Self { - Self(None) - } -} - impl Deref for PassKey<'_> { type Target = str; @@ -90,18 +84,15 @@ impl<'a> From> for PassKey<'a> { impl<'a, 'b> PartialEq> for PassKey<'a> { fn eq(&self, other: &PassKey<'b>) -> bool { - &**self == &**other + **self == **other } } impl Eq for PassKey<'_> {} impl Zeroize for PassKey<'_> { fn zeroize(&mut self) { - match self.0.take() { - Some(Cow::Owned(mut s)) => { - s.zeroize(); - } - _ => (), + if let Some(Cow::Owned(mut s)) = self.0.take() { + s.zeroize(); } } } diff --git a/src/protect/profile_key.rs b/src/protect/profile_key.rs index c8d0e873..e281da09 100644 --- a/src/protect/profile_key.rs +++ b/src/protect/profile_key.rs @@ -210,7 +210,7 @@ where let name = self.encrypt_tag_name(name.into())?; Ok(EncEntryTag { name, - value: value.into_bytes().into(), + value: value.into_bytes(), plaintext: true, }) } @@ -274,7 +274,7 @@ mod tests { .encrypt_entry_value( test_record.category.as_bytes(), test_record.name.as_bytes(), - test_record.value.clone().into(), + test_record.value.clone(), ) .unwrap(); let enc_tags = key.encrypt_entry_tags(test_record.tags.clone()).unwrap(); diff --git a/src/protect/store_key.rs b/src/protect/store_key.rs index 676a3e33..6272ab54 100644 --- a/src/protect/store_key.rs +++ b/src/protect/store_key.rs @@ -12,9 +12,9 @@ use crate::{ error::Error, }; -pub const PREFIX_KDF: &'static str = "kdf"; -pub const PREFIX_RAW: &'static str = "raw"; -pub const PREFIX_NONE: &'static str = "none"; +pub const PREFIX_KDF: &str = "kdf"; +pub 
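// Editor's note: an illustrative sketch, not part of the patch, of two
// cleanups from the PassKey changes above: deriving Default when the derived
// value (None) matches what the hand-written impl produced, and replacing a
// single-arm match with if let. Secret and wipe() are made-up stand-ins.
use std::borrow::Cow;

#[derive(Default)]
struct Secret<'a>(Option<Cow<'a, str>>);

impl Secret<'_> {
    fn wipe(&mut self) {
        // Before: match self.0.take() { Some(Cow::Owned(mut s)) => { s.zeroize(); } _ => () }
        if let Some(Cow::Owned(mut s)) = self.0.take() {
            s.clear(); // stand-in for zeroizing the owned buffer
        }
    }
}

fn main() {
    let mut secret = Secret(Some(Cow::Owned("hunter2".to_string())));
    secret.wipe();
    assert!(secret.0.is_none());
    assert!(Secret::default().0.is_none());
}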
const PREFIX_RAW: &str = "raw"; +pub const PREFIX_NONE: &str = "none"; pub type StoreKeyType = Chacha20Key; @@ -55,6 +55,7 @@ impl StoreKey { Ok(Self(Some(StoreKeyType::random()?))) } + #[allow(unused)] pub fn is_empty(&self) -> bool { self.0.is_none() } @@ -119,10 +120,10 @@ impl StoreKeyMethod { // let detail = prefix_and_detail.next().unwrap_or_default(); match prefix { PREFIX_RAW => Ok(Self::RawKey), - PREFIX_KDF => match KdfMethod::from_str(uri) { - Some((method, _)) => Ok(Self::DeriveKey(method)), - None => Err(err_msg!(Unsupported, "Invalid key derivation method")), - }, + PREFIX_KDF => { + let (method, _) = KdfMethod::decode(uri)?; + Ok(Self::DeriveKey(method)) + } PREFIX_NONE => Ok(Self::Unprotected), _ => Err(err_msg!(Unsupported, "Invalid store key method")), } @@ -177,13 +178,10 @@ impl StoreKeyReference { let prefix = prefix_and_detail.next().unwrap_or_default(); match prefix { PREFIX_RAW => Ok(Self::RawKey), - PREFIX_KDF => match KdfMethod::from_str(uri) { - Some((method, detail)) => Ok(Self::DeriveKey(method, detail)), - None => Err(err_msg!( - Unsupported, - "Invalid key derivation method for reference" - )), - }, + PREFIX_KDF => { + let (method, detail) = KdfMethod::decode(uri)?; + Ok(Self::DeriveKey(method, detail)) + } PREFIX_NONE => Ok(Self::Unprotected), _ => Err(err_msg!( Unsupported, @@ -195,10 +193,9 @@ impl StoreKeyReference { pub fn compare_method(&self, method: &StoreKeyMethod) -> bool { match self { // Self::ManagedKey(_keyref) => matches!(method, WrapKeyMethod::CreateManagedKey(..)), - Self::DeriveKey(kdf_method, _detail) => match method { - StoreKeyMethod::DeriveKey(m) if m == kdf_method => true, - _ => false, - }, + Self::DeriveKey(kdf_method, _detail) => { + matches!(method, StoreKeyMethod::DeriveKey(m) if m == kdf_method) + } Self::RawKey => *method == StoreKeyMethod::RawKey, Self::Unprotected => *method == StoreKeyMethod::Unprotected, } @@ -207,7 +204,7 @@ impl StoreKeyReference { pub fn into_uri(self) -> String { match self { // Self::ManagedKey(keyref) => keyref, - Self::DeriveKey(method, detail) => method.to_string(Some(detail.as_str())), + Self::DeriveKey(method, detail) => method.encode(Some(detail.as_str())), Self::RawKey => PREFIX_RAW.to_string(), Self::Unprotected => PREFIX_NONE.to_string(), } @@ -272,7 +269,7 @@ mod tests { let unwrapped = key.unwrap_data(wrapped).expect("Error unwrapping data"); assert_eq!(unwrapped, &input[..]); let key_uri = key_ref.into_uri(); - assert_eq!(key_uri.starts_with("kdf:argon2i:13:mod?salt="), true); + assert!(key_uri.starts_with("kdf:argon2i:13:mod?salt=")); } #[test] @@ -304,7 +301,7 @@ mod tests { .resolve("not my pass".into()) .expect("Error deriving comparison key"); let unwrapped_err = check_bad_pass.unwrap_data(wrapped); - assert_eq!(unwrapped_err.is_err(), true); + assert!(unwrapped_err.is_err()); } #[test] @@ -327,7 +324,7 @@ mod tests { let (key, key_ref) = StoreKeyMethod::RawKey .resolve(raw_key.as_ref()) .expect("Error resolving raw key"); - assert_eq!(key.is_empty(), false); + assert!(!key.is_empty()); let wrapped = key .wrap_data((&input[..]).into()) .expect("Error wrapping input"); @@ -342,10 +339,10 @@ mod tests { assert_eq!(unwrapped, &input[..]); let check_no_key = key_ref.resolve(None.into()); - assert_eq!(check_no_key.is_err(), true); + assert!(check_no_key.is_err()); let check_bad_key = key_ref.resolve("not the key".into()); - assert_eq!(check_bad_key.is_err(), true); + assert!(check_bad_key.is_err()); } #[test] @@ -354,7 +351,7 @@ mod tests { let (key, key_ref) = StoreKeyMethod::Unprotected 
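// Editor's note: an illustrative sketch, not part of the patch, of the
// matches! rewrite in compare_method above; the enum and function here are
// simplified stand-ins for StoreKeyMethod and its comparison.
enum KeyMethod {
    Derive(u8),
    Raw,
}

fn is_same_derivation(reference: &KeyMethod, level: u8) -> bool {
    // Before: match reference { KeyMethod::Derive(l) if *l == level => true, _ => false }
    matches!(reference, KeyMethod::Derive(l) if *l == level)
}

fn main() {
    assert!(is_same_derivation(&KeyMethod::Derive(13), 13));
    assert!(!is_same_derivation(&KeyMethod::Derive(12), 13));
    assert!(!is_same_derivation(&KeyMethod::Raw, 13));
}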
.resolve(None.into()) .expect("Error resolving unprotected"); - assert_eq!(key.is_empty(), true); + assert!(key.is_empty()); let wrapped = key .wrap_data((&input[..]).into()) .expect("Error wrapping unprotected"); diff --git a/src/storage/entry.rs b/src/storage/entry.rs index c5d4629d..43fabd48 100644 --- a/src/storage/entry.rs +++ b/src/storage/entry.rs @@ -17,12 +17,12 @@ use super::wql; use crate::{crypto::buffer::SecretBytes, error::Error}; pub(crate) fn sorted_tags(tags: &Vec) -> Vec<&EntryTag> { - if tags.len() > 0 { + if tags.is_empty() { + Vec::new() + } else { let mut tags = tags.iter().collect::>(); tags.sort(); tags - } else { - Vec::new() } } @@ -309,7 +309,7 @@ impl Serialize for EntryTagSet<'_> { S: Serializer, { if self.1 { - serializer.serialize_str(&self.0) + serializer.serialize_str(self.0) } else { serializer.collect_str(&format_args!("~{}", self.0)) } @@ -322,7 +322,7 @@ impl Serialize for EntryTagSet<'_> { EntryTag::Encrypted(name, val) => (TagName(name.as_str(), true), val.as_str()), EntryTag::Plaintext(name, val) => (TagName(name.as_str(), false), val.as_str()), }; - tags.entry(name).or_insert_with(|| vec![]).push(value); + tags.entry(name).or_insert_with(Vec::new).push(value); } let mut map = serializer.serialize_map(Some(tags.len()))?; @@ -370,7 +370,7 @@ impl TagFilter { /// Get the inverse of a tag filter #[inline] - pub fn not(filter: TagFilter) -> Self { + pub fn negate(filter: TagFilter) -> Self { Self { query: wql::Query::Not(Box::new(filter.query)), } @@ -471,6 +471,7 @@ impl FromStr for TagFilter { /// An active record scan of a store backend pub struct Scan<'s, T> { + #[allow(clippy::type_complexity)] stream: Option, Error>> + Send + 's>>>, page_size: usize, } diff --git a/src/storage/options.rs b/src/storage/options.rs index 2f9cd9c3..37aa5bbd 100644 --- a/src/storage/options.rs +++ b/src/storage/options.rs @@ -145,7 +145,6 @@ impl<'a> IntoOptions<'a> for &'a str { #[cfg(test)] mod tests { use super::*; - use std::iter::FromIterator; #[test] fn options_basic() { diff --git a/src/storage/store.rs b/src/storage/store.rs index b7331cb3..2f42d17a 100644 --- a/src/storage/store.rs +++ b/src/storage/store.rs @@ -40,17 +40,17 @@ impl Store { method: StoreKeyMethod, pass_key: PassKey<'_>, ) -> Result<(), Error> { - Ok(self.0.rekey_backend(method, pass_key).await?) + self.0.rekey_backend(method, pass_key).await } /// Create a new profile with the given profile name pub async fn create_profile(&self, name: Option) -> Result { - Ok(self.0.create_profile(name).await?) + self.0.create_profile(name).await } /// Remove an existing profile with the given profile name pub async fn remove_profile(&self, name: String) -> Result { - Ok(self.0.remove_profile(name).await?) + self.0.remove_profile(name).await } /// Create a new scan instance against the store @@ -64,8 +64,7 @@ impl Store { offset: Option, limit: Option, ) -> Result, Error> { - Ok(self - .0 + self.0 .scan( profile, EntryKind::Item, @@ -74,7 +73,7 @@ impl Store { offset, limit, ) - .await?) + .await } /// Create a new session against the store @@ -90,11 +89,11 @@ impl Store { /// Close the store instance, waiting for any shutdown procedures to complete. pub async fn close(self) -> Result<(), Error> { - Ok(self.0.close().await?) + self.0.close().await } pub(crate) async fn arc_close(self: Arc) -> Result<(), Error> { - Ok(self.0.close().await?) 
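// Editor's note: an illustrative sketch, not part of the patch, of the
// tag-grouping pattern kept by the serializer above: the map entry API with
// or_insert_with(Vec::new) (a named constructor instead of a || vec![]
// closure) collects several values under one key. The data is invented.
use std::collections::BTreeMap;

fn group_tags<'a>(pairs: &[(&'a str, &'a str)]) -> BTreeMap<&'a str, Vec<&'a str>> {
    let mut tags = BTreeMap::new();
    for (name, value) in pairs {
        tags.entry(*name).or_insert_with(Vec::new).push(*value);
    }
    tags
}

fn main() {
    let grouped = group_tags(&[("enc", "a"), ("enc", "b"), ("plain", "c")]);
    assert_eq!(grouped["enc"], vec!["a", "b"]);
    assert_eq!(grouped["plain"], vec!["c"]);
}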
+ self.0.close().await } } @@ -115,7 +114,7 @@ impl Session { category: &str, tag_filter: Option, ) -> Result { - Ok(self.0.count(EntryKind::Item, category, tag_filter).await?) + self.0.count(EntryKind::Item, category, tag_filter).await } /// Retrieve the current record at `(category, name)`. @@ -128,10 +127,9 @@ impl Session { name: &str, for_update: bool, ) -> Result, Error> { - Ok(self - .0 + self.0 .fetch(EntryKind::Item, category, name, for_update) - .await?) + .await } /// Retrieve all records matching the given `category` and `tag_filter`. @@ -146,10 +144,9 @@ impl Session { limit: Option, for_update: bool, ) -> Result, Error> { - Ok(self - .0 + self.0 .fetch_all(EntryKind::Item, category, tag_filter, limit, for_update) - .await?) + .await } /// Insert a new record into the store @@ -161,8 +158,7 @@ impl Session { tags: Option<&[EntryTag]>, expiry_ms: Option, ) -> Result<(), Error> { - Ok(self - .0 + self.0 .update( EntryKind::Item, EntryOperation::Insert, @@ -172,13 +168,12 @@ impl Session { tags, expiry_ms, ) - .await?) + .await } /// Remove a record from the store pub async fn remove(&mut self, category: &str, name: &str) -> Result<(), Error> { - Ok(self - .0 + self.0 .update( EntryKind::Item, EntryOperation::Remove, @@ -188,7 +183,7 @@ impl Session { None, None, ) - .await?) + .await } /// Replace the value and tags of a record in the store @@ -200,8 +195,7 @@ impl Session { tags: Option<&[EntryTag]>, expiry_ms: Option, ) -> Result<(), Error> { - Ok(self - .0 + self.0 .update( EntryKind::Item, EntryOperation::Replace, @@ -211,7 +205,7 @@ impl Session { tags, expiry_ms, ) - .await?) + .await } /// Remove all records in the store matching a given `category` and `tag_filter` @@ -220,10 +214,9 @@ impl Session { category: &str, tag_filter: Option, ) -> Result { - Ok(self - .0 + self.0 .remove_all(EntryKind::Item, category, tag_filter) - .await?) + .await } /// Perform a record update @@ -239,8 +232,7 @@ impl Session { tags: Option<&[EntryTag]>, expiry_ms: Option, ) -> Result<(), Error> { - Ok(self - .0 + self.0 .update( EntryKind::Item, operation, @@ -250,7 +242,7 @@ impl Session { tags, expiry_ms, ) - .await?) + .await } /// Insert a local key instance into the store @@ -434,11 +426,11 @@ impl Session { /// Commit the pending transaction pub async fn commit(self) -> Result<(), Error> { - Ok(self.0.close(true).await?) + self.0.close(true).await } /// Roll back the pending transaction pub async fn rollback(self) -> Result<(), Error> { - Ok(self.0.close(false).await?) + self.0.close(false).await } } diff --git a/src/storage/wql/query.rs b/src/storage/wql/query.rs index fe01ae28..102c7f52 100644 --- a/src/storage/wql/query.rs +++ b/src/storage/wql/query.rs @@ -2847,9 +2847,9 @@ mod tests { #[test] fn test_old_format_empty() { - let json = format!(r#"[]"#); + let json = r#"[]"#; - let query: Query = ::serde_json::from_str(&json).unwrap(); + let query: Query = ::serde_json::from_str(json).unwrap(); let expected = Query::And(vec![]); @@ -2863,8 +2863,8 @@ mod tests { let value1 = _random_string(10); let json = json!(vec![ - json ! 
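// Editor's note: an illustrative sketch, not part of the patch, of the
// pattern removed throughout Store and Session above: wrapping an already
// fallible expression in Ok(... ?) is a no-op, so the inner Result (or the
// awaited future) is now returned directly. Shown here with a plain std
// Result to keep the example free of an async runtime.
fn parse_count(input: &str) -> Result<i64, std::num::ParseIntError> {
    // Before: Ok(input.trim().parse::<i64>()?)
    input.trim().parse::<i64>()
}

fn main() {
    assert_eq!(parse_count(" 42 ").unwrap(), 42);
    assert!(parse_count("n/a").is_err());
}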
({name1.clone(): value1.clone()}), - json!({ name2.clone(): ::serde_json::Value::Null }) + json!({ name1.clone(): value1 }), + json!({ name2: ::serde_json::Value::Null }) ]) .to_string(); @@ -2888,7 +2888,7 @@ mod tests { fn test_optimise_or() { let json = r#"[]"#; - let query: Query = ::serde_json::from_str(&json).unwrap(); + let query: Query = ::serde_json::from_str(json).unwrap(); assert_eq!(query.optimise(), None); } diff --git a/src/storage/wql/sql.rs b/src/storage/wql/sql.rs index 529407fb..810fbe7c 100644 --- a/src/storage/wql/sql.rs +++ b/src/storage/wql/sql.rs @@ -37,15 +37,15 @@ where fn encode_name(&mut self, name: &TagName) -> Result { Ok(match name { - TagName::Encrypted(name) | TagName::Plaintext(name) => (&self.enc_name)(name)?, + TagName::Encrypted(name) | TagName::Plaintext(name) => (self.enc_name)(name)?, }) } - fn encode_value(&mut self, value: &String, is_plaintext: bool) -> Result { + fn encode_value(&mut self, value: &str, is_plaintext: bool) -> Result { Ok(if is_plaintext { value.as_bytes().to_vec() } else { - (&self.enc_value)(value)? + (self.enc_value)(value)? }) } diff --git a/src/storage/wql/tags.rs b/src/storage/wql/tags.rs index 6692e623..322f8148 100644 --- a/src/storage/wql/tags.rs +++ b/src/storage/wql/tags.rs @@ -6,8 +6,8 @@ pub type TagQuery = AbstractQuery; pub fn tag_query(query: Query) -> Result { let result = query .map_names(|k| { - if k.starts_with("~") { - Result::<_, ()>::Ok(TagName::Plaintext(k[1..].to_string())) + if let Some(plain) = k.strip_prefix('~') { + Result::<_, ()>::Ok(TagName::Plaintext(plain.to_string())) } else { Ok(TagName::Encrypted(k)) } @@ -37,9 +37,9 @@ impl ToString for TagName { } } -impl Into for &TagName { - fn into(self) -> String { - self.to_string() +impl From<&TagName> for String { + fn from(tag: &TagName) -> String { + tag.to_string() } } @@ -56,7 +56,7 @@ pub trait TagQueryEncoder { fn encode_name(&mut self, name: &TagName) -> Result; - fn encode_value(&mut self, value: &String, is_plaintext: bool) -> Result; + fn encode_value(&mut self, value: &str, is_plaintext: bool) -> Result; fn encode_op_clause( &mut self, @@ -185,17 +185,14 @@ where fn encode_tag_op( op: CompareOp, name: &TagName, - value: &String, + value: &str, enc: &mut E, negate: bool, ) -> Result, Error> where E: TagQueryEncoder, { - let is_plaintext = match &name { - TagName::Plaintext(_) => true, - _ => false, - }; + let is_plaintext = matches!(name, TagName::Plaintext(_)); let enc_name = enc.encode_name(name)?; let enc_value = enc.encode_value(value, is_plaintext)?; @@ -204,20 +201,17 @@ where fn encode_tag_in( name: &TagName, - values: &Vec, + values: &[String], enc: &mut E, negate: bool, ) -> Result, Error> where E: TagQueryEncoder, { - let is_plaintext = match &name { - TagName::Plaintext(_) => true, - _ => false, - }; + let is_plaintext = matches!(name, TagName::Plaintext(_)); let enc_name = enc.encode_name(name)?; let enc_values = values - .into_iter() + .iter() .map(|val| enc.encode_value(val, is_plaintext)) .collect::, Error>>()?; @@ -231,10 +225,7 @@ where match names.len() { 0 => Ok(None), 1 => { - let is_plaintext = match names[0] { - TagName::Plaintext(_) => true, - _ => false, - }; + let is_plaintext = matches!(names[0], TagName::Plaintext(_)); let enc_name = enc.encode_name(&names[0])?; enc.encode_exist_clause(enc_name, is_plaintext, negate) } @@ -252,7 +243,7 @@ where fn encode_tag_conj( op: ConjunctionOp, - subqueries: &Vec, + subqueries: &[TagQuery], enc: &mut E, negate: bool, ) -> Result, Error> @@ -261,7 +252,7 @@ where { let op = if 
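// Editor's note: an illustrative sketch, not part of the patch, of the
// strip_prefix change in the tag-name mapping above: the starts_with test and
// the manual [1..] slice collapse into one call. TagName mirrors the shape of
// the type in the hunk; classify() is an invented wrapper.
enum TagName {
    Plaintext(String),
    Encrypted(String),
}

fn classify(name: String) -> TagName {
    // Before: if name.starts_with("~") { TagName::Plaintext(name[1..].to_string()) } else { ... }
    if let Some(plain) = name.strip_prefix('~') {
        TagName::Plaintext(plain.to_string())
    } else {
        TagName::Encrypted(name)
    }
}

fn main() {
    assert!(matches!(classify("~color".to_string()), TagName::Plaintext(n) if n == "color"));
    assert!(matches!(classify("secret".to_string()), TagName::Encrypted(n) if n == "secret"));
}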
negate { op.negate() } else { op }; let clauses = subqueries - .into_iter() + .iter() .flat_map(|q| encode_tag_query(q, enc, negate).transpose()) .collect::, Error>>()?; @@ -284,8 +275,8 @@ mod tests { Ok(name.to_string()) } - fn encode_value(&mut self, value: &String, _is_plaintext: bool) -> Result { - Ok(value.clone()) + fn encode_value(&mut self, value: &str, _is_plaintext: bool) -> Result { + Ok(value.to_string()) } fn encode_op_clause( @@ -332,12 +323,12 @@ mod tests { clauses: Vec, ) -> Result, Error> { let mut r = String::new(); - r.push_str("("); + r.push('('); r.extend(Itertools::intersperse( clauses.iter().map(String::as_str), op.as_sql_str(), )); - r.push_str(")"); + r.push(')'); Ok(Some(r)) } } diff --git a/tests/backends.rs b/tests/backends.rs index ca77a734..b6b123bc 100644 --- a/tests/backends.rs +++ b/tests/backends.rs @@ -1,6 +1,8 @@ +#![allow(clippy::bool_assert_comparison)] + mod utils; -const ERR_CLOSE: &'static str = "Error closing database"; +const ERR_CLOSE: &str = "Error closing database"; macro_rules! backend_tests { ($init:expr) => { @@ -231,7 +233,7 @@ mod sqlite { #[test] fn create_remove_db() { log_init(); - let fname = format!("sqlite-test-{}.db", uuid::Uuid::new_v4().to_string()); + let fname = format!("sqlite-test-{}.db", uuid::Uuid::new_v4()); assert_eq!( Path::new(&fname).exists(), false, @@ -281,7 +283,7 @@ mod sqlite { #[test] fn rekey_db() { log_init(); - let fname = format!("sqlite-rekey-{}.db", uuid::Uuid::new_v4().to_string()); + let fname = format!("sqlite-rekey-{}.db", uuid::Uuid::new_v4()); let key1 = generate_raw_store_key(None).expect("Error creating raw key"); let key2 = generate_raw_store_key(None).expect("Error creating raw key"); assert_ne!(key1, key2); @@ -320,7 +322,7 @@ mod sqlite { #[test] fn txn_contention_file() { log_init(); - let fname = format!("sqlite-contention-{}.db", uuid::Uuid::new_v4().to_string()); + let fname = format!("sqlite-contention-{}.db", uuid::Uuid::new_v4()); let key = generate_raw_store_key(None).expect("Error creating raw key"); block_on(async move { diff --git a/tests/local_key.rs b/tests/local_key.rs index 47d0da53..90581bed 100644 --- a/tests/local_key.rs +++ b/tests/local_key.rs @@ -1,8 +1,10 @@ +#![allow(clippy::bool_assert_comparison)] + use aries_askar::kms::{KeyAlg, LocalKey}; -const ERR_CREATE_KEYPAIR: &'static str = "Error creating keypair"; -const ERR_SIGN: &'static str = "Error signing message"; -const ERR_VERIFY: &'static str = "Error verifying signature"; +const ERR_CREATE_KEYPAIR: &str = "Error creating keypair"; +const ERR_SIGN: &str = "Error signing message"; +const ERR_VERIFY: &str = "Error verifying signature"; pub async fn localkey_sign_verify() { let keypair = LocalKey::generate(KeyAlg::Ed25519, true).expect(ERR_CREATE_KEYPAIR); diff --git a/tests/utils/mod.rs b/tests/utils/mod.rs index f6da69d2..04afabf8 100644 --- a/tests/utils/mod.rs +++ b/tests/utils/mod.rs @@ -7,24 +7,24 @@ use aries_askar::{ use tokio::task::spawn; -const ERR_PROFILE: &'static str = "Error creating profile"; -const ERR_SESSION: &'static str = "Error starting session"; -const ERR_TRANSACTION: &'static str = "Error starting transaction"; -const ERR_COMMIT: &'static str = "Error committing transaction"; -const ERR_COUNT: &'static str = "Error performing count"; -const ERR_FETCH: &'static str = "Error fetching test row"; -const ERR_FETCH_ALL: &'static str = "Error fetching all test rows"; -const ERR_REQ_ROW: &'static str = "Expected row"; -const ERR_REQ_ERR: &'static str = "Expected error"; -const ERR_INSERT: &'static str = 
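// Editor's note: an illustrative sketch, not part of the patch, of the
// signature loosening in the tag-query encoder above: taking &str and &[T]
// instead of &String and &Vec<T> lets callers pass literals, slices, arrays,
// or Vecs alike. The "encoding" below is a placeholder, not the real one.
fn encode_value(value: &str, is_plaintext: bool) -> Vec<u8> {
    if is_plaintext {
        value.as_bytes().to_vec()
    } else {
        value.to_uppercase().into_bytes() // placeholder for the encrypted form
    }
}

fn encode_values(values: &[String]) -> Vec<Vec<u8>> {
    values.iter().map(|v| encode_value(v, true)).collect()
}

fn main() {
    // &String coerces to &str, and &Vec<String> coerces to &[String].
    let owned = String::from("tag");
    assert_eq!(encode_value(&owned, true), encode_value("tag", true));
    let values = vec![String::from("a"), String::from("b")];
    assert_eq!(encode_values(&values).len(), 2);
}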
"Error inserting test row"; -const ERR_REPLACE: &'static str = "Error replacing test row"; -const ERR_REMOVE_ALL: &'static str = "Error removing test rows"; -const ERR_SCAN: &'static str = "Error starting scan"; -const ERR_SCAN_NEXT: &'static str = "Error fetching scan rows"; -const ERR_CREATE_KEYPAIR: &'static str = "Error creating keypair"; -const ERR_INSERT_KEY: &'static str = "Error inserting key"; -const ERR_FETCH_KEY: &'static str = "Error fetching key"; -const ERR_LOAD_KEY: &'static str = "Error loading key"; +const ERR_PROFILE: &str = "Error creating profile"; +const ERR_SESSION: &str = "Error starting session"; +const ERR_TRANSACTION: &str = "Error starting transaction"; +const ERR_COMMIT: &str = "Error committing transaction"; +const ERR_COUNT: &str = "Error performing count"; +const ERR_FETCH: &str = "Error fetching test row"; +const ERR_FETCH_ALL: &str = "Error fetching all test rows"; +const ERR_REQ_ROW: &str = "Expected row"; +const ERR_REQ_ERR: &str = "Expected error"; +const ERR_INSERT: &str = "Error inserting test row"; +const ERR_REPLACE: &str = "Error replacing test row"; +const ERR_REMOVE_ALL: &str = "Error removing test rows"; +const ERR_SCAN: &str = "Error starting scan"; +const ERR_SCAN_NEXT: &str = "Error fetching scan rows"; +const ERR_CREATE_KEYPAIR: &str = "Error creating keypair"; +const ERR_INSERT_KEY: &str = "Error inserting key"; +const ERR_FETCH_KEY: &str = "Error fetching key"; +const ERR_LOAD_KEY: &str = "Error loading key"; pub trait TestStore: Clone + Deref> + Send + Sync { type DB: Backend + Debug + 'static; @@ -43,24 +43,20 @@ impl TestStore for Arc> { pub async fn db_create_remove_profile(db: impl TestStore) { let profile = db.create_profile(None).await.expect(ERR_PROFILE); - assert_eq!( - db.remove_profile(profile) - .await - .expect("Error removing profile"), - true - ); - assert_eq!( - db.remove_profile("not a profile".to_string()) - .await - .expect("Error removing profile"), - false - ); + assert!(db + .remove_profile(profile) + .await + .expect("Error removing profile"),); + assert!(!db + .remove_profile("not a profile".to_string()) + .await + .expect("Error removing profile"),); } pub async fn db_fetch_fail(db: impl TestStore) { let mut conn = db.session(None).await.expect(ERR_SESSION); let result = conn.fetch("cat", "name", false).await.expect(ERR_FETCH); - assert_eq!(result.is_none(), true); + assert!(result.is_none()); } pub async fn db_insert_fetch(db: impl TestStore) { @@ -364,7 +360,7 @@ pub async fn db_count_exist(db: impl TestStore) { assert_eq!( conn.count( &test_row.category, - Some(TagFilter::not(TagFilter::exist(vec![ + Some(TagFilter::negate(TagFilter::exist(vec![ "enc".to_string(), "other".to_string() ]),)) @@ -490,19 +486,19 @@ pub async fn db_keypair_insert_fetch(db: impl TestStore) { let key_name = "testkey"; let metadata = "meta"; - conn.insert_key(&key_name, &keypair, Some(metadata), None, None) + conn.insert_key(key_name, &keypair, Some(metadata), None, None) .await .expect(ERR_INSERT_KEY); let found = conn - .fetch_key(&key_name, false) + .fetch_key(key_name, false) .await .expect(ERR_FETCH_KEY) .expect(ERR_REQ_ROW); assert_eq!(found.algorithm(), Some(KeyAlg::Ed25519.as_str())); assert_eq!(found.name(), key_name); assert_eq!(found.metadata(), Some(metadata)); - assert_eq!(found.is_local(), true); + assert!(found.is_local()); found.load_local_key().expect(ERR_LOAD_KEY); } @@ -695,7 +691,7 @@ pub async fn db_txn_contention(db: impl TestStore + 'static) { conn.replace( &category, &name, - &format!("{}", val + 1).as_bytes(), + 
format!("{}", val + 1).as_bytes(), Some(row.tags.as_slice()), None, )