Commit

Merge remote-tracking branch 'origin/master'
antouhou committed Oct 9, 2021
2 parents 2a54ac7 + c5047fe commit 4b928a6
Showing 16 changed files with 333 additions and 161 deletions.
41 changes: 40 additions & 1 deletion .github/workflows/publish.yml
@@ -6,7 +6,7 @@ on:
       - v*
 
 jobs:
-  publish:
+  test:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v2
@@ -18,6 +18,45 @@ jobs:
           override: true
       - name: Run tests
         run: cargo test
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Setup Rust toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          profile: minimal
+          override: true
+      - name: Install Clippy
+        run: rustup component add clippy
+      - name: Run linter
+        run: cargo clippy -- -D warnings
+  format:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Setup Rust toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          profile: minimal
+          override: true
+      - name: Install rustfmt
+        run: rustup component add rustfmt
+      - name: Check format
+        run: cargo fmt --all -- --check
+  publish:
+    needs: [test, lint, format]
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Setup Rust toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          profile: minimal
+          override: true
       - shell: bash
         env:
           CARGO_TOKEN: ${{ secrets.CARGO_TOKEN }}
30 changes: 29 additions & 1 deletion .github/workflows/test.yml
@@ -7,7 +7,7 @@ on:
     branches: [ master ]
 
 jobs:
-  build:
+  test:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v2
@@ -19,3 +19,31 @@ jobs:
           override: true
       - name: Run tests
         run: cargo test
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Setup Rust toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          profile: minimal
+          override: true
+      - name: Install Clippy
+        run: rustup component add clippy
+      - name: Run linter
+        run: cargo clippy -- -D warnings
+  format:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Setup Rust toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          profile: minimal
+          override: true
+      - name: Install rustfmt
+        run: rustup component add rustfmt
+      - name: Check format
+        run: cargo fmt --all -- --check
2 changes: 1 addition & 1 deletion src/algorithms/mod.rs
@@ -1,4 +1,4 @@
 //! This module contains built-in implementations of `rs_merkle::Hasher`
 mod sha256;
 
-pub use sha256::Sha256Algorithm as Sha256;
+pub use sha256::Sha256Algorithm as Sha256;
4 changes: 2 additions & 2 deletions src/algorithms/sha256.rs
@@ -1,5 +1,5 @@
-use sha2::{Sha256, Digest, digest::FixedOutput};
 use crate::Hasher;
+use sha2::{digest::FixedOutput, Digest, Sha256};
 
 /// Sha256 implementation of the `rs_merkle::Hasher` trait
 #[derive(Clone)]
@@ -14,4 +14,4 @@ impl Hasher for Sha256Algorithm {
         hasher.update(data);
         <[u8; 32]>::from(hasher.finalize_fixed())
     }
-}
+}
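
For context, here is a minimal, hypothetical usage sketch of the built-in hasher touched by this hunk. It assumes `Hasher` and the `Sha256` alias are importable as the doc comments suggest (`rs_merkle::Hasher`, `rs_merkle::algorithms::Sha256`); it is not part of the commit.

```rust
// Hypothetical sketch: hash two byte strings with the built-in SHA-256
// algorithm and confirm the 32-byte output size.
use rs_merkle::{algorithms::Sha256, Hasher};

fn main() {
    let a: [u8; 32] = Sha256::hash(b"left leaf");
    let b: [u8; 32] = Sha256::hash(b"right leaf");

    // `hash_size()` defaults to `mem::size_of::<Self::Hash>()`, i.e. 32 here.
    assert_eq!(Sha256::hash_size(), 32);
    assert_ne!(a, b);
}
```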
14 changes: 8 additions & 6 deletions src/error.rs
@@ -1,15 +1,15 @@
-use std::fmt::{Debug, Formatter, Display};
+use std::fmt::{Debug, Display, Formatter};
 
 #[derive(Copy, Clone, Debug)]
 pub enum ErrorKind {
     SerializedProofSizeIsIncorrect,
-    NotEnoughHelperNodes
+    NotEnoughHelperNodes,
 }
 
 #[derive(Clone, Debug)]
 pub struct Error {
     kind: ErrorKind,
-    message: String
+    message: String,
 }
 
 impl Error {
@@ -20,15 +20,17 @@ impl Error {
     pub fn not_enough_helper_nodes() -> Self {
         Self::new(
             ErrorKind::NotEnoughHelperNodes,
-            String::from("Not enough hashes to reconstruct the root")
+            String::from("Not enough hashes to reconstruct the root"),
         )
     }
 
     pub fn kind(&self) -> ErrorKind {
         self.kind
     }
 
-    pub fn message(&self) -> &str { &self.message }
+    pub fn message(&self) -> &str {
+        &self.message
+    }
 }
 
 impl std::error::Error for Error {}
@@ -37,4 +39,4 @@ impl Display for Error {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         write!(f, "{}", self.message)
     }
-}
+}
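
As a rough illustration of the error API reformatted above (the `rs_merkle::error` path is assumed from `pub mod error;` in lib.rs; this example is not part of the commit), callers can branch on the kind and fall back to the message:

```rust
// Hypothetical sketch: inspect an rs_merkle error by kind and message.
use rs_merkle::error::{Error, ErrorKind};

fn describe(err: &Error) -> String {
    match err.kind() {
        ErrorKind::SerializedProofSizeIsIncorrect => {
            format!("proof bytes have the wrong length: {}", err.message())
        }
        ErrorKind::NotEnoughHelperNodes => {
            format!("proof is missing hashes: {}", err.message())
        }
    }
}

fn main() {
    // `not_enough_helper_nodes` is one of the constructors shown in the diff.
    let err = Error::not_enough_helper_nodes();
    println!("{}", describe(&err));
}
```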
10 changes: 5 additions & 5 deletions src/hasher.rs
@@ -53,15 +53,15 @@ pub trait Hasher: Clone {
     /// if the left node doesn't have a sibling it is concatenated to itself and
     /// then hashed instead of just being propagated to the next level.
     fn concat_and_hash(left: &Self::Hash, right: Option<&Self::Hash>) -> Self::Hash {
-        let mut concatenated: Vec<u8> = left.clone().into();
+        let mut concatenated: Vec<u8> = (*left).into();
 
         match right {
             Some(right_node) => {
-                let mut right_node_clone: Vec<u8> = right_node.clone().into();
+                let mut right_node_clone: Vec<u8> = (*right_node).into();
                 concatenated.append(&mut right_node_clone);
                 Self::hash(&concatenated)
-            },
-            None => left.clone()
+            }
+            None => *left,
         }
     }
 
@@ -71,4 +71,4 @@ pub trait Hasher: Clone {
     fn hash_size() -> usize {
         mem::size_of::<Self::Hash>()
     }
-}
+}
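
To make the `concat_and_hash` change concrete, here is a small hypothetical sketch of its behaviour after this hunk (using the built-in `Sha256` implementation; crate-root re-exports assumed): a paired node is concatenated with its sibling and hashed, while an unpaired node is copied up unchanged instead of cloned.

```rust
// Hypothetical sketch: exercise the default `concat_and_hash` shown above.
use rs_merkle::{algorithms::Sha256, Hasher};

fn main() {
    let left = Sha256::hash(b"a");
    let right = Sha256::hash(b"b");

    // With a sibling: the parent is hash(left || right).
    let parent = Sha256::concat_and_hash(&left, Some(&right));
    let mut concatenated = left.to_vec();
    concatenated.extend_from_slice(&right);
    assert_eq!(parent, Sha256::hash(&concatenated));

    // Without a sibling: the node is propagated to the next level as-is.
    assert_eq!(Sha256::concat_and_hash(&left, None), left);
}
```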
5 changes: 2 additions & 3 deletions src/lib.rs
@@ -11,12 +11,11 @@ pub use merkle_proof::MerkleProof;
 pub use merkle_tree::MerkleTree;
 pub use partial_tree::PartialTree;
 
-mod merkle_tree;
+mod hasher;
 mod merkle_proof;
+mod merkle_tree;
 mod partial_tree;
-mod hasher;
 
 pub mod algorithms;
 pub mod error;
 mod utils;
-
67 changes: 50 additions & 17 deletions src/merkle_proof.rs
@@ -1,9 +1,9 @@
 use std::convert::TryInto;
 
-use crate::{Hasher, utils};
 use crate::error::Error;
 use crate::error::ErrorKind;
 use crate::partial_tree::PartialTree;
+use crate::{utils, Hasher};
 
 /// `MerkleProof` is used to parse, verify, calculate a root for merkle proofs.
 ///
@@ -29,9 +29,7 @@ pub struct MerkleProof<T: Hasher> {
 
 impl<T: Hasher> MerkleProof<T> {
     pub fn new(proof_hashes: Vec<T::Hash>) -> Self {
-        MerkleProof {
-            proof_hashes,
-        }
+        MerkleProof { proof_hashes }
     }
 
     /// Parses proof serialized as bytes
@@ -42,20 +40,29 @@ impl<T: Hasher> MerkleProof<T> {
         if bytes.len() % hash_size != 0 {
             return Err(Error::new(
                 ErrorKind::SerializedProofSizeIsIncorrect,
-                format!("Proof of size {} bytes can not be divided into chunks of {} bytes", bytes.len(), hash_size)));
+                format!(
+                    "Proof of size {} bytes can not be divided into chunks of {} bytes",
+                    bytes.len(),
+                    hash_size
+                ),
+            ));
         }
 
         let hashes_count = bytes.len() / hash_size;
         let proof_hashes_slices: Vec<T::Hash> = (0..hashes_count)
             .map(|i| {
-                let x: Vec<u8> = bytes.get(i * hash_size..(i + 1) * hash_size).unwrap().try_into().unwrap();
+                let x: Vec<u8> = bytes
+                    .get(i * hash_size..(i + 1) * hash_size)
+                    .unwrap()
+                    .try_into()
+                    .unwrap();
                 match x.try_into() {
                     Ok(val) => val,
                     // Because of the check above the initial bytes are always slices perfectly
                     // into appropriately sized hashes.
                     // Unwrap is not used here due to more complex trait bounds on T::Hash
                     // that would be require to satisfy .unwrap usage
-                    Err(_) => panic!("Unexpected error during proof parsing")
+                    Err(_) => panic!("Unexpected error during proof parsing"),
                 }
             })
             .collect();
@@ -76,15 +83,25 @@ impl<T: Hasher> MerkleProof<T> {
     }
 
     /// Calculates merkle root based on provided leaves and proof hashes
-    pub fn root(&self, leaf_indices: &[usize], leaf_hashes: &[T::Hash], total_leaves_count: usize) -> T::Hash {
+    pub fn root(
+        &self,
+        leaf_indices: &[usize],
+        leaf_hashes: &[T::Hash],
+        total_leaves_count: usize,
+    ) -> T::Hash {
         let tree_depth = utils::indices::tree_depth(total_leaves_count);
 
         // Zipping indices and hashes into a vector of (original_index_in_tree, leaf_hash)
-        let mut leaf_tuples: Vec<(usize, T::Hash)> = leaf_indices.iter().cloned().zip(leaf_hashes.iter().cloned()).collect();
+        let mut leaf_tuples: Vec<(usize, T::Hash)> = leaf_indices
+            .iter()
+            .cloned()
+            .zip(leaf_hashes.iter().cloned())
+            .collect();
         // Sorting leaves by indexes in case they weren't sorted already
         leaf_tuples.sort_by(|(a, _), (b, _)| a.cmp(b));
         // Getting back _sorted_ indices
-        let proof_indices_by_layers = utils::indices::proof_indices_by_layers(leaf_indices, total_leaves_count);
+        let proof_indices_by_layers =
+            utils::indices::proof_indices_by_layers(leaf_indices, total_leaves_count);
 
         // The next lines copy hashes from proof hashes and group them by layer index
         let mut proof_layers: Vec<Vec<(usize, T::Hash)>> = Vec::with_capacity(tree_depth + 1);
@@ -98,31 +115,47 @@ impl<T: Hasher> MerkleProof<T> {
             Some(first_layer) => {
                 first_layer.append(&mut leaf_tuples);
                 first_layer.sort_by(|(a, _), (b, _)| a.cmp(b));
-            },
-            None => proof_layers.push(leaf_tuples)
+            }
+            None => proof_layers.push(leaf_tuples),
         }
 
         // TODO: remove the unwrap!
        let partial_tree = PartialTree::<T>::build(proof_layers, tree_depth).unwrap();
 
-        return partial_tree.root().unwrap().clone();
+        *partial_tree.root().unwrap()
     }
 
     /// Calculates the root and serializes it into a hex string
-    pub fn hex_root(&self, leaf_indices: &[usize], leaf_hashes: &[T::Hash], total_leaves_count: usize) -> String {
+    pub fn hex_root(
+        &self,
+        leaf_indices: &[usize],
+        leaf_hashes: &[T::Hash],
+        total_leaves_count: usize,
+    ) -> String {
         let root = self.root(leaf_indices, leaf_hashes, total_leaves_count);
         utils::collections::to_hex_string(&root)
     }
 
     /// Verifies
-    pub fn verify(&self, root: T::Hash, leaf_indices: &[usize], leaf_hashes: &[T::Hash], total_leaves_count: usize) -> bool {
+    pub fn verify(
+        &self,
+        root: T::Hash,
+        leaf_indices: &[usize],
+        leaf_hashes: &[T::Hash],
+        total_leaves_count: usize,
+    ) -> bool {
         let extracted_root = self.root(leaf_indices, leaf_hashes, total_leaves_count);
         root == extracted_root
     }
 
     /// Serializes proof hashes to a flat vector of bytes
     pub fn to_bytes(&self) -> Vec<u8> {
-        let vectors: Vec<Vec<u8>> = self.proof_hashes().iter().cloned().map(|hash| hash.into()).collect();
+        let vectors: Vec<Vec<u8>> = self
+            .proof_hashes()
+            .iter()
+            .cloned()
+            .map(|hash| hash.into())
+            .collect();
         vectors.iter().cloned().flatten().collect()
     }
 }
-}
+}
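
Taken together, the reformatted methods above still round-trip a proof the same way. Below is a minimal, hypothetical sketch for a two-leaf tree proving leaf 0; the crate-root re-exports and the exact `from_bytes(&[u8]) -> Result<Self, Error>` signature are assumptions based on this diff rather than verified API.

```rust
// Hypothetical round-trip sketch: build, serialize, parse, and verify a proof.
use rs_merkle::{algorithms::Sha256, Hasher, MerkleProof};

fn main() {
    // Two-leaf tree: the proof for leaf 0 is just its sibling, leaf 1.
    let leaf0 = Sha256::hash(b"leaf 0");
    let leaf1 = Sha256::hash(b"leaf 1");
    let expected_root = Sha256::concat_and_hash(&leaf0, Some(&leaf1));

    let proof = MerkleProof::<Sha256>::new(vec![leaf1]);

    // Serialize, then parse back; `from_bytes` rejects lengths that are not
    // a multiple of the 32-byte hash size.
    let bytes = proof.to_bytes();
    let parsed = MerkleProof::<Sha256>::from_bytes(&bytes).expect("well-formed proof bytes");

    // Reconstruct the root from the proven leaf and check it against the
    // expected root for the full two-leaf tree.
    assert_eq!(parsed.root(&[0], &[leaf0], 2), expected_root);
    assert!(parsed.verify(expected_root, &[0], &[leaf0], 2));
}
```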