Skip to content

Commit

Permalink
Merge #27: Process all messages from a connection
Browse files Browse the repository at this point in the history
1561833 ci: workaround for LLVM pass API break (Antoine Poinsot)
444630c main: process all messages of a connection (Antoine Poinsot)
b572cea processing: cache the secp context (Antoine Poinsot)
0477e31 processing: use a signature hash cache (Antoine Poinsot)
4f21d97 processing: remove the check for duplicated inputs (Antoine Poinsot)
f813fd4 Update to latest revault_tx (slight behaviour change) (Antoine Poinsot)

Pull request description:

  This updates to latest `revault_tx` (revault/revault_tx#107) and fixes #24

ACKs for top commit:
  darosior:
    ACK 1561833
  danielabrozzoni:
    ACK 1561833

Tree-SHA512: 559b38a881c334c1f04822f32b67c5163d36a49cd2c290fe1e2bdf92b4083fdafbd83c2baf6e92bafaa97fdb243fe29467c94e33d1138cb19f6f200d90cd3f50
  • Loading branch information
darosior committed Oct 28, 2021
2 parents ed0c9a5 + 1561833 commit e41e5ca
Show file tree
Hide file tree
Showing 10 changed files with 120 additions and 78 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ jobs:
override: true
profile: minimal
- name: Run fuzz testing script
run: sudo apt install build-essential binutils-dev libunwind-dev libblocksruntime-dev liblzma-dev && cargo install --force honggfuzz && cd fuzz && git clone https://github.com/revault/cosignerd_fuzz_corpus && HFUZZ_RUN_ARGS="--exit_upon_crash --iterations 10000 -v --timeout 2 --input cosignerd_fuzz_corpus" cargo hfuzz run process_sign_message && cd ..
run: sudo apt install build-essential binutils-dev libunwind-dev libblocksruntime-dev liblzma-dev && cargo install --force honggfuzz && cd fuzz && git clone https://github.com/revault/cosignerd_fuzz_corpus && RUSTFLAGS="-Znew-llvm-pass-manager=no" HFUZZ_RUN_ARGS="--exit_upon_crash --iterations 10000 -v --timeout 2 --input cosignerd_fuzz_corpus" cargo hfuzz run process_sign_message

rustfmt_check:
runs-on: ubuntu-latest
Expand Down
8 changes: 4 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

10 changes: 8 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,12 @@ name = "cosignerd"
version = "0.0.2"
authors = ["JSwambo <jake.t.swambo@hotmail.co.uk>", "Antoine Poinsot <darosior@protonmail.com>"]
edition = "2018"
repository = "https://github.com/revault/revaultd"
license-file = "LICENCE"
keywords = ["revault", "bitcoin", "vault"]
description = "Revault cosigning server"
exclude = [".github/"]


[dependencies]
# For the configuration file
Expand All @@ -22,8 +28,8 @@ libc = "0.2"
rusqlite = { version = "0.24.2", features = ["bundled"] }

# Revault-specific libraries
revault_tx = { version = "0.3", features = ["use-serde"] }
revault_net = "0.1"
revault_tx = { version = "0.4", features = ["use-serde"] }
revault_net = "0.2"

# For fuzz testing
bitcoin = {version = "0.27", features = ["rand"], optional = true}
Expand Down
39 changes: 33 additions & 6 deletions fuzz/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions fuzz/targets/process_sign_message.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ fn main() {
&builder.config,
msg,
&builder.bitcoin_privkey,
&builder.secp,
) {
Ok(res) => res,
Err(cosignerd::processing::SignProcessingError::Database(e)) => panic!("{}", e),
Expand Down
64 changes: 37 additions & 27 deletions src/bin/cosignerd.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,35 @@ fn setup_logger(log_level: log::LevelFilter) -> Result<(), fern::InitError> {
Ok(())
}

/// Dispatch a single decoded request received from a manager connection.
///
/// Only `sign` requests are meaningful to the cosigning server: they are
/// handed to `process_sign_message` and the result is wrapped in a
/// [`ResponseResult::SignResult`]. Returns `None` when no response should be
/// sent back (processing error, or an unexpected request type).
fn process_message(
    secp_ctx: &secp256k1::Secp256k1<secp256k1::All>,
    config: &Config,
    bitcoin_privkey: &secp256k1::SecretKey,
    message: RequestParams,
) -> Option<revault_net::message::ResponseResult> {
    match message {
        RequestParams::Sign(sign_req) => {
            log::trace!("Decoded request: {:#?}", sign_req);

            // A processing error is logged but not fatal for the daemon:
            // we simply don't answer this request.
            // NOTE: `config` and `secp_ctx` are already references, so pass
            // them as-is instead of re-borrowing (`&config` was a `&&Config`).
            let res = match process_sign_message(config, sign_req, bitcoin_privkey, secp_ctx) {
                Ok(res) => res,
                Err(e) => {
                    log::error!("Error when processing 'sign' message: '{}'", e);
                    return None;
                }
            };
            log::trace!("Decoded response: {:#?}", res);

            Some(ResponseResult::SignResult(res))
        }
        _ => {
            // FIXME: This should probably be fatal, they are violating the protocol
            log::error!("Unexpected message: '{:?}'", message);
            None
        }
    }
}

// Wait for connections from managers on the configured interface and process `sign` messages.
fn daemon_main(
config: Config,
Expand All @@ -65,6 +94,7 @@ fn daemon_main(
});
let managers_noise_pubkeys: Vec<NoisePubkey> =
config.managers.iter().map(|m| m.noise_key).collect();
let secp_ctx = secp256k1::Secp256k1::new();

// We expect a single connection once in a while, there is *no need* for complexity here so
// just treat incoming connections sequentially.
Expand All @@ -81,37 +111,17 @@ fn daemon_main(
}
};

match kk_stream.read_req(|req_params| {
match req_params {
RequestParams::Sign(sign_req) => {
log::trace!("Decoded request: {:#?}", sign_req);

let res = match process_sign_message(&config, sign_req, bitcoin_privkey) {
Ok(res) => res,
Err(e) => {
log::error!("Error when processing 'sign' message: '{}'", e);
return None;
}
};
log::trace!("Decoded response: {:#?}", res);

Some(ResponseResult::SignResult(res))
}
_ => {
// FIXME: This should probably be fatal, they are violating the protocol
log::error!("Unexpected message: '{:?}'", req_params);
None
}
}
}) {
Ok(buf) => buf,
Err(e) => {
// Process all messages from this connection.
loop {
if let Err(e) =
kk_stream.read_req(|msg| process_message(&secp_ctx, &config, &bitcoin_privkey, msg))
{
log::error!(
"Error handling request from stream '{:?}': '{}'",
"Error handling request from stream '{:?}': '{}'. Dropping connection.",
kk_stream,
e
);
continue;
break;
}
}
}
Expand Down
4 changes: 3 additions & 1 deletion src/database/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,8 @@ impl TryFrom<&Row<'_>> for DbSignedOutpoint {
vout: row.get(1)?,
};
let signature = row.get::<_, Vec<u8>>(2)?;
let signature = Signature::from_der(&signature)
.expect("We only ever store valid DER-encoded signatures");

Ok(DbSignedOutpoint {
outpoint,
Expand Down Expand Up @@ -251,7 +253,7 @@ mod test {
.unwrap()
.unwrap()
.signature,
sig.serialize_der().to_vec()
sig
);
}
}
4 changes: 2 additions & 2 deletions src/database/schema.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use revault_tx::miniscript::bitcoin::OutPoint;
use revault_tx::miniscript::bitcoin::{secp256k1::Signature, OutPoint};

pub const SCHEMA: &str = "\
Expand All @@ -21,5 +21,5 @@ pub struct DbSignedOutpoint {
pub outpoint: OutPoint,
// We don't even take care of parsing it as a Signature, as we only input it with
// to_der() and use it to insert in partial_sigs (which takes raw bytes)
pub signature: Vec<u8>,
pub signature: Signature,
}
61 changes: 27 additions & 34 deletions src/processing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use crate::{

use revault_net::message::cosigner::{SignRequest, SignResult};
use revault_tx::{
bitcoin::{secp256k1, PublicKey as BitcoinPubkey, SigHashType},
bitcoin::{secp256k1, util::bip143::SigHashCache, PublicKey as BitcoinPubkey, SigHashType},
error::InputSatisfactionError,
transactions::RevaultTransaction,
};
Expand Down Expand Up @@ -42,9 +42,9 @@ pub fn process_sign_message(
config: &Config,
sign_msg: SignRequest,
bitcoin_privkey: &secp256k1::SecretKey,
secp: &secp256k1::Secp256k1<secp256k1::All>,
) -> Result<SignResult, SignProcessingError> {
let db_path = config.db_file();
let secp = secp256k1::Secp256k1::signing_only();
let our_pubkey = BitcoinPubkey {
compressed: true,
key: secp256k1::PublicKey::from_secret_key(&secp, bitcoin_privkey),
Expand All @@ -60,23 +60,6 @@ pub fn process_sign_message(
// Gather what signatures we have for these prevouts
let mut signatures = Vec::with_capacity(n_inputs);
for txin in spend_tx.tx().input.iter() {
if spend_tx
.tx()
.input
.iter()
.filter_map(|curr| {
if curr.previous_output == txin.previous_output {
Some(curr)
} else {
None
}
})
.count()
> 1
{
return Err(SignProcessingError::Garbage);
}

if let Some(signed_op) = db_signed_outpoint(&db_path, &txin.previous_output)
.map_err(SignProcessingError::Database)?
{
Expand All @@ -90,14 +73,20 @@ pub fn process_sign_message(
// soon..
}

// If we had all the signatures for all these outpoints, send them. They'll figure out whether
// they are valid or not.
// If we had all the signatures for all these outpoints, send them if they are valid.
if signatures.len() == n_inputs {
for (i, mut sig) in signatures.into_iter().enumerate() {
sig.push(SigHashType::All as u8);
spend_tx.psbt_mut().inputs[i]
.partial_sigs
.insert(our_pubkey, sig);
for (i, sig) in signatures.into_iter().enumerate() {
// Don't let them fool you!
if spend_tx
.add_signature(i, our_pubkey.key, sig, &secp)
.is_err()
{
log::error!(
"Invalid signature. Got a request for a modified Spend: '{}'",
spend_tx
);
return Ok(null_signature());
}
}
return Ok(SignResult { tx: Some(spend_tx) });
}
Expand All @@ -108,19 +97,20 @@ pub fn process_sign_message(
}

// If we signed none of the input, append fresh signatures for each of them to the PSBT.
let mut psbtins = spend_tx.psbt().inputs.clone();
for (i, psbtin) in psbtins.iter_mut().enumerate() {
// FIXME: sighash cache upstream...
let unsigned_tx = spend_tx.tx().clone();
let mut sighash_cache = SigHashCache::new(&unsigned_tx);
for i in 0..spend_tx.psbt().inputs.len() {
let sighash = spend_tx
.signature_hash(i, SigHashType::All)
.signature_hash_cached(i, SigHashType::All, &mut sighash_cache)
.map_err(SignProcessingError::InsanePsbtMissingInput)?;
let sighash = secp256k1::Message::from_slice(&sighash).expect("Sighash is 32 bytes");

let signature = secp.sign(&sighash, bitcoin_privkey);
let mut raw_sig = signature.serialize_der().to_vec();
raw_sig.push(SigHashType::All as u8);
let res = spend_tx
.add_signature(i, our_pubkey.key, signature, &secp)
.expect("We must provide valid signatures");
assert!(
psbtin.partial_sigs.insert(our_pubkey, raw_sig).is_none(),
res.is_none(),
"If there was a signature for our pubkey already and we didn't return \
above, we have big problems.."
);
Expand All @@ -132,7 +122,6 @@ pub fn process_sign_message(
)
.map_err(SignProcessingError::Database)?;
}
spend_tx.psbt_mut().inputs = psbtins;

// Belt-and-suspender: if it was not empty, we would have signed a prevout twice.
assert!(signatures.is_empty());
Expand Down Expand Up @@ -183,6 +172,7 @@ mod test {
&test_framework.config,
sign_a.clone(),
&test_framework.bitcoin_privkey,
&test_framework.secp,
)
.unwrap();
let tx = tx.unwrap();
Expand All @@ -200,6 +190,7 @@ mod test {
&test_framework.config,
sign_a,
&test_framework.bitcoin_privkey,
&test_framework.secp,
)
.unwrap();
assert_eq!(tx, second_psbt.unwrap());
Expand All @@ -221,6 +212,7 @@ mod test {
&test_framework.config,
sign_a,
&test_framework.bitcoin_privkey,
&test_framework.secp,
)
.unwrap();
assert!(tx.is_none(), "It contains a duplicated outpoint");
Expand All @@ -236,6 +228,7 @@ mod test {
&test_framework.config,
sign_msg,
&test_framework.bitcoin_privkey,
&test_framework.secp,
)
.unwrap_err();
}
Expand Down
Loading

0 comments on commit e41e5ca

Please sign in to comment.