diff --git a/.github/workflows/code_coverage.yml b/.github/workflows/code_coverage.yml index b53c47a63..37a7cc46b 100644 --- a/.github/workflows/code_coverage.yml +++ b/.github/workflows/code_coverage.yml @@ -16,44 +16,37 @@ jobs: uses: actions/checkout@v2 - name: Install lcov tools run: sudo apt-get install lcov -y - - name: Install rustup - run: curl https://sh.rustup.rs -sSf | sh -s -- -y - - name: Set default toolchain - run: rustup default nightly - - name: Set profile - run: rustup set profile minimal - - name: Add llvm tools - run: rustup component add llvm-tools-preview - - name: Update toolchain - run: rustup update - - name: Cache cargo - uses: actions/cache@v3 + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + toolchain: "1.65.0" + override: true + profile: minimal + components: llvm-tools-preview + - name: Rust Cache + uses: Swatinem/rust-cache@v2.2.1 - name: Install grcov run: if [[ ! -e ~/.cargo/bin/grcov ]]; then cargo install grcov; fi + - name: Build simulator image + run: docker build -t hwi/ledger_emulator ./ci -f ci/Dockerfile.ledger + - name: Run simulator image + run: docker run --name simulator --network=host hwi/ledger_emulator & + - name: Install Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + - name: Install python dependencies + run: pip install hwi==2.1.1 protobuf==3.20.1 - name: Test - # WARNING: this is not testing the following features: test-esplora, test-hardware-signer, async-interface - # This is because some of our features are mutually exclusive, and generating various reports and - # merging them doesn't seem to be working very well. 
- # For more info, see: - # - https://github.com/bitcoindevkit/bdk/issues/696 - # - https://github.com/bitcoindevkit/bdk/pull/748#issuecomment-1242721040 - run: cargo test --features all-keys,compact_filters,compiler,key-value-db,sqlite,sqlite-bundled,test-electrum,test-rpc,verify + run: cargo test --all-features - name: Run grcov run: mkdir coverage; grcov . --binary-path ./target/debug/ -s . -t lcov --branch --ignore-not-existing --ignore '/*' -o ./coverage/lcov.info - name: Generate HTML coverage report run: genhtml -o coverage-report.html ./coverage/lcov.info - - - name: Coveralls upload - uses: coverallsapp/github-action@master - with: - github-token: ${{ secrets.GITHUB_TOKEN }} + # - name: Coveralls upload + # uses: coverallsapp/github-action@master + # with: + # github-token: ${{ secrets.GITHUB_TOKEN }} - name: Upload artifact uses: actions/upload-artifact@v2 with: diff --git a/.github/workflows/cont_integration.yml b/.github/workflows/cont_integration.yml index e7def9d09..128ba7251 100644 --- a/.github/workflows/cont_integration.yml +++ b/.github/workflows/cont_integration.yml @@ -10,118 +10,27 @@ jobs: strategy: matrix: rust: - - version: 1.65.0 # STABLE + - version: stable clippy: true - version: 1.57.0 # MSRV features: - - default - - minimal - - all-keys - - minimal,use-esplora-blocking - - key-value-db - - electrum - - compact_filters - - use-esplora-blocking,key-value-db,electrum - - compiler - - rpc - - verify - - async-interface - - use-esplora-async - - sqlite - - sqlite-bundled + - --no-default-features + - --all-features steps: - name: checkout uses: actions/checkout@v2 - - name: Generate cache key - run: echo "${{ matrix.rust.version }} ${{ matrix.features }}" | tee .cache_key - - name: cache - uses: actions/cache@v2 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ hashFiles('.cache_key') }}-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }} - - name: Set default toolchain - run: rustup default ${{ 
matrix.rust.version }} - - name: Set profile - run: rustup set profile minimal - - name: Add clippy - if: ${{ matrix.rust.clippy }} - run: rustup component add clippy - - name: Update toolchain - run: rustup update - - name: Build - run: cargo build --features ${{ matrix.features }} --no-default-features - - name: Clippy - if: ${{ matrix.rust.clippy }} - run: cargo clippy --all-targets --features ${{ matrix.features }} --no-default-features -- -D warnings - - name: Test - run: cargo test --features ${{ matrix.features }} --no-default-features - - test-readme-examples: - name: Test README.md examples - runs-on: ubuntu-latest - steps: - - name: checkout - uses: actions/checkout@v2 - - name: cache - uses: actions/cache@v2 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-test-md-docs-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }} - - name: Set default toolchain - run: rustup default nightly - - name: Set profile - run: rustup set profile minimal - - name: Update toolchain - run: rustup update - - name: Test - run: cargo test --features test-md-docs --no-default-features -- doctest::ReadmeDoctests - - test-blockchains: - name: Blockchain ${{ matrix.blockchain.features }} - runs-on: ubuntu-20.04 - strategy: - fail-fast: false - matrix: - blockchain: - - name: electrum - testprefix: blockchain::electrum::test - features: test-electrum,verify - - name: rpc - testprefix: blockchain::rpc::test - features: test-rpc - - name: rpc-legacy - testprefix: blockchain::rpc::test - features: test-rpc-legacy - - name: esplora - testprefix: esplora - features: test-esplora,use-esplora-async,verify - - name: esplora - testprefix: esplora - features: test-esplora,use-esplora-blocking,verify - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Cache - uses: actions/cache@v2 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ github.job }}-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }} 
- - name: Setup rust toolchain + - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: - toolchain: stable - override: true + toolchain: ${{ matrix.rust.version }} + override: true + profile: minimal + - name: Rust Cache + uses: Swatinem/rust-cache@v2.2.1 + - name: Build + run: cargo build ${{ matrix.features }} - name: Test - run: cargo test --no-default-features --features ${{ matrix.blockchain.features }} ${{ matrix.blockchain.testprefix }}::bdk_blockchain_tests + run: cargo test ${{ matrix.features }} check-wasm: name: Check WASM @@ -132,29 +41,26 @@ jobs: steps: - name: Checkout uses: actions/checkout@v2 - - name: Cache - uses: actions/cache@v2 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ github.job }}-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }} # Install a recent version of clang that supports wasm32 - run: wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add - || exit 1 - run: sudo apt-add-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main" || exit 1 - run: sudo apt-get update || exit 1 - run: sudo apt-get install -y libclang-common-10-dev clang-10 libc6-dev-i386 || exit 1 - - name: Set default toolchain - run: rustup default 1.65.0 # STABLE - - name: Set profile - run: rustup set profile minimal - - name: Add target wasm32 - run: rustup target add wasm32-unknown-unknown - - name: Update toolchain - run: rustup update - - name: Check - run: cargo check --target wasm32-unknown-unknown --features async-interface,use-esplora-async,dev-getrandom-wasm --no-default-features + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + profile: minimal + target: "wasm32-unknown-unknown" + - name: Rust Cache + uses: Swatinem/rust-cache@v2.2.1 + - name: Check bdk + working-directory: ./crates/bdk + run: cargo check --target wasm32-unknown-unknown --features dev-getrandom-wasm + - name: Check esplora + 
working-directory: ./crates/esplora + run: cargo check --target wasm32-unknown-unknown --features async --no-default-features fmt: name: Rust fmt @@ -162,42 +68,30 @@ jobs: steps: - name: Checkout uses: actions/checkout@v2 - - name: Set default toolchain - run: rustup default nightly - - name: Set profile - run: rustup set profile minimal - - name: Add rustfmt - run: rustup component add rustfmt - - name: Update toolchain - run: rustup update + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + profile: minimal + components: rustfmt - name: Check fmt run: cargo fmt --all -- --config format_code_in_doc_comments=true --check - test_hardware_wallet: - runs-on: ubuntu-20.04 - strategy: - matrix: - rust: - - version: 1.65.0 # STABLE - - version: 1.57.0 # MSRV + clippy_check: + runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Build simulator image - run: docker build -t hwi/ledger_emulator ./ci -f ci/Dockerfile.ledger - - name: Run simulator image - run: docker run --name simulator --network=host hwi/ledger_emulator & - - name: Install Python - uses: actions/setup-python@v4 - with: - python-version: '3.9' - - name: Install python dependencies - run: pip install hwi==2.1.1 protobuf==3.20.1 - - name: Set default toolchain - run: rustup default ${{ matrix.rust.version }} - - name: Set profile - run: rustup set profile minimal - - name: Update toolchain - run: rustup update - - name: Test - run: cargo test --features test-hardware-signer + - uses: actions/checkout@v1 + - uses: actions-rs/toolchain@v1 + with: + # we pin clippy instead of using "stable" so that our CI doesn't break + # at each new cargo release + toolchain: "1.67.0" + components: clippy + override: true + - name: Rust Cache + uses: Swatinem/rust-cache@v2.2.1 + - uses: actions-rs/clippy-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + args: --all-features --all-targets -- -D warnings diff --git 
a/.github/workflows/nightly_docs.yml b/.github/workflows/nightly_docs.yml index 1b07937d9..0321cc533 100644 --- a/.github/workflows/nightly_docs.yml +++ b/.github/workflows/nightly_docs.yml @@ -9,22 +9,18 @@ jobs: steps: - name: Checkout sources uses: actions/checkout@v2 - - name: Setup cache - uses: actions/cache@v2 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: nightly-docs-${{ hashFiles('**/Cargo.toml','**/Cargo.lock') }} - name: Set default toolchain run: rustup default nightly-2022-12-14 - name: Set profile run: rustup set profile minimal - name: Update toolchain run: rustup update + - name: Rust Cache + uses: Swatinem/rust-cache@v2.2.1 - name: Build docs - run: cargo rustdoc --verbose --features=compiler,electrum,esplora,use-esplora-blocking,compact_filters,rpc,key-value-db,sqlite,all-keys,verify,hardware-signer -- --cfg docsrs -Dwarnings + run: cargo doc --no-deps + env: + RUSTDOCFLAGS: '--cfg docsrs -Dwarnings' - name: Upload artifact uses: actions/upload-artifact@v2 with: diff --git a/Cargo.toml b/Cargo.toml index 18fcfef62..2104196be 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,169 +1,18 @@ -[package] -name = "bdk" -version = "0.27.1" -edition = "2018" -authors = ["Alekos Filini ", "Riccardo Casatta "] -homepage = "https://bitcoindevkit.org" -repository = "https://github.com/bitcoindevkit/bdk" -documentation = "https://docs.rs/bdk" -description = "A modern, lightweight, descriptor-based wallet library" -keywords = ["bitcoin", "wallet", "descriptor", "psbt"] -readme = "README.md" -license = "MIT OR Apache-2.0" - -[dependencies] -bdk-macros = "^0.6" -log = "^0.4" -miniscript = { version = "9.0", features = ["serde"] } -bitcoin = { version = "0.29.1", features = ["serde", "base64", "rand"] } -serde = { version = "^1.0", features = ["derive"] } -serde_json = { version = "^1.0" } -rand = "^0.8" - -# Optional dependencies -sled = { version = "0.34", optional = true } -electrum-client = { version = "0.12", optional = true } 
-esplora-client = { version = "0.3", default-features = false, optional = true } -rusqlite = { version = "0.28.0", optional = true } -ahash = { version = "0.7.6", optional = true } -futures = { version = "0.3", optional = true } -async-trait = { version = "0.1", optional = true } -rocksdb = { version = "0.14", default-features = false, features = ["snappy"], optional = true } -cc = { version = ">=1.0.64", optional = true } -socks = { version = "0.3", optional = true } -hwi = { version = "0.5", optional = true, features = [ "use-miniscript"] } - -bip39 = { version = "1.0.1", optional = true } -bitcoinconsensus = { version = "0.19.0-3", optional = true } - -# Needed by bdk_blockchain_tests macro and the `rpc` feature -bitcoincore-rpc = { version = "0.16", optional = true } - -# Platform-specific dependencies -[target.'cfg(not(target_arch = "wasm32"))'.dependencies] -tokio = { version = "1", features = ["rt", "macros"] } - -[target.'cfg(target_arch = "wasm32")'.dependencies] -getrandom = "0.2" -async-trait = "0.1" -js-sys = "0.3" - -[features] -minimal = [] -compiler = ["miniscript/compiler"] -verify = ["bitcoinconsensus"] -default = ["key-value-db", "electrum"] -sqlite = ["rusqlite", "ahash"] -sqlite-bundled = ["sqlite", "rusqlite/bundled"] -compact_filters = ["rocksdb", "socks", "cc"] -key-value-db = ["sled"] -all-keys = ["keys-bip39"] -keys-bip39 = ["bip39"] -rpc = ["bitcoincore-rpc"] -hardware-signer = ["hwi"] - -# We currently provide mulitple implementations of `Blockchain`, all are -# blocking except for the `EsploraBlockchain` which can be either async or -# blocking, depending on the HTTP client in use. -# -# - Users wanting asynchronous HTTP calls should enable `async-interface` to get -# access to the asynchronous method implementations. Then, if Esplora is wanted, -# enable the `use-esplora-async` feature. -# - Users wanting blocking HTTP calls can use any of the other blockchain -# implementations (`compact_filters`, `electrum`, or `esplora`). 
Users wanting to -# use Esplora should enable the `use-esplora-blocking` feature. -# -# WARNING: Please take care with the features below, various combinations will -# fail to build. We cannot currently build `bdk` with `--all-features`. -async-interface = ["async-trait"] -electrum = ["electrum-client"] -# MUST ALSO USE `--no-default-features`. -use-esplora-async = ["esplora", "esplora-client/async", "futures"] -use-esplora-blocking = ["esplora", "esplora-client/blocking"] -# Deprecated aliases -use-esplora-reqwest = ["use-esplora-async"] -use-esplora-ureq = ["use-esplora-blocking"] -# Typical configurations will not need to use `esplora` feature directly. -esplora = [] - -# Use below feature with `use-esplora-async` to enable reqwest default TLS support -reqwest-default-tls = ["esplora-client/async-https"] - -# Debug/Test features -test-blockchains = ["bitcoincore-rpc", "electrum-client"] -test-electrum = ["electrum", "electrsd/electrs_0_8_10", "electrsd/bitcoind_22_0", "test-blockchains"] -test-rpc = ["rpc", "electrsd/electrs_0_8_10", "electrsd/bitcoind_22_0", "test-blockchains"] -test-rpc-legacy = ["rpc", "electrsd/electrs_0_8_10", "electrsd/bitcoind_0_20_0", "test-blockchains"] -test-esplora = ["electrsd/legacy", "electrsd/esplora_a33e97e1", "electrsd/bitcoind_22_0", "test-blockchains"] -test-md-docs = ["electrum"] -test-hardware-signer = ["hardware-signer"] - -# This feature is used to run `cargo check` in our CI targeting wasm. It's not recommended -# for libraries to explicitly include the "getrandom/js" feature, so we only do it when -# necessary for running our CI. 
See: https://docs.rs/getrandom/0.2.8/getrandom/#webassembly-support -dev-getrandom-wasm = ["getrandom/js"] - -[dev-dependencies] -lazy_static = "1.4" -env_logger = "0.7" -electrsd = "0.22" -# Move back to importing from rust-bitcoin once https://github.com/rust-bitcoin/rust-bitcoin/pull/1342 is released -base64 = "^0.13" -assert_matches = "1.5.0" -# zip versions after 0.6.3 don't work with our MSRV 1.57.0 -zip = "=0.6.3" - -[[example]] -name = "compact_filters_balance" -required-features = ["compact_filters"] - -[[example]] -name = "miniscriptc" -path = "examples/compiler.rs" -required-features = ["compiler"] - -[[example]] -name = "policy" -path = "examples/policy.rs" - -[[example]] -name = "rpcwallet" -path = "examples/rpcwallet.rs" -required-features = ["keys-bip39", "key-value-db", "rpc", "electrsd/bitcoind_22_0"] - -[[example]] -name = "psbt_signer" -path = "examples/psbt_signer.rs" -required-features = ["electrum"] - -[[example]] -name = "hardware_signer" -path = "examples/hardware_signer.rs" -required-features = ["electrum", "hardware-signer"] - -[[example]] -name = "electrum_backend" -path = "examples/electrum_backend.rs" -required-features = ["electrum"] - -[[example]] -name = "esplora_backend_synchronous" -path = "examples/esplora_backend_synchronous.rs" -required-features = ["use-esplora-ureq"] - -[[example]] -name = "esplora_backend_asynchronous" -path = "examples/esplora_backend_asynchronous.rs" -required-features = ["use-esplora-reqwest", "reqwest-default-tls", "async-interface"] - -[[example]] -name = "mnemonic_to_descriptors" -path = "examples/mnemonic_to_descriptors.rs" -required-features = ["all-keys"] - [workspace] -members = ["macros"] -[package.metadata.docs.rs] -features = ["compiler", "electrum", "esplora", "use-esplora-blocking", "compact_filters", "rpc", "key-value-db", "sqlite", "all-keys", "verify", "hardware-signer"] -# defines the configuration attribute `docsrs` -rustdoc-args = ["--cfg", "docsrs"] +members = [ + "crates/bdk", + 
"crates/chain", + "crates/file_store", + "crates/electrum", + "example-crates/keychain_tracker_electrum", + "example-crates/keychain_tracker_esplora", + "example-crates/keychain_tracker_example_cli", + "example-crates/wallet_electrum", + "example-crates/wallet_esplora", + "example-crates/wallet_esplora_async", + "nursery/tmp_plan", + "nursery/coin_select" +] + +[workspace.package] +authors = ["Bitcoin Dev Kit Developers"] diff --git a/README.md b/README.md index 096d98e1d..e50de4b84 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +# The Bitcoin Dev Kit +

BDK

@@ -26,178 +28,27 @@ ## About -The `bdk` library aims to be the core building block for Bitcoin wallets of any kind. - -* It uses [Miniscript](https://github.com/rust-bitcoin/rust-miniscript) to support descriptors with generalized conditions. This exact same library can be used to build - single-sig wallets, multisigs, timelocked contracts and more. -* It supports multiple blockchain backends and databases, allowing developers to choose exactly what's right for their projects. -* It's built to be cross-platform: the core logic works on desktop, mobile, and even WebAssembly. -* It's very easy to extend: developers can implement customized logic for blockchain backends, databases, signers, coin selection, and more, without having to fork and modify this library. - -## Examples - -### Sync the balance of a descriptor - -```rust,no_run -use bdk::Wallet; -use bdk::database::MemoryDatabase; -use bdk::blockchain::ElectrumBlockchain; -use bdk::SyncOptions; -use bdk::electrum_client::Client; -use bdk::bitcoin::Network; - -fn main() -> Result<(), bdk::Error> { - let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); - let wallet = Wallet::new( - "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", - Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), - Network::Testnet, - MemoryDatabase::default(), - )?; - - wallet.sync(&blockchain, SyncOptions::default())?; - - println!("Descriptor balance: {} SAT", wallet.get_balance()?); - - Ok(()) -} -``` - -### Generate a few addresses - -```rust -use bdk::{Wallet, database::MemoryDatabase}; -use bdk::wallet::AddressIndex::New; -use bdk::bitcoin::Network; - -fn main() -> Result<(), bdk::Error> { - let wallet = Wallet::new( - 
"wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", - Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), - Network::Testnet, - MemoryDatabase::default(), - )?; - - println!("Address #0: {}", wallet.get_address(New)?); - println!("Address #1: {}", wallet.get_address(New)?); - println!("Address #2: {}", wallet.get_address(New)?); - - Ok(()) -} -``` - -### Create a transaction - -```rust,no_run -use bdk::{FeeRate, Wallet, SyncOptions}; -use bdk::database::MemoryDatabase; -use bdk::blockchain::ElectrumBlockchain; - -use bdk::electrum_client::Client; -use bdk::wallet::AddressIndex::New; - -use base64; -use bdk::bitcoin::consensus::serialize; -use bdk::bitcoin::Network; - -fn main() -> Result<(), bdk::Error> { - let blockchain = ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); - let wallet = Wallet::new( - "wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/0/*)", - Some("wpkh([c258d2e4/84h/1h/0h]tpubDDYkZojQFQjht8Tm4jsS3iuEmKjTiEGjG6KnuFNKKJb5A6ZUCUZKdvLdSDWofKi4ToRCwb9poe1XdqfUnP4jaJjCB2Zwv11ZLgSbnZSNecE/1/*)"), - Network::Testnet, - MemoryDatabase::default(), - )?; - - wallet.sync(&blockchain, SyncOptions::default())?; - - let send_to = wallet.get_address(New)?; - let (psbt, details) = { - let mut builder = wallet.build_tx(); - builder - .add_recipient(send_to.script_pubkey(), 50_000) - .enable_rbf() - .do_not_spend_change() - .fee_rate(FeeRate::from_sat_per_vb(5.0)); - builder.finish()? 
- }; - - println!("Transaction details: {:#?}", details); - println!("Unsigned PSBT: {}", base64::encode(&serialize(&psbt))); - - Ok(()) -} -``` - -### Sign a transaction - -```rust,no_run -use bdk::{Wallet, SignOptions, database::MemoryDatabase}; - -use base64; -use bdk::bitcoin::consensus::deserialize; -use bdk::bitcoin::Network; - -fn main() -> Result<(), bdk::Error> { - let wallet = Wallet::new( - "wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/0/*)", - Some("wpkh([c258d2e4/84h/1h/0h]tprv8griRPhA7342zfRyB6CqeKF8CJDXYu5pgnj1cjL1u2ngKcJha5jjTRimG82ABzJQ4MQe71CV54xfn25BbhCNfEGGJZnxvCDQCd6JkbvxW6h/1/*)"), - Network::Testnet, - MemoryDatabase::default(), - )?; - - let psbt = "..."; - let mut psbt = deserialize(&base64::decode(psbt).unwrap())?; - - let _finalized = wallet.sign(&mut psbt, SignOptions::default())?; - - Ok(()) -} -``` - -## Testing - -### Unit testing - -```bash -cargo test -``` - -### Integration testing - -Integration testing require testing features, for example: - -```bash -cargo test --features test-electrum -``` - -The other options are `test-esplora`, `test-rpc` or `test-rpc-legacy` which runs against an older version of Bitcoin Core. -Note that `electrs` and `bitcoind` binaries are automatically downloaded (on mac and linux), to specify you already have installed binaries you must use `--no-default-features` and provide `BITCOIND_EXE` and `ELECTRS_EXE` as environment variables. - -## Running under WASM - -If you want to run this library under WASM you will probably have to add the following lines to you `Cargo.toml`: - -```toml -[dependencies] -getrandom = { version = "0.2", features = ["js"] } -``` - -This enables the `rand` crate to work in environments where JavaScript is available. See [this link](https://docs.rs/getrandom/0.2.8/getrandom/#webassembly-support) to learn more. 
+The `bdk` libraries aim to provide well-engineered and reviewed components for Bitcoin-based applications. +It is built upon the excellent [`rust-bitcoin`] and [`rust-miniscript`] crates. -## License +> ⚠ The Bitcoin Dev Kit developers are in the process of releasing a `v1.0` which is a fundamental re-write of how the library works. +> For some background on this project, see: https://bitcoindevkit.org/blog/road-to-bdk-1/ (ignore the timeline 😁) +> For a release timeline see the [`bdk_core_staging`] repo where a lot of the component work is being done. The plan is that everything in the `bdk_core_staging` repo will be moved into the `crates` directory here. -Licensed under either of +## Architecture - * Apache License, Version 2.0 - ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) - * MIT license - ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) +The project is split up into several crates in the `/crates` directory: -at your option. +- [`bdk`](./crates/bdk): Contains the central high-level `Wallet` type that is built from the low-level mechanisms provided by the other components +- [`chain`](./crates/chain): Tools for storing and indexing chain data +- [`file_store`](./crates/file_store): An (experimental) persistence backend for storing chain data in a single file. +- [`esplora`](./crates/esplora): Extends the [`esplora-client`] crate with methods to fetch chain data from an esplora HTTP server in the form that [`bdk_chain`] and `Wallet` can consume. +- [`electrum`](./crates/electrum): Extends the [`electrum-client`] crate with methods to fetch chain data from an electrum server in the form that [`bdk_chain`] and `Wallet` can consume. 
-## Contribution +Fully working examples of how to use these components are in `/example-crates` -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in the work by you, as defined in the Apache-2.0 license, shall be -dual licensed as above, without any additional terms or conditions. +[`bdk_core_staging`]: https://github.com/LLFourn/bdk_core_staging +[`rust-miniscript`]: https://github.com/rust-bitcoin/rust-miniscript +[`rust-bitcoin`]: https://github.com/rust-bitcoin/rust-bitcoin +[`esplora-client`]: https://docs.rs/esplora-client/0.3.0/esplora_client/ +[`electrum-client`]: https://docs.rs/electrum-client/0.13.0/electrum_client/ diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 000000000..3f726dbda --- /dev/null +++ b/clippy.toml @@ -0,0 +1 @@ +msrv="1.57.0" diff --git a/crates/bdk/Cargo.toml b/crates/bdk/Cargo.toml new file mode 100644 index 000000000..bc00b6a76 --- /dev/null +++ b/crates/bdk/Cargo.toml @@ -0,0 +1,69 @@ +[package] +name = "bdk" +homepage = "https://bitcoindevkit.org" +version = "1.0.0-alpha.0" +repository = "https://github.com/bitcoindevkit/bdk" +documentation = "https://docs.rs/bdk" +description = "A modern, lightweight, descriptor-based wallet library" +keywords = ["bitcoin", "wallet", "descriptor", "psbt"] +readme = "README.md" +license = "MIT OR Apache-2.0" +authors = ["Bitcoin Dev Kit Developers"] +edition = "2021" +rust-version = "1.57" + +[dependencies] +log = "^0.4" +rand = "^0.8" +miniscript = { version = "9", features = ["serde"] } +bitcoin = { version = "0.29", features = ["serde", "base64", "rand"] } +serde = { version = "^1.0", features = ["derive"] } +serde_json = { version = "^1.0" } +bdk_chain = { path = "../chain", version = "0.3.1", features = ["miniscript", "serde"] } + +# Optional dependencies +hwi = { version = "0.5", optional = true, features = [ "use-miniscript"] } +bip39 = { version = "1.0.1", optional = true } + +[target.'cfg(target_arch = 
"wasm32")'.dependencies] +getrandom = "0.2" +js-sys = "0.3" + + +[features] +default = ["std"] +std = [] +compiler = ["miniscript/compiler"] +all-keys = ["keys-bip39"] +keys-bip39 = ["bip39"] +hardware-signer = ["hwi"] +test-hardware-signer = ["hardware-signer"] + + +# This feature is used to run `cargo check` in our CI targeting wasm. It's not recommended +# for libraries to explicitly include the "getrandom/js" feature, so we only do it when +# necessary for running our CI. See: https://docs.rs/getrandom/0.2.8/getrandom/#webassembly-support +dev-getrandom-wasm = ["getrandom/js"] + +[dev-dependencies] +lazy_static = "1.4" +env_logger = "0.7" +# Move back to importing from rust-bitcoin once https://github.com/rust-bitcoin/rust-bitcoin/pull/1342 is released +base64 = "^0.13" +assert_matches = "1.5.0" + + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + + +[[example]] +name = "mnemonic_to_descriptors" +path = "examples/mnemonic_to_descriptors.rs" +required-features = ["all-keys"] + +[[example]] +name = "miniscriptc" +path = "examples/compiler.rs" +required-features = ["compiler"] diff --git a/LICENSE b/crates/bdk/LICENSE similarity index 100% rename from LICENSE rename to crates/bdk/LICENSE diff --git a/LICENSE-APACHE b/crates/bdk/LICENSE-APACHE similarity index 100% rename from LICENSE-APACHE rename to crates/bdk/LICENSE-APACHE diff --git a/LICENSE-MIT b/crates/bdk/LICENSE-MIT similarity index 100% rename from LICENSE-MIT rename to crates/bdk/LICENSE-MIT diff --git a/crates/bdk/README.md b/crates/bdk/README.md new file mode 100644 index 000000000..00ce504a4 --- /dev/null +++ b/crates/bdk/README.md @@ -0,0 +1,227 @@ +
+

BDK

+ + + +

+ A modern, lightweight, descriptor-based wallet library written in Rust! +

+ +

+ Crate Info + MIT or Apache-2.0 Licensed + CI Status + + API Docs + Rustc Version 1.57.0+ + Chat on Discord +

+ +

+ Project Homepage + | + Documentation +

+
+ +## `bdk` + +The `bdk` crate provides the [`Wallet`](`crate::Wallet`) type which is a simple, high-level +interface built from the low-level components of [`bdk_chain`]. `Wallet` is a good starting point +for many simple applications as well as a good demonstration of how to use the other mechanisms to +construct a wallet. It has two keychains (external and internal) which are defined by +[miniscript descriptors][`rust-miniscript`] and uses them to generate addresses. When you give it +chain data it also uses the descriptors to find transaction outputs owned by them. From there, you +can create and sign transactions. + +For more information, see the [`Wallet`'s documentation](https://docs.rs/bdk/latest/bdk/wallet/struct.Wallet.html). + +### Blockchain data + +In order to get blockchain data for `Wallet` to consume, you have to put it into particular form. +Right now this is [`KeychainScan`] which is defined in [`bdk_chain`]. + +This can be created manually or from blockchain-scanning crates. + +**Blockchain Data Sources** + +* [`bdk_esplora`]: Grabs blockchain data from Esplora for updating BDK structures. +* [`bdk_electrum`]: Grabs blockchain data from Electrum for updating BDK structures. + +**Examples** + +* [`example-crates/wallet_esplora`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_esplora) +* [`example-crates/wallet_electrum`](https://github.com/bitcoindevkit/bdk/tree/master/example-crates/wallet_electrum) + +### Persistence + +To persist the `Wallet` on disk, `Wallet` needs to be constructed with a +[`Persist`](https://docs.rs/bdk_chain/latest/bdk_chain/keychain/struct.KeychainPersist.html) implementation. + +**Implementations** + +* [`bdk_file_store`]: a simple flat-file implementation of `Persist`. 
+ +**Example** + +```rust,no_run +use bdk::{bitcoin::Network, wallet::{AddressIndex, Wallet}}; + +fn main() { + // a type that implements `Persist` + let db = (); + + let descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + let mut wallet = Wallet::new(descriptor, None, db, Network::Testnet).expect("should create"); + + // get a new address (this increments revealed derivation index) + println!("revealed address: {}", wallet.get_address(AddressIndex::New)); + println!("staged changes: {:?}", wallet.staged()); + // persist changes + wallet.commit().expect("must save"); +} +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Testing + +### Unit testing + +```bash +cargo test +``` + +## License + +Licensed under either of + + * Apache License, Version 2.0 + ([LICENSE-APACHE](LICENSE-APACHE) or ) + * MIT license + ([LICENSE-MIT](LICENSE-MIT) or ) + +at your option. + +## Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. 
+
+[`bdk_chain`]: https://docs.rs/bdk_chain/latest
+[`bdk_file_store`]: https://docs.rs/bdk_file_store/latest
+[`bdk_electrum`]: https://docs.rs/bdk_electrum/latest
+[`bdk_esplora`]: https://docs.rs/bdk_esplora/latest
+[`KeychainScan`]: https://docs.rs/bdk_chain/latest/bdk_chain/keychain/struct.KeychainScan.html
+[`rust-miniscript`]: https://docs.rs/miniscript/latest/miniscript/index.html
diff --git a/examples/compiler.rs b/crates/bdk/examples/compiler.rs
similarity index 93%
rename from examples/compiler.rs
rename to crates/bdk/examples/compiler.rs
index e17feb3ad..f8918895c 100644
--- a/examples/compiler.rs
+++ b/crates/bdk/examples/compiler.rs
@@ -24,7 +24,6 @@ use bitcoin::Network;
 use miniscript::policy::Concrete;
 use miniscript::Descriptor;
 
-use bdk::database::memory::MemoryDatabase;
 use bdk::wallet::AddressIndex::New;
 use bdk::{KeychainKind, Wallet};
 
@@ -54,14 +53,12 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     info!("Compiled into following Descriptor: \n{}", descriptor);
 
-    let database = MemoryDatabase::new();
-
     // Create a new wallet from this descriptor
-    let wallet = Wallet::new(&format!("{}", descriptor), None, Network::Regtest, database)?;
+    let mut wallet = Wallet::new_no_persist(&format!("{}", descriptor), None, Network::Regtest)?;
 
     info!(
         "First derived address from the descriptor: \n{}",
-        wallet.get_address(New)? 
+ wallet.get_address(New) ); // BDK also has it's own `Policy` structure to represent the spending condition in a more diff --git a/examples/mnemonic_to_descriptors.rs b/crates/bdk/examples/mnemonic_to_descriptors.rs similarity index 100% rename from examples/mnemonic_to_descriptors.rs rename to crates/bdk/examples/mnemonic_to_descriptors.rs diff --git a/examples/policy.rs b/crates/bdk/examples/policy.rs similarity index 100% rename from examples/policy.rs rename to crates/bdk/examples/policy.rs diff --git a/src/descriptor/checksum.rs b/crates/bdk/src/descriptor/checksum.rs similarity index 98% rename from src/descriptor/checksum.rs rename to crates/bdk/src/descriptor/checksum.rs index b4ba0e8ff..07120ab7d 100644 --- a/src/descriptor/checksum.rs +++ b/crates/bdk/src/descriptor/checksum.rs @@ -15,6 +15,7 @@ //! checksum of a descriptor use crate::descriptor::DescriptorError; +use alloc::string::String; const INPUT_CHARSET: &[u8] = b"0123456789()[],'/*abcdefgh@:$%{}IJKLMNOPQRSTUVWXYZ&+-.;<=>?!^_|~ijklmnopqrstuvwxyzABCDEFGH`#\"\\ "; const CHECKSUM_CHARSET: &[u8] = b"qpzry9x8gf2tvdw0s3jn54khce6mua7l"; @@ -170,7 +171,7 @@ mod test { #[test] fn test_calc_checksum_invalid_character() { - let sparkle_heart = unsafe { std::str::from_utf8_unchecked(&[240, 159, 146, 150]) }; + let sparkle_heart = unsafe { core::str::from_utf8_unchecked(&[240, 159, 146, 150]) }; let invalid_desc = format!("wpkh(tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcL{}fjdi5qUvw3VDfgYiH5mNsj5izuiu2N/1/2/*)", sparkle_heart); assert_matches!( diff --git a/src/descriptor/dsl.rs b/crates/bdk/src/descriptor/dsl.rs similarity index 96% rename from src/descriptor/dsl.rs rename to crates/bdk/src/descriptor/dsl.rs index 67ef67057..60fac19e5 100644 --- a/src/descriptor/dsl.rs +++ b/crates/bdk/src/descriptor/dsl.rs @@ -23,7 +23,7 @@ macro_rules! 
impl_top_level_sh { }; ( $inner_struct:ident, $constructor:ident, $sortedmulti_constructor:ident, $ctx:ident, sortedmulti $( $inner:tt )* ) => {{ - use std::marker::PhantomData; + use core::marker::PhantomData; use $crate::miniscript::descriptor::{$inner_struct, Descriptor, DescriptorPublicKey}; use $crate::miniscript::$ctx; @@ -35,7 +35,7 @@ macro_rules! impl_top_level_sh { $crate::impl_sortedmulti!(build_desc, sortedmulti $( $inner )*) }}; ( $inner_struct:ident, $constructor:ident, $sortedmulti_constructor:ident, $ctx:ident, sortedmulti_vec $( $inner:tt )* ) => {{ - use std::marker::PhantomData; + use core::marker::PhantomData; use $crate::miniscript::descriptor::{$inner_struct, Descriptor, DescriptorPublicKey}; use $crate::miniscript::$ctx; @@ -203,8 +203,8 @@ macro_rules! impl_node_opcode_two { a_keymap.extend(b_keymap.into_iter()); let minisc = $crate::miniscript::Miniscript::from_ast($crate::miniscript::miniscript::decode::Terminal::$terminal_variant( - std::sync::Arc::new(a_minisc), - std::sync::Arc::new(b_minisc), + $crate::alloc::sync::Arc::new(a_minisc), + $crate::alloc::sync::Arc::new(b_minisc), ))?; minisc.check_miniscript()?; @@ -234,9 +234,9 @@ macro_rules! impl_node_opcode_three { let networks = $crate::keys::merge_networks(&networks, &c_networks); let minisc = $crate::miniscript::Miniscript::from_ast($crate::miniscript::miniscript::decode::Terminal::$terminal_variant( - std::sync::Arc::new(a_minisc), - std::sync::Arc::new(b_minisc), - std::sync::Arc::new(c_minisc), + $crate::alloc::sync::Arc::new(a_minisc), + $crate::alloc::sync::Arc::new(b_minisc), + $crate::alloc::sync::Arc::new(c_minisc), ))?; minisc.check_miniscript()?; @@ -263,7 +263,7 @@ macro_rules! impl_sortedmulti { )* ]; - keys.into_iter().collect::, _>>() + keys.into_iter().collect::, _>>() .map_err($crate::descriptor::DescriptorError::Key) .and_then(|keys| $crate::keys::make_sortedmulti($thresh, keys, $build_desc, &secp)) }); @@ -274,7 +274,7 @@ macro_rules! 
impl_sortedmulti { #[macro_export] macro_rules! parse_tap_tree { ( @merge $tree_a:expr, $tree_b:expr) => {{ - use std::sync::Arc; + use $crate::alloc::sync::Arc; use $crate::miniscript::descriptor::TapTree; $tree_a @@ -318,7 +318,7 @@ macro_rules! parse_tap_tree { // Single leaf ( $op:ident ( $( $minisc:tt )* ) ) => {{ - use std::sync::Arc; + use $crate::alloc::sync::Arc; use $crate::miniscript::descriptor::TapTree; $crate::fragment!( $op ( $( $minisc )* ) ) @@ -337,7 +337,7 @@ macro_rules! apply_modifier { .and_then(|(minisc, keymap, networks)| { let minisc = $crate::miniscript::Miniscript::from_ast( $crate::miniscript::miniscript::decode::Terminal::$terminal_variant( - std::sync::Arc::new(minisc), + $crate::alloc::sync::Arc::new(minisc), ), )?; @@ -374,8 +374,8 @@ macro_rules! apply_modifier { $inner.and_then(|(a_minisc, a_keymap, a_networks)| { $crate::impl_leaf_opcode_value_two!( AndV, - std::sync::Arc::new(a_minisc), - std::sync::Arc::new($crate::fragment!(true).unwrap().0) + $crate::alloc::sync::Arc::new(a_minisc), + $crate::alloc::sync::Arc::new($crate::fragment!(true).unwrap().0) ) .map(|(minisc, _, _)| (minisc, a_keymap, a_networks)) }) @@ -384,8 +384,8 @@ macro_rules! apply_modifier { $inner.and_then(|(a_minisc, a_keymap, a_networks)| { $crate::impl_leaf_opcode_value_two!( OrI, - std::sync::Arc::new($crate::fragment!(false).unwrap().0), - std::sync::Arc::new(a_minisc) + $crate::alloc::sync::Arc::new($crate::fragment!(false).unwrap().0), + $crate::alloc::sync::Arc::new(a_minisc) ) .map(|(minisc, _, _)| (minisc, a_keymap, a_networks)) }) @@ -394,8 +394,8 @@ macro_rules! 
apply_modifier { $inner.and_then(|(a_minisc, a_keymap, a_networks)| { $crate::impl_leaf_opcode_value_two!( OrI, - std::sync::Arc::new(a_minisc), - std::sync::Arc::new($crate::fragment!(false).unwrap().0) + $crate::alloc::sync::Arc::new(a_minisc), + $crate::alloc::sync::Arc::new($crate::fragment!(false).unwrap().0) ) .map(|(minisc, _, _)| (minisc, a_keymap, a_networks)) }) @@ -495,6 +495,8 @@ macro_rules! apply_modifier { /// let (descriptor, key_map, networks) = bdk::descriptor!(wpkh(my_key))?; /// # Ok::<(), Box>(()) /// ``` +/// +/// [`Vec`]: alloc::vec::Vec #[macro_export] macro_rules! descriptor { ( bare ( $( $minisc:tt )* ) ) => ({ @@ -599,7 +601,7 @@ macro_rules! group_multi_keys { )* ]; - keys.into_iter().collect::, _>>() + keys.into_iter().collect::, _>>() .map_err($crate::descriptor::DescriptorError::Key) }}; } @@ -744,8 +746,8 @@ macro_rules! fragment { ( thresh_vec ( $thresh:expr, $items:expr ) ) => ({ use $crate::miniscript::descriptor::KeyMap; - let (items, key_maps_networks): (Vec<_>, Vec<_>) = $items.into_iter().map(|(a, b, c)| (a, (b, c))).unzip(); - let items = items.into_iter().map(std::sync::Arc::new).collect(); + let (items, key_maps_networks): ($crate::alloc::vec::Vec<_>, $crate::alloc::vec::Vec<_>) = $items.into_iter().map(|(a, b, c)| (a, (b, c))).unzip(); + let items = items.into_iter().map($crate::alloc::sync::Arc::new).collect(); let (key_maps, valid_networks) = key_maps_networks.into_iter().fold((KeyMap::default(), $crate::keys::any_network()), |(mut keys_acc, net_acc), (key, net)| { keys_acc.extend(key.into_iter()); @@ -760,7 +762,7 @@ macro_rules! fragment { ( thresh ( $thresh:expr, $( $inner:tt )* ) ) => ({ let items = $crate::fragment_internal!( @v $( $inner )* ); - items.into_iter().collect::, _>>() + items.into_iter().collect::, _>>() .and_then(|items| $crate::fragment!(thresh_vec($thresh, items))) }); ( multi_vec ( $thresh:expr, $keys:expr ) ) => ({ @@ -793,12 +795,13 @@ macro_rules! 
fragment { #[cfg(test)] mod test { + use alloc::string::ToString; use bitcoin::hashes::hex::ToHex; use bitcoin::secp256k1::Secp256k1; use miniscript::descriptor::{DescriptorPublicKey, KeyMap}; use miniscript::{Descriptor, Legacy, Segwitv0}; - use std::str::FromStr; + use core::str::FromStr; use crate::descriptor::{DescriptorError, DescriptorMeta}; use crate::keys::{DescriptorKey, IntoDescriptorKey, ValidNetworks}; diff --git a/src/descriptor/error.rs b/crates/bdk/src/descriptor/error.rs similarity index 99% rename from src/descriptor/error.rs rename to crates/bdk/src/descriptor/error.rs index d558c926c..83efb01a3 100644 --- a/src/descriptor/error.rs +++ b/crates/bdk/src/descriptor/error.rs @@ -76,6 +76,7 @@ impl std::fmt::Display for Error { } } +#[cfg(feature = "std")] impl std::error::Error for Error {} impl_error!(bitcoin::util::bip32::Error, Bip32); diff --git a/src/descriptor/mod.rs b/crates/bdk/src/descriptor/mod.rs similarity index 97% rename from src/descriptor/mod.rs rename to crates/bdk/src/descriptor/mod.rs index b307bbf1c..9a6dc2b0c 100644 --- a/src/descriptor/mod.rs +++ b/crates/bdk/src/descriptor/mod.rs @@ -14,7 +14,9 @@ //! This module contains generic utilities to work with descriptors, plus some re-exported types //! from [`miniscript`]. 
-use std::collections::BTreeMap; +use crate::collections::BTreeMap; +use alloc::string::String; +use alloc::vec::Vec; use bitcoin::util::bip32::{ChildNumber, DerivationPath, ExtendedPubKey, Fingerprint, KeySource}; use bitcoin::util::{psbt, taproot}; @@ -353,27 +355,27 @@ where pub(crate) trait DescriptorMeta { fn is_witness(&self) -> bool; fn is_taproot(&self) -> bool; - fn get_extended_keys(&self) -> Result>, DescriptorError>; - fn derive_from_hd_keypaths<'s>( + fn get_extended_keys(&self) -> Vec>; + fn derive_from_hd_keypaths( &self, hd_keypaths: &HdKeyPaths, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option; - fn derive_from_tap_key_origins<'s>( + fn derive_from_tap_key_origins( &self, tap_key_origins: &TapKeyOrigins, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option; - fn derive_from_psbt_key_origins<'s>( + fn derive_from_psbt_key_origins( &self, key_origins: BTreeMap, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option; - fn derive_from_psbt_input<'s>( + fn derive_from_psbt_input( &self, psbt_input: &psbt::Input, utxo: Option, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option; } @@ -394,7 +396,7 @@ impl DescriptorMeta for ExtendedDescriptor { self.desc_type() == DescriptorType::Tr } - fn get_extended_keys(&self) -> Result>, DescriptorError> { + fn get_extended_keys(&self) -> Vec> { let mut answer = Vec::new(); self.for_each_key(|pk| { @@ -405,13 +407,13 @@ impl DescriptorMeta for ExtendedDescriptor { true }); - Ok(answer) + answer } - fn derive_from_psbt_key_origins<'s>( + fn derive_from_psbt_key_origins( &self, key_origins: BTreeMap, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option { // Ensure that deriving `xpub` with `path` yields `expected` let verify_key = |xpub: &DescriptorXKey, @@ -495,10 +497,10 @@ impl DescriptorMeta for ExtendedDescriptor { path_found.map(|path| self.at_derivation_index(path)) } - fn derive_from_hd_keypaths<'s>( + fn derive_from_hd_keypaths( &self, hd_keypaths: &HdKeyPaths, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option { // 
"Convert" an hd_keypaths map to the format required by `derive_from_psbt_key_origins` let key_origins = hd_keypaths @@ -513,10 +515,10 @@ impl DescriptorMeta for ExtendedDescriptor { self.derive_from_psbt_key_origins(key_origins, secp) } - fn derive_from_tap_key_origins<'s>( + fn derive_from_tap_key_origins( &self, tap_key_origins: &TapKeyOrigins, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option { // "Convert" a tap_key_origins map to the format required by `derive_from_psbt_key_origins` let key_origins = tap_key_origins @@ -526,11 +528,11 @@ impl DescriptorMeta for ExtendedDescriptor { self.derive_from_psbt_key_origins(key_origins, secp) } - fn derive_from_psbt_input<'s>( + fn derive_from_psbt_input( &self, psbt_input: &psbt::Input, utxo: Option, - secp: &'s SecpCtx, + secp: &SecpCtx, ) -> Option { if let Some(derived) = self.derive_from_hd_keypaths(&psbt_input.bip32_derivation, secp) { return Some(derived); @@ -579,7 +581,8 @@ impl DescriptorMeta for ExtendedDescriptor { #[cfg(test)] mod test { - use std::str::FromStr; + use alloc::string::ToString; + use core::str::FromStr; use assert_matches::assert_matches; use bitcoin::consensus::encode::deserialize; diff --git a/src/descriptor/policy.rs b/crates/bdk/src/descriptor/policy.rs similarity index 99% rename from src/descriptor/policy.rs rename to crates/bdk/src/descriptor/policy.rs index 6923ce3fb..af3e4a3b8 100644 --- a/src/descriptor/policy.rs +++ b/crates/bdk/src/descriptor/policy.rs @@ -32,13 +32,15 @@ //! //! let signers = Arc::new(SignersContainer::build(key_map, &extended_desc, &secp)); //! let policy = extended_desc.extract_policy(&signers, BuildSatisfaction::None, &secp)?; -//! println!("policy: {}", serde_json::to_string(&policy)?); +//! println!("policy: {}", serde_json::to_string(&policy).unwrap()); //! # Ok::<(), bdk::Error>(()) //! 
``` -use std::cmp::max; -use std::collections::{BTreeMap, HashSet, VecDeque}; -use std::fmt; +use crate::collections::{BTreeMap, HashSet, VecDeque}; +use alloc::string::String; +use alloc::vec::Vec; +use core::cmp::max; +use core::fmt; use serde::ser::SerializeMap; use serde::{Serialize, Serializer}; @@ -523,6 +525,7 @@ impl fmt::Display for PolicyError { } } +#[cfg(feature = "std")] impl std::error::Error for PolicyError {} impl Policy { @@ -1146,12 +1149,12 @@ mod test { use crate::descriptor::policy::SatisfiableItem::{EcdsaSignature, Multisig, Thresh}; use crate::keys::{DescriptorKey, IntoDescriptorKey}; use crate::wallet::signer::SignersContainer; + use alloc::{string::ToString, sync::Arc}; use assert_matches::assert_matches; use bitcoin::secp256k1::Secp256k1; use bitcoin::util::bip32; use bitcoin::Network; - use std::str::FromStr; - use std::sync::Arc; + use core::str::FromStr; const TPRV0_STR:&str = "tprv8ZgxMBicQKsPdZXrcHNLf5JAJWFAoJ2TrstMRdSKtEggz6PddbuSkvHKM9oKJyFgZV1B7rw8oChspxyYbtmEXYyg1AjfWbL3ho3XHDpHRZf"; const TPRV1_STR:&str = "tprv8ZgxMBicQKsPdpkqS7Eair4YxjcuuvDPNYmKX3sCniCf16tHEVrjjiSXEkFRnUH77yXc6ZcwHHcLNfjdi5qUvw3VDfgYiH5mNsj5izuiu2N"; @@ -1444,12 +1447,12 @@ mod test { .into_wallet_descriptor(&secp, Network::Testnet) .unwrap(); let signers_container = Arc::new(SignersContainer::build(keymap, &wallet_desc, &secp)); - let policy = wallet_desc + let _policy = wallet_desc .extract_policy(&signers_container, BuildSatisfaction::None, &secp) .unwrap() .unwrap(); - println!("desc policy = {:?}", policy); // TODO remove - // TODO how should this fail with mixed timelocks? + // println!("desc policy = {:?}", policy); // TODO remove + // TODO how should this fail with mixed timelocks? 
} // - multiple timelocks of the same type should be correctly merged together @@ -1469,12 +1472,12 @@ mod test { .into_wallet_descriptor(&secp, Network::Testnet) .unwrap(); let signers_container = Arc::new(SignersContainer::build(keymap, &wallet_desc, &secp)); - let policy = wallet_desc + let _policy = wallet_desc .extract_policy(&signers_container, BuildSatisfaction::None, &secp) .unwrap() .unwrap(); - println!("desc policy = {:?}", policy); // TODO remove - // TODO how should this merge timelocks? + // println!("desc policy = {:?}", policy); // TODO remove + // TODO how should this merge timelocks? let (prvkey1, _pubkey1, _fingerprint1) = setup_keys(TPRV0_STR, PATH, &secp); let locktime_seconds0 = 500000100; let locktime_seconds1 = 500000200; @@ -1487,12 +1490,12 @@ mod test { .into_wallet_descriptor(&secp, Network::Testnet) .unwrap(); let signers_container = Arc::new(SignersContainer::build(keymap, &wallet_desc, &secp)); - let policy = wallet_desc + let _policy = wallet_desc .extract_policy(&signers_container, BuildSatisfaction::None, &secp) .unwrap() .unwrap(); - println!("desc policy = {:?}", policy); // TODO remove + // println!("desc policy = {:?}", policy); // TODO remove // TODO how should this merge timelocks? 
} diff --git a/src/descriptor/template.rs b/crates/bdk/src/descriptor/template.rs similarity index 88% rename from src/descriptor/template.rs rename to crates/bdk/src/descriptor/template.rs index 060cd9971..cf0296c91 100644 --- a/src/descriptor/template.rs +++ b/crates/bdk/src/descriptor/template.rs @@ -73,22 +73,16 @@ impl IntoWalletDescriptor for T { /// /// ``` /// # use bdk::bitcoin::{PrivateKey, Network}; -/// # use bdk::{Wallet}; -/// # use bdk::database::MemoryDatabase; +/// # use bdk::Wallet; /// # use bdk::wallet::AddressIndex::New; /// use bdk::template::P2Pkh; /// /// let key = /// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?; -/// let wallet = Wallet::new( -/// P2Pkh(key), -/// None, -/// Network::Testnet, -/// MemoryDatabase::default(), -/// )?; +/// let mut wallet = Wallet::new_no_persist(P2Pkh(key), None, Network::Testnet)?; /// /// assert_eq!( -/// wallet.get_address(New)?.to_string(), +/// wallet.get_address(New).to_string(), /// "mwJ8hxFYW19JLuc65RCTaP4v1rzVU8cVMT" /// ); /// # Ok::<_, Box>(()) @@ -107,22 +101,16 @@ impl> DescriptorTemplate for P2Pkh { /// /// ``` /// # use bdk::bitcoin::{PrivateKey, Network}; -/// # use bdk::{Wallet}; -/// # use bdk::database::MemoryDatabase; -/// # use bdk::wallet::AddressIndex::New; +/// # use bdk::Wallet; /// use bdk::template::P2Wpkh_P2Sh; +/// use bdk::wallet::AddressIndex; /// /// let key = /// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?; -/// let wallet = Wallet::new( -/// P2Wpkh_P2Sh(key), -/// None, -/// Network::Testnet, -/// MemoryDatabase::default(), -/// )?; +/// let mut wallet = Wallet::new_no_persist(P2Wpkh_P2Sh(key), None, Network::Testnet)?; /// /// assert_eq!( -/// wallet.get_address(New)?.to_string(), +/// wallet.get_address(AddressIndex::New).to_string(), /// "2NB4ox5VDRw1ecUv6SnT3VQHPXveYztRqk5" /// ); /// # Ok::<_, Box>(()) @@ -143,21 +131,15 @@ impl> DescriptorTemplate for P2Wpkh_P2Sh { /// ``` /// # use 
bdk::bitcoin::{PrivateKey, Network}; /// # use bdk::{Wallet}; -/// # use bdk::database::MemoryDatabase; -/// # use bdk::wallet::AddressIndex::New; /// use bdk::template::P2Wpkh; +/// use bdk::wallet::AddressIndex::New; /// /// let key = /// bitcoin::PrivateKey::from_wif("cTc4vURSzdx6QE6KVynWGomDbLaA75dNALMNyfjh3p8DRRar84Um")?; -/// let wallet = Wallet::new( -/// P2Wpkh(key), -/// None, -/// Network::Testnet, -/// MemoryDatabase::default(), -/// )?; +/// let mut wallet = Wallet::new_no_persist(P2Wpkh(key), None, Network::Testnet)?; /// /// assert_eq!( -/// wallet.get_address(New)?.to_string(), +/// wallet.get_address(New).to_string(), /// "tb1q4525hmgw265tl3drrl8jjta7ayffu6jf68ltjd" /// ); /// # Ok::<_, Box>(()) @@ -182,20 +164,18 @@ impl> DescriptorTemplate for P2Wpkh { /// # use std::str::FromStr; /// # use bdk::bitcoin::{PrivateKey, Network}; /// # use bdk::{Wallet, KeychainKind}; -/// # use bdk::database::MemoryDatabase; /// # use bdk::wallet::AddressIndex::New; /// use bdk::template::Bip44; /// /// let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?; -/// let wallet = Wallet::new( +/// let mut wallet = Wallet::new_no_persist( /// Bip44(key.clone(), KeychainKind::External), /// Some(Bip44(key, KeychainKind::Internal)), /// Network::Testnet, -/// MemoryDatabase::default() /// )?; /// -/// assert_eq!(wallet.get_address(New)?.to_string(), "mmogjc7HJEZkrLqyQYqJmxUqFaC7i4uf89"); -/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "pkh([c55b303f/44'/1'/0']tpubDCuorCpzvYS2LCD75BR46KHE8GdDeg1wsAgNZeNr6DaB5gQK1o14uErKwKLuFmeemkQ6N2m3rNgvctdJLyr7nwu2yia7413Hhg8WWE44cgT/0/*)#5wrnv0xt"); +/// assert_eq!(wallet.get_address(New).to_string(), "mmogjc7HJEZkrLqyQYqJmxUqFaC7i4uf89"); +/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), 
"pkh([c55b303f/44'/1'/0']tpubDCuorCpzvYS2LCD75BR46KHE8GdDeg1wsAgNZeNr6DaB5gQK1o14uErKwKLuFmeemkQ6N2m3rNgvctdJLyr7nwu2yia7413Hhg8WWE44cgT/0/*)#5wrnv0xt"); /// # Ok::<_, Box>(()) /// ``` pub struct Bip44>(pub K, pub KeychainKind); @@ -221,21 +201,19 @@ impl> DescriptorTemplate for Bip44 { /// # use std::str::FromStr; /// # use bdk::bitcoin::{PrivateKey, Network}; /// # use bdk::{Wallet, KeychainKind}; -/// # use bdk::database::MemoryDatabase; /// # use bdk::wallet::AddressIndex::New; /// use bdk::template::Bip44Public; /// /// let key = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU")?; /// let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f")?; -/// let wallet = Wallet::new( +/// let mut wallet = Wallet::new_no_persist( /// Bip44Public(key.clone(), fingerprint, KeychainKind::External), /// Some(Bip44Public(key, fingerprint, KeychainKind::Internal)), /// Network::Testnet, -/// MemoryDatabase::default() /// )?; /// -/// assert_eq!(wallet.get_address(New)?.to_string(), "miNG7dJTzJqNbFS19svRdTCisC65dsubtR"); -/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "pkh([c55b303f/44'/1'/0']tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU/0/*)#cfhumdqz"); +/// assert_eq!(wallet.get_address(New).to_string(), "miNG7dJTzJqNbFS19svRdTCisC65dsubtR"); +/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "pkh([c55b303f/44'/1'/0']tpubDDDzQ31JkZB7VxUr9bjvBivDdqoFLrDPyLWtLapArAi51ftfmCb2DPxwLQzX65iNcXz1DGaVvyvo6JQ6rTU73r2gqdEo8uov9QKRb7nKCSU/0/*)#cfhumdqz"); /// # Ok::<_, Box>(()) /// ``` pub struct Bip44Public>(pub K, pub bip32::Fingerprint, pub KeychainKind); @@ -261,20 +239,18 @@ impl> DescriptorTemplate for Bip44Public { /// # use std::str::FromStr; /// # use bdk::bitcoin::{PrivateKey, Network}; /// # use bdk::{Wallet, 
KeychainKind}; -/// # use bdk::database::MemoryDatabase; /// # use bdk::wallet::AddressIndex::New; /// use bdk::template::Bip49; /// /// let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?; -/// let wallet = Wallet::new( +/// let mut wallet = Wallet::new_no_persist( /// Bip49(key.clone(), KeychainKind::External), /// Some(Bip49(key, KeychainKind::Internal)), /// Network::Testnet, -/// MemoryDatabase::default() /// )?; /// -/// assert_eq!(wallet.get_address(New)?.to_string(), "2N4zkWAoGdUv4NXhSsU8DvS5MB36T8nKHEB"); -/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDDYr4kdnZgjjShzYNjZUZXUUtpXaofdkMaipyS8ThEh45qFmhT4hKYways7UXmg6V7het1QiFo9kf4kYUXyDvV4rHEyvSpys9pjCB3pukxi/0/*))#s9vxlc8e"); +/// assert_eq!(wallet.get_address(New).to_string(), "2N4zkWAoGdUv4NXhSsU8DvS5MB36T8nKHEB"); +/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDDYr4kdnZgjjShzYNjZUZXUUtpXaofdkMaipyS8ThEh45qFmhT4hKYways7UXmg6V7het1QiFo9kf4kYUXyDvV4rHEyvSpys9pjCB3pukxi/0/*))#s9vxlc8e"); /// # Ok::<_, Box>(()) /// ``` pub struct Bip49>(pub K, pub KeychainKind); @@ -300,21 +276,19 @@ impl> DescriptorTemplate for Bip49 { /// # use std::str::FromStr; /// # use bdk::bitcoin::{PrivateKey, Network}; /// # use bdk::{Wallet, KeychainKind}; -/// # use bdk::database::MemoryDatabase; /// # use bdk::wallet::AddressIndex::New; /// use bdk::template::Bip49Public; /// /// let key = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L")?; /// let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f")?; -/// let wallet = Wallet::new( +/// let mut wallet = Wallet::new_no_persist( /// Bip49Public(key.clone(), fingerprint, 
KeychainKind::External), /// Some(Bip49Public(key, fingerprint, KeychainKind::Internal)), /// Network::Testnet, -/// MemoryDatabase::default() /// )?; /// -/// assert_eq!(wallet.get_address(New)?.to_string(), "2N3K4xbVAHoiTQSwxkZjWDfKoNC27pLkYnt"); -/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L/0/*))#3tka9g0q"); +/// assert_eq!(wallet.get_address(New).to_string(), "2N3K4xbVAHoiTQSwxkZjWDfKoNC27pLkYnt"); +/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "sh(wpkh([c55b303f/49'/1'/0']tpubDC49r947KGK52X5rBWS4BLs5m9SRY3pYHnvRrm7HcybZ3BfdEsGFyzCMzayi1u58eT82ZeyFZwH7DD6Q83E3fM9CpfMtmnTygnLfP59jL9L/0/*))#3tka9g0q"); /// # Ok::<_, Box>(()) /// ``` pub struct Bip49Public>(pub K, pub bip32::Fingerprint, pub KeychainKind); @@ -340,20 +314,18 @@ impl> DescriptorTemplate for Bip49Public { /// # use std::str::FromStr; /// # use bdk::bitcoin::{PrivateKey, Network}; /// # use bdk::{Wallet, KeychainKind}; -/// # use bdk::database::MemoryDatabase; /// # use bdk::wallet::AddressIndex::New; /// use bdk::template::Bip84; /// /// let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPeZRHk4rTG6orPS2CRNFX3njhUXx5vj9qGog5ZMH4uGReDWN5kCkY3jmWEtWause41CDvBRXD1shKknAMKxT99o9qUTRVC6m")?; -/// let wallet = Wallet::new( +/// let mut wallet = Wallet::new_no_persist( /// Bip84(key.clone(), KeychainKind::External), /// Some(Bip84(key, KeychainKind::Internal)), /// Network::Testnet, -/// MemoryDatabase::default() /// )?; /// -/// assert_eq!(wallet.get_address(New)?.to_string(), "tb1qhl85z42h7r4su5u37rvvw0gk8j2t3n9y7zsg4n"); -/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDDc5mum24DekpNw92t6fHGp8Gr2JjF9J7i4TZBtN6Vp8xpAULG5CFaKsfugWa5imhrQQUZKXe261asP5koDHo5bs3qNTmf3U3o4v9SaB8gg/0/*)#6kfecsmr"); 
+/// assert_eq!(wallet.get_address(New).to_string(), "tb1qhl85z42h7r4su5u37rvvw0gk8j2t3n9y7zsg4n"); +/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDDc5mum24DekpNw92t6fHGp8Gr2JjF9J7i4TZBtN6Vp8xpAULG5CFaKsfugWa5imhrQQUZKXe261asP5koDHo5bs3qNTmf3U3o4v9SaB8gg/0/*)#6kfecsmr"); /// # Ok::<_, Box>(()) /// ``` pub struct Bip84>(pub K, pub KeychainKind); @@ -379,21 +351,19 @@ impl> DescriptorTemplate for Bip84 { /// # use std::str::FromStr; /// # use bdk::bitcoin::{PrivateKey, Network}; /// # use bdk::{Wallet, KeychainKind}; -/// # use bdk::database::MemoryDatabase; /// # use bdk::wallet::AddressIndex::New; /// use bdk::template::Bip84Public; /// /// let key = bitcoin::util::bip32::ExtendedPubKey::from_str("tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q")?; /// let fingerprint = bitcoin::util::bip32::Fingerprint::from_str("c55b303f")?; -/// let wallet = Wallet::new( +/// let mut wallet = Wallet::new_no_persist( /// Bip84Public(key.clone(), fingerprint, KeychainKind::External), /// Some(Bip84Public(key, fingerprint, KeychainKind::Internal)), /// Network::Testnet, -/// MemoryDatabase::default() /// )?; /// -/// assert_eq!(wallet.get_address(New)?.to_string(), "tb1qedg9fdlf8cnnqfd5mks6uz5w4kgpk2pr6y4qc7"); -/// assert_eq!(wallet.public_descriptor(KeychainKind::External)?.unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)#dhu402yv"); +/// assert_eq!(wallet.get_address(New).to_string(), "tb1qedg9fdlf8cnnqfd5mks6uz5w4kgpk2pr6y4qc7"); +/// assert_eq!(wallet.public_descriptor(KeychainKind::External).unwrap().to_string(), "wpkh([c55b303f/84'/1'/0']tpubDC2Qwo2TFsaNC4ju8nrUJ9mqVT3eSgdmy1yPqhgkjwmke3PRXutNGRYAUo6RCHTcVQaDR3ohNU9we59brGHuEKPvH1ags2nevW5opEE9Z5Q/0/*)#dhu402yv"); /// # Ok::<_, Box>(()) /// ``` pub struct Bip84Public>(pub K, pub 
bip32::Fingerprint, pub KeychainKind); @@ -418,7 +388,7 @@ macro_rules! expand_make_bipxx { keychain: KeychainKind, network: Network, ) -> Result, DescriptorError> { - let mut derivation_path = Vec::with_capacity(4); + let mut derivation_path = alloc::vec::Vec::with_capacity(4); derivation_path.push(bip32::ChildNumber::from_hardened_idx(bip)?); match network { @@ -478,7 +448,8 @@ expand_make_bipxx!(segwit_v0, Segwitv0); mod test { // test existing descriptor templates, make sure they are expanded to the right descriptors - use std::str::FromStr; + use alloc::{string::ToString, vec::Vec}; + use core::str::FromStr; use super::*; use crate::descriptor::{DescriptorError, DescriptorMeta}; diff --git a/src/error.rs b/crates/bdk/src/error.rs similarity index 56% rename from src/error.rs rename to crates/bdk/src/error.rs index fba0fd2d7..fcbb8288a 100644 --- a/src/error.rs +++ b/crates/bdk/src/error.rs @@ -9,21 +9,17 @@ // You may not use this file except in accordance with one or both of these // licenses. 
-use std::fmt; - use crate::bitcoin::Network; use crate::{descriptor, wallet}; +use alloc::{string::String, vec::Vec}; use bitcoin::{OutPoint, Txid}; +use core::fmt; /// Errors that can be thrown by the [`Wallet`](crate::wallet::Wallet) #[derive(Debug)] pub enum Error { - /// Wrong number of bytes found when trying to convert to u32 - InvalidU32Bytes(Vec), /// Generic error Generic(String), - /// This error is thrown when trying to convert Bare and Public key script to address - ScriptDoesntHaveAddressForm, /// Cannot build a tx without recipients NoRecipients, /// `manually_selected_only` option is selected but no utxo has been passed @@ -79,74 +75,18 @@ pub enum Error { InvalidPolicyPathError(crate::descriptor::policy::PolicyError), /// Signing error Signer(crate::wallet::signer::SignerError), - /// Invalid network - InvalidNetwork { - /// requested network, for example what is given as bdk-cli option - requested: Network, - /// found network, for example the network of the bitcoin node - found: Network, - }, - #[cfg(feature = "verify")] - /// Transaction verification error - Verification(crate::wallet::verify::VerifyError), - - /// Progress value must be between `0.0` (included) and `100.0` (included) - InvalidProgressValue(f32), - /// Progress update error (maybe the channel has been closed) - ProgressUpdateError, /// Requested outpoint doesn't exist in the tx (vout greater than available outputs) InvalidOutpoint(OutPoint), - /// Error related to the parsing and usage of descriptors Descriptor(crate::descriptor::error::Error), - /// Encoding error - Encode(bitcoin::consensus::encode::Error), /// Miniscript error Miniscript(miniscript::Error), /// Miniscript PSBT error MiniscriptPsbt(MiniscriptPsbtError), /// BIP32 error Bip32(bitcoin::util::bip32::Error), - /// A secp256k1 error - Secp256k1(bitcoin::secp256k1::Error), - /// Error serializing or deserializing JSON data - Json(serde_json::Error), - /// Hex decoding error - Hex(bitcoin::hashes::hex::Error), /// 
Partially signed bitcoin transaction error Psbt(bitcoin::util::psbt::Error), - /// Partially signed bitcoin transaction parse error - PsbtParse(bitcoin::util::psbt::PsbtParseError), - - //KeyMismatch(bitcoin::secp256k1::PublicKey, bitcoin::secp256k1::PublicKey), - //MissingInputUTXO(usize), - //InvalidAddressNetwork(Address), - //DifferentTransactions, - //DifferentDescriptorStructure, - //Uncapable(crate::blockchain::Capability), - //MissingCachedAddresses, - /// [`crate::blockchain::WalletSync`] sync attempt failed due to missing scripts in cache which - /// are needed to satisfy `stop_gap`. - MissingCachedScripts(MissingCachedScripts), - - #[cfg(feature = "electrum")] - /// Electrum client error - Electrum(electrum_client::Error), - #[cfg(feature = "esplora")] - /// Esplora client error - Esplora(Box), - #[cfg(feature = "compact_filters")] - /// Compact filters client error) - CompactFilters(crate::blockchain::compact_filters::CompactFiltersError), - #[cfg(feature = "key-value-db")] - /// Sled database error - Sled(sled::Error), - #[cfg(feature = "rpc")] - /// Rpc client error - Rpc(bitcoincore_rpc::Error), - #[cfg(feature = "sqlite")] - /// Rusqlite client error - Rusqlite(rusqlite::Error), } /// Errors returned by miniscript when updating inconsistent PSBTs @@ -169,25 +109,10 @@ impl fmt::Display for MiniscriptPsbtError { impl std::error::Error for MiniscriptPsbtError {} -/// Represents the last failed [`crate::blockchain::WalletSync`] sync attempt in which we were short -/// on cached `scriptPubKey`s. -#[derive(Debug)] -pub struct MissingCachedScripts { - /// Number of scripts in which txs were requested during last request. - pub last_count: usize, - /// Minimum number of scripts to cache more of in order to satisfy `stop_gap`. 
- pub missing_count: usize, -} - impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::InvalidU32Bytes(_) => write!( - f, - "Wrong number of bytes found when trying to convert to u32" - ), Self::Generic(err) => write!(f, "Generic error: {}", err), - Self::ScriptDoesntHaveAddressForm => write!(f, "Script doesn't have address form"), Self::NoRecipients => write!(f, "Cannot build tx without recipients"), Self::NoUtxosSelected => write!(f, "No UTXO selected"), Self::OutputBelowDustLimit(limit) => { @@ -223,54 +148,21 @@ impl fmt::Display for Error { } Self::InvalidPolicyPathError(err) => write!(f, "Invalid policy path: {}", err), Self::Signer(err) => write!(f, "Signer error: {}", err), - Self::InvalidNetwork { requested, found } => write!( - f, - "Invalid network: requested {} but found {}", - requested, found - ), - #[cfg(feature = "verify")] - Self::Verification(err) => write!(f, "Transaction verification error: {}", err), - Self::InvalidProgressValue(progress) => { - write!(f, "Invalid progress value: {}", progress) - } - Self::ProgressUpdateError => write!( - f, - "Progress update error (maybe the channel has been closed)" - ), Self::InvalidOutpoint(outpoint) => write!( f, "Requested outpoint doesn't exist in the tx: {}", outpoint ), Self::Descriptor(err) => write!(f, "Descriptor error: {}", err), - Self::Encode(err) => write!(f, "Encoding error: {}", err), Self::Miniscript(err) => write!(f, "Miniscript error: {}", err), Self::MiniscriptPsbt(err) => write!(f, "Miniscript PSBT error: {}", err), Self::Bip32(err) => write!(f, "BIP32 error: {}", err), - Self::Secp256k1(err) => write!(f, "Secp256k1 error: {}", err), - Self::Json(err) => write!(f, "Serialize/Deserialize JSON error: {}", err), - Self::Hex(err) => write!(f, "Hex decoding error: {}", err), Self::Psbt(err) => write!(f, "PSBT error: {}", err), - Self::PsbtParse(err) => write!(f, "Impossible to parse PSBT: {}", err), - 
Self::MissingCachedScripts(missing_cached_scripts) => { - write!(f, "Missing cached scripts: {:?}", missing_cached_scripts) - } - #[cfg(feature = "electrum")] - Self::Electrum(err) => write!(f, "Electrum client error: {}", err), - #[cfg(feature = "esplora")] - Self::Esplora(err) => write!(f, "Esplora client error: {}", err), - #[cfg(feature = "compact_filters")] - Self::CompactFilters(err) => write!(f, "Compact filters client error: {}", err), - #[cfg(feature = "key-value-db")] - Self::Sled(err) => write!(f, "Sled database error: {}", err), - #[cfg(feature = "rpc")] - Self::Rpc(err) => write!(f, "RPC client error: {}", err), - #[cfg(feature = "sqlite")] - Self::Rusqlite(err) => write!(f, "SQLite error: {}", err), } } } +#[cfg(feature = "std")] impl std::error::Error for Error {} macro_rules! impl_error { @@ -278,7 +170,7 @@ macro_rules! impl_error { impl_error!($from, $to, Error); }; ( $from:ty, $to:ident, $impl_for:ty ) => { - impl std::convert::From<$from> for $impl_for { + impl core::convert::From<$from> for $impl_for { fn from(err: $from) -> Self { <$impl_for>::$to(err) } @@ -301,48 +193,7 @@ impl From for Error { } } -impl_error!(bitcoin::consensus::encode::Error, Encode); impl_error!(miniscript::Error, Miniscript); impl_error!(MiniscriptPsbtError, MiniscriptPsbt); impl_error!(bitcoin::util::bip32::Error, Bip32); -impl_error!(bitcoin::secp256k1::Error, Secp256k1); -impl_error!(serde_json::Error, Json); -impl_error!(bitcoin::hashes::hex::Error, Hex); impl_error!(bitcoin::util::psbt::Error, Psbt); -impl_error!(bitcoin::util::psbt::PsbtParseError, PsbtParse); - -#[cfg(feature = "electrum")] -impl_error!(electrum_client::Error, Electrum); -#[cfg(feature = "key-value-db")] -impl_error!(sled::Error, Sled); -#[cfg(feature = "rpc")] -impl_error!(bitcoincore_rpc::Error, Rpc); -#[cfg(feature = "sqlite")] -impl_error!(rusqlite::Error, Rusqlite); - -#[cfg(feature = "compact_filters")] -impl From for Error { - fn from(other: 
crate::blockchain::compact_filters::CompactFiltersError) -> Self { - match other { - crate::blockchain::compact_filters::CompactFiltersError::Global(e) => *e, - err => Error::CompactFilters(err), - } - } -} - -#[cfg(feature = "verify")] -impl From for Error { - fn from(other: crate::wallet::verify::VerifyError) -> Self { - match other { - crate::wallet::verify::VerifyError::Global(inner) => *inner, - err => Error::Verification(err), - } - } -} - -#[cfg(feature = "esplora")] -impl From for Error { - fn from(other: crate::blockchain::esplora::EsploraError) -> Self { - Error::Esplora(Box::new(other)) - } -} diff --git a/src/keys/bip39.rs b/crates/bdk/src/keys/bip39.rs similarity index 98% rename from src/keys/bip39.rs rename to crates/bdk/src/keys/bip39.rs index c79683acf..78f544934 100644 --- a/src/keys/bip39.rs +++ b/crates/bdk/src/keys/bip39.rs @@ -14,6 +14,7 @@ // TODO: maybe write our own implementation of bip39? Seems stupid to have an extra dependency for // something that should be fairly simple to re-implement. +use alloc::string::String; use bitcoin::util::bip32; use bitcoin::Network; @@ -150,7 +151,8 @@ impl GeneratableKey for Mnemonic { #[cfg(test)] mod test { - use std::str::FromStr; + use alloc::string::ToString; + use core::str::FromStr; use bitcoin::util::bip32; diff --git a/src/keys/mod.rs b/crates/bdk/src/keys/mod.rs similarity index 98% rename from src/keys/mod.rs rename to crates/bdk/src/keys/mod.rs index 84c447fe0..e8c9ca07e 100644 --- a/src/keys/mod.rs +++ b/crates/bdk/src/keys/mod.rs @@ -11,11 +11,13 @@ //! 
Key formats -use std::any::TypeId; -use std::collections::HashSet; -use std::marker::PhantomData; -use std::ops::Deref; -use std::str::FromStr; +use crate::collections::HashSet; +use alloc::string::{String, ToString}; +use alloc::vec::Vec; +use core::any::TypeId; +use core::marker::PhantomData; +use core::ops::Deref; +use core::str::FromStr; use bitcoin::secp256k1::{self, Secp256k1, Signing}; @@ -277,7 +279,7 @@ impl ExtScriptContext for Ctx { /// /// ```compile_fail /// use bdk::bitcoin::PublicKey; -/// use std::str::FromStr; +/// use core::str::FromStr; /// /// use bdk::keys::{DescriptorKey, IntoDescriptorKey, KeyError}; /// @@ -460,12 +462,11 @@ impl From for ExtendedKey { /// [`ExtendedPubKey`]: (bip32::ExtendedPubKey) pub trait DerivableKey: Sized { /// Consume `self` and turn it into an [`ExtendedKey`] - /// - /// This can be used to get direct access to `xprv`s and `xpub`s for types that implement this trait, - /// like [`Mnemonic`](bip39::Mnemonic) when the `keys-bip39` feature is enabled. #[cfg_attr( feature = "keys-bip39", doc = r##" +This can be used to get direct access to `xprv`s and `xpub`s for types that implement this trait, +like [`Mnemonic`](bip39::Mnemonic) when the `keys-bip39` feature is enabled. 
```rust use bdk::bitcoin::Network; use bdk::keys::{DerivableKey, ExtendedKey}; @@ -619,7 +620,7 @@ pub trait GeneratableKey: Sized { /// Extra options required by the `generate_with_entropy` type Options; /// Returned error in case of failure - type Error: std::fmt::Debug; + type Error: core::fmt::Debug; /// Generate a key given the extra options and the entropy fn generate_with_entropy( @@ -946,6 +947,7 @@ impl std::fmt::Display for KeyError { } } +#[cfg(feature = "std")] impl std::error::Error for KeyError {} #[cfg(test)] diff --git a/crates/bdk/src/lib.rs b/crates/bdk/src/lib.rs new file mode 100644 index 000000000..19aa55408 --- /dev/null +++ b/crates/bdk/src/lib.rs @@ -0,0 +1,46 @@ +#![doc = include_str!("../README.md")] +#![no_std] +#[cfg(feature = "std")] +#[macro_use] +extern crate std; + +#[doc(hidden)] +#[macro_use] +pub extern crate alloc; + +pub extern crate bitcoin; +#[cfg(feature = "hardware-signer")] +pub extern crate hwi; +extern crate log; +pub extern crate miniscript; +extern crate serde; +extern crate serde_json; + +#[cfg(feature = "keys-bip39")] +extern crate bip39; + +#[allow(unused_imports)] +#[macro_use] +pub(crate) mod error; +pub mod descriptor; +pub mod keys; +pub mod psbt; +pub(crate) mod types; +pub mod wallet; + +pub use descriptor::template; +pub use descriptor::HdKeyPaths; +pub use error::Error; +pub use types::*; +pub use wallet::signer; +pub use wallet::signer::SignOptions; +pub use wallet::tx_builder::TxBuilder; +pub use wallet::Wallet; + +/// Get the version of BDK at runtime +pub fn version() -> &'static str { + env!("CARGO_PKG_VERSION", "unknown") +} + +pub use bdk_chain as chain; +pub(crate) use bdk_chain::collections; diff --git a/crates/bdk/src/psbt/mod.rs b/crates/bdk/src/psbt/mod.rs new file mode 100644 index 000000000..872432367 --- /dev/null +++ b/crates/bdk/src/psbt/mod.rs @@ -0,0 +1,79 @@ +// Bitcoin Dev Kit +// Written in 2020 by Alekos Filini +// +// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers +// +// This file 
is licensed under the Apache License, Version 2.0 or the MIT license +// , at your option. +// You may not use this file except in accordance with one or both of these +// licenses. + +//! Additional functions on the `rust-bitcoin` `PartiallySignedTransaction` structure. + +use crate::FeeRate; +use alloc::vec::Vec; +use bitcoin::util::psbt::PartiallySignedTransaction as Psbt; +use bitcoin::TxOut; + +// TODO upstream the functions here to `rust-bitcoin`? + +/// Trait to add functions to extract utxos and calculate fees. +pub trait PsbtUtils { + /// Get the `TxOut` for the specified input index, if it doesn't exist in the PSBT `None` is returned. + fn get_utxo_for(&self, input_index: usize) -> Option; + + /// The total transaction fee amount, sum of input amounts minus sum of output amounts, in sats. + /// If the PSBT is missing a TxOut for an input returns None. + fn fee_amount(&self) -> Option; + + /// The transaction's fee rate. This value will only be accurate if calculated AFTER the + /// `PartiallySignedTransaction` is finalized and all witness/signature data is added to the + /// transaction. + /// If the PSBT is missing a TxOut for an input returns None. + fn fee_rate(&self) -> Option; +} + +impl PsbtUtils for Psbt { + #[allow(clippy::all)] // We want to allow `manual_map` but it is too new. 
+ fn get_utxo_for(&self, input_index: usize) -> Option { + let tx = &self.unsigned_tx; + + if input_index >= tx.input.len() { + return None; + } + + if let Some(input) = self.inputs.get(input_index) { + if let Some(wit_utxo) = &input.witness_utxo { + Some(wit_utxo.clone()) + } else if let Some(in_tx) = &input.non_witness_utxo { + Some(in_tx.output[tx.input[input_index].previous_output.vout as usize].clone()) + } else { + None + } + } else { + None + } + } + + fn fee_amount(&self) -> Option { + let tx = &self.unsigned_tx; + let utxos: Option> = (0..tx.input.len()).map(|i| self.get_utxo_for(i)).collect(); + + utxos.map(|inputs| { + let input_amount: u64 = inputs.iter().map(|i| i.value).sum(); + let output_amount: u64 = self.unsigned_tx.output.iter().map(|o| o.value).sum(); + input_amount + .checked_sub(output_amount) + .expect("input amount must be greater than output amount") + }) + } + + fn fee_rate(&self) -> Option { + let fee_amount = self.fee_amount(); + fee_amount.map(|fee| { + let weight = self.clone().extract_tx().weight(); + FeeRate::from_wu(fee, weight) + }) + } +} diff --git a/src/types.rs b/crates/bdk/src/types.rs similarity index 61% rename from src/types.rs rename to crates/bdk/src/types.rs index cd2ad5b36..4472508f7 100644 --- a/src/types.rs +++ b/crates/bdk/src/types.rs @@ -9,16 +9,18 @@ // You may not use this file except in accordance with one or both of these // licenses. 
-use std::convert::AsRef; -use std::ops::Sub; +use alloc::boxed::Box; +use core::convert::AsRef; +use core::ops::Sub; +use bdk_chain::ConfirmationTime; use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut}; use bitcoin::{hash_types::Txid, util::psbt}; use serde::{Deserialize, Serialize}; /// Types of keychains -#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] pub enum KeychainKind { /// External External = 0, @@ -123,7 +125,7 @@ impl FeeRate { } } -impl std::default::Default for FeeRate { +impl Default for FeeRate { fn default() -> Self { FeeRate::default_min_relay_fee() } @@ -163,6 +165,10 @@ pub struct LocalUtxo { pub keychain: KeychainKind, /// Whether this UTXO is spent or not pub is_spent: bool, + /// The derivation index for the script pubkey in the wallet + pub derivation_index: u32, + /// The confirmation time for transaction containing this utxo + pub confirmation_time: ConfirmationTime, } /// A [`Utxo`] with its `satisfaction_weight`. @@ -236,220 +242,30 @@ pub struct TransactionDetails { /// Sent value (sats) /// Sum of owned inputs of this transaction. pub sent: u64, - /// Fee value (sats) if confirmed. - /// The availability of the fee depends on the backend. It's never `None` with an Electrum - /// Server backend, but it could be `None` with a Bitcoin RPC node without txindex that receive - /// funds while offline. + /// Fee value in sats if it was available. pub fee: Option, /// If the transaction is confirmed, contains height and Unix timestamp of the block containing the /// transaction, unconfirmed transaction contains `None`. 
- pub confirmation_time: Option, + pub confirmation_time: ConfirmationTime, } impl PartialOrd for TransactionDetails { - fn partial_cmp(&self, other: &Self) -> Option { + fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } impl Ord for TransactionDetails { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { + fn cmp(&self, other: &Self) -> core::cmp::Ordering { self.confirmation_time .cmp(&other.confirmation_time) .then_with(|| self.txid.cmp(&other.txid)) } } -/// Block height and timestamp of a block -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)] -pub struct BlockTime { - /// confirmation block height - pub height: u32, - /// confirmation block timestamp - pub timestamp: u64, -} - -impl PartialOrd for BlockTime { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for BlockTime { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.height - .cmp(&other.height) - .then_with(|| self.timestamp.cmp(&other.timestamp)) - } -} - -/// **DEPRECATED**: Confirmation time of a transaction -/// -/// The structure has been renamed to `BlockTime` -#[deprecated(note = "This structure has been renamed to `BlockTime`")] -pub type ConfirmationTime = BlockTime; - -impl BlockTime { - /// Returns `Some` `BlockTime` if both `height` and `timestamp` are `Some` - pub fn new(height: Option, timestamp: Option) -> Option { - match (height, timestamp) { - (Some(height), Some(timestamp)) => Some(BlockTime { height, timestamp }), - _ => None, - } - } -} - -/// Balance differentiated in various categories -#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone, Default)] -pub struct Balance { - /// All coinbase outputs not yet matured - pub immature: u64, - /// Unconfirmed UTXOs generated by a wallet tx - pub trusted_pending: u64, - /// Unconfirmed UTXOs received from an external wallet - pub untrusted_pending: u64, - /// Confirmed and immediately spendable balance - pub confirmed: u64, -} - 
-impl Balance { - /// Get sum of trusted_pending and confirmed coins - pub fn get_spendable(&self) -> u64 { - self.confirmed + self.trusted_pending - } - - /// Get the whole balance visible to the wallet - pub fn get_total(&self) -> u64 { - self.confirmed + self.trusted_pending + self.untrusted_pending + self.immature - } -} - -impl std::fmt::Display for Balance { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{{ immature: {}, trusted_pending: {}, untrusted_pending: {}, confirmed: {} }}", - self.immature, self.trusted_pending, self.untrusted_pending, self.confirmed - ) - } -} - -impl std::ops::Add for Balance { - type Output = Self; - - fn add(self, other: Self) -> Self { - Self { - immature: self.immature + other.immature, - trusted_pending: self.trusted_pending + other.trusted_pending, - untrusted_pending: self.untrusted_pending + other.untrusted_pending, - confirmed: self.confirmed + other.confirmed, - } - } -} - -impl std::iter::Sum for Balance { - fn sum>(iter: I) -> Self { - iter.fold( - Balance { - ..Default::default() - }, - |a, b| a + b, - ) - } -} - #[cfg(test)] mod tests { use super::*; - use bitcoin::hashes::Hash; - - #[test] - fn sort_block_time() { - let block_time_a = BlockTime { - height: 100, - timestamp: 100, - }; - - let block_time_b = BlockTime { - height: 100, - timestamp: 110, - }; - - let block_time_c = BlockTime { - height: 0, - timestamp: 0, - }; - - let mut vec = vec![ - block_time_a.clone(), - block_time_b.clone(), - block_time_c.clone(), - ]; - vec.sort(); - let expected = vec![block_time_c, block_time_a, block_time_b]; - - assert_eq!(vec, expected) - } - - #[test] - fn sort_tx_details() { - let block_time_a = BlockTime { - height: 100, - timestamp: 100, - }; - - let block_time_b = BlockTime { - height: 0, - timestamp: 0, - }; - - let tx_details_a = TransactionDetails { - transaction: None, - txid: Txid::from_inner([0; 32]), - received: 0, - sent: 0, - fee: None, - confirmation_time: None, - }; - 
- let tx_details_b = TransactionDetails { - transaction: None, - txid: Txid::from_inner([0; 32]), - received: 0, - sent: 0, - fee: None, - confirmation_time: Some(block_time_a), - }; - - let tx_details_c = TransactionDetails { - transaction: None, - txid: Txid::from_inner([0; 32]), - received: 0, - sent: 0, - fee: None, - confirmation_time: Some(block_time_b.clone()), - }; - - let tx_details_d = TransactionDetails { - transaction: None, - txid: Txid::from_inner([1; 32]), - received: 0, - sent: 0, - fee: None, - confirmation_time: Some(block_time_b), - }; - - let mut vec = vec![ - tx_details_a.clone(), - tx_details_b.clone(), - tx_details_c.clone(), - tx_details_d.clone(), - ]; - vec.sort(); - let expected = vec![tx_details_a, tx_details_c, tx_details_d, tx_details_b]; - - assert_eq!(vec, expected) - } #[test] fn can_store_feerate_in_const() { diff --git a/src/wallet/coin_selection.rs b/crates/bdk/src/wallet/coin_selection.rs similarity index 84% rename from src/wallet/coin_selection.rs rename to crates/bdk/src/wallet/coin_selection.rs index 8482f96a9..373dbdc38 100644 --- a/src/wallet/coin_selection.rs +++ b/crates/bdk/src/wallet/coin_selection.rs @@ -27,17 +27,15 @@ //! # use std::str::FromStr; //! # use bitcoin::*; //! # use bdk::wallet::{self, coin_selection::*}; -//! # use bdk::database::Database; //! # use bdk::*; //! # use bdk::wallet::coin_selection::decide_change; //! # const TXIN_BASE_WEIGHT: usize = (32 + 4 + 4) * 4; //! #[derive(Debug)] //! struct AlwaysSpendEverything; //! -//! impl CoinSelectionAlgorithm for AlwaysSpendEverything { +//! impl CoinSelectionAlgorithm for AlwaysSpendEverything { //! fn coin_select( //! &self, -//! database: &D, //! required_utxos: Vec, //! optional_utxos: Vec, //! fee_rate: FeeRate, @@ -79,7 +77,7 @@ //! } //! } //! -//! # let wallet = doctest_wallet!(); +//! # let mut wallet = doctest_wallet!(); //! // create wallet, sync, ... //! //! 
let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); @@ -96,26 +94,19 @@ use crate::types::FeeRate; use crate::wallet::utils::IsDust; -use crate::{database::Database, WeightedUtxo}; +use crate::WeightedUtxo; use crate::{error::Error, Utxo}; +use alloc::vec::Vec; use bitcoin::consensus::encode::serialize; use bitcoin::Script; -#[cfg(test)] -use assert_matches::assert_matches; +use core::convert::TryInto; use rand::seq::SliceRandom; -#[cfg(not(test))] -use rand::thread_rng; -use std::collections::HashMap; -use std::convert::TryInto; /// Default coin selection algorithm used by [`TxBuilder`](super::tx_builder::TxBuilder) if not /// overridden -#[cfg(not(test))] pub type DefaultCoinSelectionAlgorithm = BranchAndBoundCoinSelection; -#[cfg(test)] -pub type DefaultCoinSelectionAlgorithm = LargestFirstCoinSelection; // make the tests more predictable // Base weight of a Txin, not counting the weight needed for satisfying it. // prev_txid (32 bytes) + prev_vout (4 bytes) + sequence (4 bytes) @@ -177,7 +168,7 @@ impl CoinSelectionResult { /// selection algorithm when it creates transactions. /// /// For an example see [this module](crate::wallet::coin_selection)'s documentation. 
-pub trait CoinSelectionAlgorithm: std::fmt::Debug { +pub trait CoinSelectionAlgorithm: core::fmt::Debug { /// Perform the coin selection /// /// - `database`: a reference to the wallet's database that can be used to lookup additional @@ -193,7 +184,6 @@ pub trait CoinSelectionAlgorithm: std::fmt::Debug { #[allow(clippy::too_many_arguments)] fn coin_select( &self, - database: &D, required_utxos: Vec, optional_utxos: Vec, fee_rate: FeeRate, @@ -209,10 +199,9 @@ pub trait CoinSelectionAlgorithm: std::fmt::Debug { #[derive(Debug, Default, Clone, Copy)] pub struct LargestFirstCoinSelection; -impl CoinSelectionAlgorithm for LargestFirstCoinSelection { +impl CoinSelectionAlgorithm for LargestFirstCoinSelection { fn coin_select( &self, - _database: &D, required_utxos: Vec, mut optional_utxos: Vec, fee_rate: FeeRate, @@ -246,46 +235,22 @@ impl CoinSelectionAlgorithm for LargestFirstCoinSelection { #[derive(Debug, Default, Clone, Copy)] pub struct OldestFirstCoinSelection; -impl CoinSelectionAlgorithm for OldestFirstCoinSelection { +impl CoinSelectionAlgorithm for OldestFirstCoinSelection { fn coin_select( &self, - database: &D, required_utxos: Vec, mut optional_utxos: Vec, fee_rate: FeeRate, target_amount: u64, drain_script: &Script, ) -> Result { - // query db and create a blockheight lookup table - let blockheights = optional_utxos - .iter() - .map(|wu| wu.utxo.outpoint().txid) - // fold is used so we can skip db query for txid that already exist in hashmap acc - .fold(Ok(HashMap::new()), |bh_result_acc, txid| { - bh_result_acc.and_then(|mut bh_acc| { - if bh_acc.contains_key(&txid) { - Ok(bh_acc) - } else { - database.get_tx(&txid, false).map(|details| { - bh_acc.insert( - txid, - details.and_then(|d| d.confirmation_time.map(|ct| ct.height)), - ); - bh_acc - }) - } - }) - })?; - // We put the "required UTXOs" first and make sure the optional UTXOs are sorted from // oldest to newest according to blocktime // For utxo that doesn't exist in DB, they will have lowest 
priority to be selected let utxos = { - optional_utxos.sort_unstable_by_key(|wu| { - match blockheights.get(&wu.utxo.outpoint().txid) { - Some(Some(blockheight)) => blockheight, - _ => &u32::MAX, - } + optional_utxos.sort_unstable_by_key(|wu| match &wu.utxo { + Utxo::Local(local) => Some(local.confirmation_time), + Utxo::Foreign { .. } => None, }); required_utxos @@ -399,7 +364,7 @@ impl OutputGroup { /// Branch and bound coin selection /// /// Code adapted from Bitcoin Core's implementation and from Mark Erhardt Master's Thesis: -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct BranchAndBoundCoinSelection { size_of_change: u64, } @@ -422,10 +387,9 @@ impl BranchAndBoundCoinSelection { const BNB_TOTAL_TRIES: usize = 100_000; -impl CoinSelectionAlgorithm for BranchAndBoundCoinSelection { +impl CoinSelectionAlgorithm for BranchAndBoundCoinSelection { fn coin_select( &self, - _database: &D, required_utxos: Vec, optional_utxos: Vec, fee_rate: FeeRate, @@ -667,16 +631,7 @@ impl BranchAndBoundCoinSelection { drain_script: &Script, fee_rate: FeeRate, ) -> CoinSelectionResult { - #[cfg(not(test))] - optional_utxos.shuffle(&mut thread_rng()); - #[cfg(test)] - { - use rand::{rngs::StdRng, SeedableRng}; - let seed = [0; 32]; - let mut rng: StdRng = SeedableRng::from_seed(seed); - optional_utxos.shuffle(&mut rng); - } - + optional_utxos.shuffle(&mut rand::thread_rng()); let selected_utxos = optional_utxos.into_iter().fold( (curr_value, vec![]), |(mut amount, mut utxos), utxo| { @@ -722,18 +677,19 @@ impl BranchAndBoundCoinSelection { #[cfg(test)] mod test { - use std::str::FromStr; + use assert_matches::assert_matches; + use core::str::FromStr; + use bdk_chain::ConfirmationTime; use bitcoin::{OutPoint, Script, TxOut}; use super::*; - use crate::database::{BatchOperations, MemoryDatabase}; use crate::types::*; use crate::wallet::Vbytes; use rand::rngs::StdRng; use rand::seq::SliceRandom; - use rand::{Rng, SeedableRng}; + use rand::{Rng, RngCore, SeedableRng}; // n. 
of items on witness (1WU) + signature len (1WU) + signature and sighash (72WU) // + pubkey len (1WU) + pubkey (33WU) + script sig len (1 byte, 4WU) @@ -741,7 +697,7 @@ mod test { const FEE_AMOUNT: u64 = 50; - fn utxo(value: u64, index: u32) -> WeightedUtxo { + fn utxo(value: u64, index: u32, confirmation_time: ConfirmationTime) -> WeightedUtxo { assert!(index < 10); let outpoint = OutPoint::from_str(&format!( "000000000000000000000000000000000000000000000000000000000000000{}:0", @@ -758,70 +714,46 @@ mod test { }, keychain: KeychainKind::External, is_spent: false, + derivation_index: 42, + confirmation_time, }), } } fn get_test_utxos() -> Vec { vec![ - utxo(100_000, 0), - utxo(FEE_AMOUNT as u64 - 40, 1), - utxo(200_000, 2), + utxo(100_000, 0, ConfirmationTime::Unconfirmed), + utxo(FEE_AMOUNT - 40, 1, ConfirmationTime::Unconfirmed), + utxo(200_000, 2, ConfirmationTime::Unconfirmed), ] } - fn setup_database_and_get_oldest_first_test_utxos( - database: &mut D, - ) -> Vec { + fn get_oldest_first_test_utxos() -> Vec { // ensure utxos are from different tx - let utxo1 = utxo(120_000, 1); - let utxo2 = utxo(80_000, 2); - let utxo3 = utxo(300_000, 3); - - // add tx to DB so utxos are sorted by blocktime asc - // utxos will be selected by the following order - // utxo1(blockheight 1) -> utxo2(blockheight 2), utxo3 (blockheight 3) - // timestamp are all set as the same to ensure that only block height is used in sorting - let utxo1_tx_details = TransactionDetails { - transaction: None, - txid: utxo1.utxo.outpoint().txid, - received: 1, - sent: 0, - fee: None, - confirmation_time: Some(BlockTime { + let utxo1 = utxo( + 120_000, + 1, + ConfirmationTime::Confirmed { height: 1, - timestamp: 1231006505, - }), - }; - - let utxo2_tx_details = TransactionDetails { - transaction: None, - txid: utxo2.utxo.outpoint().txid, - received: 1, - sent: 0, - fee: None, - confirmation_time: Some(BlockTime { + time: 1231006505, + }, + ); + let utxo2 = utxo( + 80_000, + 2, + 
ConfirmationTime::Confirmed { height: 2, - timestamp: 1231006505, - }), - }; - - let utxo3_tx_details = TransactionDetails { - transaction: None, - txid: utxo3.utxo.outpoint().txid, - received: 1, - sent: 0, - fee: None, - confirmation_time: Some(BlockTime { + time: 1231006505, + }, + ); + let utxo3 = utxo( + 300_000, + 3, + ConfirmationTime::Confirmed { height: 3, - timestamp: 1231006505, - }), - }; - - database.set_tx(&utxo1_tx_details).unwrap(); - database.set_tx(&utxo2_tx_details).unwrap(); - database.set_tx(&utxo3_tx_details).unwrap(); - + time: 1231006505, + }, + ); vec![utxo1, utxo2, utxo3] } @@ -841,6 +773,15 @@ mod test { }, keychain: KeychainKind::External, is_spent: false, + derivation_index: rng.next_u32(), + confirmation_time: if rng.gen_bool(0.5) { + ConfirmationTime::Confirmed { + height: rng.next_u32(), + time: rng.next_u64(), + } + } else { + ConfirmationTime::Unconfirmed + }, }), }); } @@ -861,6 +802,8 @@ mod test { }, keychain: KeychainKind::External, is_spent: false, + derivation_index: 42, + confirmation_time: ConfirmationTime::Unconfirmed, }), }; vec![utxo; utxos_number] @@ -878,13 +821,11 @@ mod test { #[test] fn test_largest_first_coin_selection_success() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 250_000 + FEE_AMOUNT; let result = LargestFirstCoinSelection::default() .coin_select( - &database, utxos, vec![], FeeRate::from_sat_per_vb(1.0), @@ -901,13 +842,11 @@ mod test { #[test] fn test_largest_first_coin_selection_use_all() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 20_000 + FEE_AMOUNT; let result = LargestFirstCoinSelection::default() .coin_select( - &database, utxos, vec![], FeeRate::from_sat_per_vb(1.0), @@ -924,13 +863,11 @@ mod test { #[test] fn test_largest_first_coin_selection_use_only_necessary() { let utxos = get_test_utxos(); - let database = 
MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 20_000 + FEE_AMOUNT; let result = LargestFirstCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -948,13 +885,11 @@ mod test { #[should_panic(expected = "InsufficientFunds")] fn test_largest_first_coin_selection_insufficient_funds() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 500_000 + FEE_AMOUNT; LargestFirstCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -968,13 +903,11 @@ mod test { #[should_panic(expected = "InsufficientFunds")] fn test_largest_first_coin_selection_insufficient_funds_high_fees() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 250_000 + FEE_AMOUNT; LargestFirstCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1000.0), @@ -986,14 +919,12 @@ mod test { #[test] fn test_oldest_first_coin_selection_success() { - let mut database = MemoryDatabase::default(); - let utxos = setup_database_and_get_oldest_first_test_utxos(&mut database); + let utxos = get_oldest_first_test_utxos(); let drain_script = Script::default(); let target_amount = 180_000 + FEE_AMOUNT; let result = OldestFirstCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -1007,75 +938,14 @@ mod test { assert_eq!(result.fee_amount, 136) } - #[test] - fn test_oldest_first_coin_selection_utxo_not_in_db_will_be_selected_last() { - // ensure utxos are from different tx - let utxo1 = utxo(120_000, 1); - let utxo2 = utxo(80_000, 2); - let utxo3 = utxo(300_000, 3); - let drain_script = Script::default(); - - let mut database = MemoryDatabase::default(); - - // add tx to DB so utxos are sorted by blocktime asc - // utxos will be selected by the following 
order - // utxo1(blockheight 1) -> utxo2(blockheight 2), utxo3 (not exist in DB) - // timestamp are all set as the same to ensure that only block height is used in sorting - let utxo1_tx_details = TransactionDetails { - transaction: None, - txid: utxo1.utxo.outpoint().txid, - received: 1, - sent: 0, - fee: None, - confirmation_time: Some(BlockTime { - height: 1, - timestamp: 1231006505, - }), - }; - - let utxo2_tx_details = TransactionDetails { - transaction: None, - txid: utxo2.utxo.outpoint().txid, - received: 1, - sent: 0, - fee: None, - confirmation_time: Some(BlockTime { - height: 2, - timestamp: 1231006505, - }), - }; - - database.set_tx(&utxo1_tx_details).unwrap(); - database.set_tx(&utxo2_tx_details).unwrap(); - - let target_amount = 180_000 + FEE_AMOUNT; - - let result = OldestFirstCoinSelection::default() - .coin_select( - &database, - vec![], - vec![utxo3, utxo1, utxo2], - FeeRate::from_sat_per_vb(1.0), - target_amount, - &drain_script, - ) - .unwrap(); - - assert_eq!(result.selected.len(), 2); - assert_eq!(result.selected_amount(), 200_000); - assert_eq!(result.fee_amount, 136) - } - #[test] fn test_oldest_first_coin_selection_use_all() { - let mut database = MemoryDatabase::default(); - let utxos = setup_database_and_get_oldest_first_test_utxos(&mut database); + let utxos = get_oldest_first_test_utxos(); let drain_script = Script::default(); let target_amount = 20_000 + FEE_AMOUNT; let result = OldestFirstCoinSelection::default() .coin_select( - &database, utxos, vec![], FeeRate::from_sat_per_vb(1.0), @@ -1091,14 +961,12 @@ mod test { #[test] fn test_oldest_first_coin_selection_use_only_necessary() { - let mut database = MemoryDatabase::default(); - let utxos = setup_database_and_get_oldest_first_test_utxos(&mut database); + let utxos = get_oldest_first_test_utxos(); let drain_script = Script::default(); let target_amount = 20_000 + FEE_AMOUNT; let result = OldestFirstCoinSelection::default() .coin_select( - &database, vec![], utxos, 
FeeRate::from_sat_per_vb(1.0), @@ -1115,14 +983,12 @@ mod test { #[test] #[should_panic(expected = "InsufficientFunds")] fn test_oldest_first_coin_selection_insufficient_funds() { - let mut database = MemoryDatabase::default(); - let utxos = setup_database_and_get_oldest_first_test_utxos(&mut database); + let utxos = get_oldest_first_test_utxos(); let drain_script = Script::default(); let target_amount = 600_000 + FEE_AMOUNT; OldestFirstCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -1135,15 +1001,13 @@ mod test { #[test] #[should_panic(expected = "InsufficientFunds")] fn test_oldest_first_coin_selection_insufficient_funds_high_fees() { - let mut database = MemoryDatabase::default(); - let utxos = setup_database_and_get_oldest_first_test_utxos(&mut database); + let utxos = get_oldest_first_test_utxos(); let target_amount: u64 = utxos.iter().map(|wu| wu.utxo.txout().value).sum::() - 50; let drain_script = Script::default(); OldestFirstCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1000.0), @@ -1159,14 +1023,12 @@ mod test { // select three outputs let utxos = generate_same_value_utxos(100_000, 20); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 250_000 + FEE_AMOUNT; let result = BranchAndBoundCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -1183,13 +1045,11 @@ mod test { #[test] fn test_bnb_coin_selection_required_are_enough() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 20_000 + FEE_AMOUNT; let result = BranchAndBoundCoinSelection::default() .coin_select( - &database, utxos.clone(), utxos, FeeRate::from_sat_per_vb(1.0), @@ -1206,13 +1066,11 @@ mod test { #[test] fn test_bnb_coin_selection_optional_are_enough() { let utxos = get_test_utxos(); - let database = 
MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 299756 + FEE_AMOUNT; let result = BranchAndBoundCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -1230,11 +1088,10 @@ mod test { #[ignore] fn test_bnb_coin_selection_required_not_enough() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let required = vec![utxos[0].clone()]; let mut optional = utxos[1..].to_vec(); - optional.push(utxo(500_000, 3)); + optional.push(utxo(500_000, 3, ConfirmationTime::Unconfirmed)); // Defensive assertions, for sanity and in case someone changes the test utxos vector. let amount: u64 = required.iter().map(|u| u.utxo.txout().value).sum(); @@ -1247,7 +1104,6 @@ mod test { let result = BranchAndBoundCoinSelection::default() .coin_select( - &database, required, optional, FeeRate::from_sat_per_vb(1.0), @@ -1265,13 +1121,11 @@ mod test { #[should_panic(expected = "InsufficientFunds")] fn test_bnb_coin_selection_insufficient_funds() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 500_000 + FEE_AMOUNT; BranchAndBoundCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -1285,13 +1139,11 @@ mod test { #[should_panic(expected = "InsufficientFunds")] fn test_bnb_coin_selection_insufficient_funds_high_fees() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 250_000 + FEE_AMOUNT; BranchAndBoundCoinSelection::default() .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1000.0), @@ -1304,13 +1156,11 @@ mod test { #[test] fn test_bnb_coin_selection_check_fee_rate() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let target_amount = 99932; // first utxo's effective value let result = 
BranchAndBoundCoinSelection::new(0) .coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(1.0), @@ -1330,7 +1180,6 @@ mod test { fn test_bnb_coin_selection_exact_match() { let seed = [0; 32]; let mut rng: StdRng = SeedableRng::from_seed(seed); - let database = MemoryDatabase::default(); for _i in 0..200 { let mut optional_utxos = generate_random_utxos(&mut rng, 16); @@ -1338,7 +1187,6 @@ mod test { let drain_script = Script::default(); let result = BranchAndBoundCoinSelection::new(0) .coin_select( - &database, vec![], optional_utxos, FeeRate::from_sat_per_vb(0.0), @@ -1520,11 +1368,9 @@ mod test { #[test] fn test_bnb_exclude_negative_effective_value() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let selection = BranchAndBoundCoinSelection::default().coin_select( - &database, vec![], utxos, FeeRate::from_sat_per_vb(10.0), @@ -1544,7 +1390,6 @@ mod test { #[test] fn test_bnb_include_negative_effective_value_when_required() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let (required, optional) = utxos @@ -1552,7 +1397,6 @@ mod test { .partition(|u| matches!(u, WeightedUtxo { utxo, .. 
} if utxo.txout().value < 1000)); let selection = BranchAndBoundCoinSelection::default().coin_select( - &database, required, optional, FeeRate::from_sat_per_vb(10.0), @@ -1572,11 +1416,9 @@ mod test { #[test] fn test_bnb_sum_of_effective_value_negative() { let utxos = get_test_utxos(); - let database = MemoryDatabase::default(); let drain_script = Script::default(); let selection = BranchAndBoundCoinSelection::default().coin_select( - &database, utxos, vec![], FeeRate::from_sat_per_vb(10_000.0), diff --git a/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs similarity index 80% rename from src/wallet/export.rs rename to crates/bdk/src/wallet/export.rs index d7c68f7ed..905638449 100644 --- a/src/wallet/export.rs +++ b/crates/bdk/src/wallet/export.rs @@ -20,7 +20,6 @@ //! ``` //! # use std::str::FromStr; //! # use bitcoin::*; -//! # use bdk::database::*; //! # use bdk::wallet::export::*; //! # use bdk::*; //! let import = r#"{ @@ -30,43 +29,39 @@ //! }"#; //! //! let import = FullyNodedExport::from_str(import)?; -//! let wallet = Wallet::new( +//! let wallet = Wallet::new_no_persist( //! &import.descriptor(), //! import.change_descriptor().as_ref(), //! Network::Testnet, -//! MemoryDatabase::default(), //! )?; -//! # Ok::<_, bdk::Error>(()) +//! # Ok::<_, Box>(()) //! ``` //! //! ### Export a `Wallet` //! ``` //! # use bitcoin::*; -//! # use bdk::database::*; //! # use bdk::wallet::export::*; //! # use bdk::*; -//! let wallet = Wallet::new( +//! let wallet = Wallet::new_no_persist( //! "wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/0/*)", //! Some("wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/1/*)"), //! Network::Testnet, -//! MemoryDatabase::default() //! )?; -//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true) -//! .map_err(ToString::to_string) -//! 
.map_err(bdk::Error::Generic)?; +//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true).unwrap(); //! //! println!("Exported: {}", export.to_string()); -//! # Ok::<_, bdk::Error>(()) +//! # Ok::<_, Box>(()) //! ``` -use std::str::FromStr; +use core::str::FromStr; +use alloc::string::{String, ToString}; +use bdk_chain::sparse_chain::ChainPosition; use serde::{Deserialize, Serialize}; use miniscript::descriptor::{ShInner, WshInner}; use miniscript::{Descriptor, ScriptContext, Terminal}; -use crate::database::BatchDatabase; use crate::types::KeychainKind; use crate::wallet::Wallet; @@ -116,7 +111,7 @@ impl FullyNodedExport { /// /// If the database is empty or `include_blockheight` is false, the `blockheight` field /// returned will be `0`. - pub fn export_wallet( + pub fn export_wallet( wallet: &Wallet, label: &str, include_blockheight: bool, @@ -131,14 +126,14 @@ impl FullyNodedExport { let descriptor = remove_checksum(descriptor); Self::is_compatible_with_core(&descriptor)?; - let blockheight = match wallet.database.borrow().iter_txs(false) { - _ if !include_blockheight => 0, - Err(_) => 0, - Ok(txs) => txs - .into_iter() - .filter_map(|tx| tx.confirmation_time.map(|c| c.height)) - .min() - .unwrap_or(0), + let blockheight = if include_blockheight { + wallet + .transactions() + .next() + .and_then(|(pos, _)| pos.height().into()) + .unwrap_or(0) + } else { + 0 }; let export = FullyNodedExport { @@ -147,11 +142,7 @@ impl FullyNodedExport { blockheight, }; - let change_descriptor = match wallet - .public_descriptor(KeychainKind::Internal) - .map_err(|_| "Invalid change descriptor")? 
- .is_some() - { + let change_descriptor = match wallet.public_descriptor(KeychainKind::Internal).is_some() { false => None, true => { let descriptor = wallet @@ -221,52 +212,43 @@ impl FullyNodedExport { #[cfg(test)] mod test { - use std::str::FromStr; + use core::str::FromStr; - use bitcoin::{Network, Txid}; + use bdk_chain::{BlockId, ConfirmationTime}; + use bitcoin::hashes::Hash; + use bitcoin::{BlockHash, Network, Transaction}; use super::*; - use crate::database::{memory::MemoryDatabase, BatchOperations}; - use crate::types::TransactionDetails; use crate::wallet::Wallet; - use crate::BlockTime; - - fn get_test_db() -> MemoryDatabase { - let mut db = MemoryDatabase::new(); - db.set_tx(&TransactionDetails { - transaction: None, - txid: Txid::from_str( - "4ddff1fa33af17f377f62b72357b43107c19110a8009b36fb832af505efed98a", - ) - .unwrap(), - received: 100_000, - sent: 0, - fee: Some(500), - confirmation_time: Some(BlockTime { - timestamp: 12345678, + fn get_test_wallet( + descriptor: &str, + change_descriptor: Option<&str>, + network: Network, + ) -> Wallet<()> { + let mut wallet = Wallet::new_no_persist(descriptor, change_descriptor, network).unwrap(); + let transaction = Transaction { + input: vec![], + output: vec![], + version: 0, + lock_time: bitcoin::PackedLockTime::ZERO, + }; + wallet + .insert_checkpoint(BlockId { height: 5001, - }), - }) - .unwrap(); - - db.set_tx(&TransactionDetails { - transaction: None, - txid: Txid::from_str( - "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + hash: BlockHash::all_zeros(), + }) + .unwrap(); + wallet + .insert_tx( + transaction, + ConfirmationTime::Confirmed { + height: 5000, + time: 0, + }, ) - .unwrap(), - received: 25_000, - sent: 0, - fee: Some(300), - confirmation_time: Some(BlockTime { - timestamp: 12345677, - height: 5000, - }), - }) - .unwrap(); - - db + .unwrap(); + wallet } #[test] @@ -274,13 +256,7 @@ mod test { let descriptor = 
"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)"; let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)"; - let wallet = Wallet::new( - descriptor, - Some(change_descriptor), - Network::Bitcoin, - get_test_db(), - ) - .unwrap(); + let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin); let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap(); assert_eq!(export.descriptor(), descriptor); @@ -298,7 +274,7 @@ mod test { let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)"; - let wallet = Wallet::new(descriptor, None, Network::Bitcoin, get_test_db()).unwrap(); + let wallet = get_test_wallet(descriptor, None, Network::Bitcoin); FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap(); } @@ -311,13 +287,7 @@ mod test { let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)"; let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/50'/0'/1/*)"; - let wallet = Wallet::new( - descriptor, - Some(change_descriptor), - Network::Bitcoin, - get_test_db(), - ) - .unwrap(); + let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin); FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap(); } @@ -334,13 +304,7 @@ mod test { [c98b1535/48'/0'/0'/2']tpubDCDi5W4sP6zSnzJeowy8rQDVhBdRARaPhK1axABi8V1661wEPeanpEXj4ZLAUEoikVtoWcyK26TKKJSecSfeKxwHCcRrge9k1ybuiL71z4a/1/*\ ))"; - let wallet = Wallet::new( - descriptor, - Some(change_descriptor), - Network::Testnet, - get_test_db(), - ) - .unwrap(); + let wallet = 
get_test_wallet(descriptor, Some(change_descriptor), Network::Testnet); let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap(); assert_eq!(export.descriptor(), descriptor); @@ -354,13 +318,7 @@ mod test { let descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/0/*)"; let change_descriptor = "wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44'/0'/0'/1/*)"; - let wallet = Wallet::new( - descriptor, - Some(change_descriptor), - Network::Bitcoin, - get_test_db(), - ) - .unwrap(); + let wallet = get_test_wallet(descriptor, Some(change_descriptor), Network::Bitcoin); let export = FullyNodedExport::export_wallet(&wallet, "Test Label", true).unwrap(); assert_eq!(export.to_string(), "{\"descriptor\":\"wpkh(xprv9s21ZrQH143K4CTb63EaMxja1YiTnSEWKMbn23uoEnAzxjdUJRQkazCAtzxGm4LSoTSVTptoV9RbchnKPW9HxKtZumdyxyikZFDLhogJ5Uj/44\'/0\'/0\'/0/*)\",\"blockheight\":5000,\"label\":\"Test Label\"}"); diff --git a/src/wallet/hardwaresigner.rs b/crates/bdk/src/wallet/hardwaresigner.rs similarity index 93% rename from src/wallet/hardwaresigner.rs rename to crates/bdk/src/wallet/hardwaresigner.rs index 230cd4cf0..dce1da8b8 100644 --- a/src/wallet/hardwaresigner.rs +++ b/crates/bdk/src/wallet/hardwaresigner.rs @@ -15,11 +15,10 @@ //! used with hardware wallets. //! ```no_run //! # use bdk::bitcoin::Network; -//! # use bdk::database::MemoryDatabase; //! # use bdk::signer::SignerOrdering; //! # use bdk::wallet::hardwaresigner::HWISigner; //! # use bdk::wallet::AddressIndex::New; -//! # use bdk::{FeeRate, KeychainKind, SignOptions, SyncOptions, Wallet}; +//! # use bdk::{FeeRate, KeychainKind, SignOptions, Wallet}; //! # use hwi::{types::HWIChain, HWIClient}; //! # use std::sync::Arc; //! # @@ -31,11 +30,10 @@ //! let first_device = devices.remove(0)?; //! 
let custom_signer = HWISigner::from_device(&first_device, HWIChain::Test)?; //! -//! # let mut wallet = Wallet::new( +//! # let mut wallet = Wallet::new_no_persist( //! # "", //! # None, //! # Network::Testnet, -//! # MemoryDatabase::default(), //! # )?; //! # //! // Adding the hardware signer to the BDK wallet diff --git a/crates/bdk/src/wallet/mod.rs b/crates/bdk/src/wallet/mod.rs new file mode 100644 index 000000000..67032cd3c --- /dev/null +++ b/crates/bdk/src/wallet/mod.rs @@ -0,0 +1,1803 @@ +// Bitcoin Dev Kit +// Written in 2020 by Alekos Filini +// +// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers +// +// This file is licensed under the Apache License, Version 2.0 or the MIT license +// , at your option. +// You may not use this file except in accordance with one or both of these +// licenses. + +//! Wallet +//! +//! This module defines the [`Wallet`] structure. +use crate::collections::{BTreeMap, HashMap, HashSet}; +use alloc::{ + boxed::Box, + string::{String, ToString}, + sync::Arc, + vec::Vec, +}; +pub use bdk_chain::keychain::Balance; +use bdk_chain::{ + chain_graph, + keychain::{persist, KeychainChangeSet, KeychainScan, KeychainTracker}, + sparse_chain, BlockId, ConfirmationTime, +}; +use bitcoin::consensus::encode::serialize; +use bitcoin::secp256k1::Secp256k1; +use bitcoin::util::psbt; +use bitcoin::{ + Address, BlockHash, EcdsaSighashType, LockTime, Network, OutPoint, SchnorrSighashType, Script, + Sequence, Transaction, TxOut, Txid, Witness, +}; +use core::fmt; +use core::ops::Deref; +use miniscript::psbt::{PsbtExt, PsbtInputExt, PsbtInputSatisfier}; + +#[allow(unused_imports)] +use log::{debug, error, info, trace}; + +pub mod coin_selection; +pub mod export; +pub mod signer; +pub mod tx_builder; +pub(crate) mod utils; + +#[cfg(feature = "hardware-signer")] +#[cfg_attr(docsrs, doc(cfg(feature = "hardware-signer")))] +pub mod hardwaresigner; + +pub use utils::IsDust; + +#[allow(deprecated)] +use coin_selection::DefaultCoinSelectionAlgorithm; 
+use signer::{SignOptions, SignerOrdering, SignersContainer, TransactionSigner}; +use tx_builder::{BumpFee, CreateTx, FeePolicy, TxBuilder, TxParams}; +use utils::{check_nsequence_rbf, After, Older, SecpCtx}; + +use crate::descriptor::policy::BuildSatisfaction; +use crate::descriptor::{ + calc_checksum, into_wallet_descriptor_checked, DerivedDescriptor, DescriptorMeta, + ExtendedDescriptor, ExtractPolicy, IntoWalletDescriptor, Policy, XKeyUtils, +}; +use crate::error::{Error, MiniscriptPsbtError}; +use crate::psbt::PsbtUtils; +use crate::signer::SignerError; +use crate::types::*; +use crate::wallet::coin_selection::Excess::{Change, NoChange}; + +const COINBASE_MATURITY: u32 = 100; + +/// A Bitcoin wallet +/// +/// The `Wallet` struct acts as a way of coherently interfacing with output descriptors and related transactions. +/// Its main components are: +/// +/// 1. output *descriptors* from which it can derive addresses. +/// 2. [`signer`]s that can contribute signatures to addresses instantiated from the descriptors. +/// +/// [`signer`]: crate::signer +#[derive(Debug)] +pub struct Wallet { + signers: Arc, + change_signers: Arc, + keychain_tracker: KeychainTracker, + persist: persist::Persist, + network: Network, + secp: SecpCtx, +} + +/// The update to a [`Wallet`] used in [`Wallet::apply_update`]. This is usually returned from blockchain data sources. +/// The type parameter `T` indicates the kind of transaction contained in the update. It's usually a [`bitcoin::Transaction`]. +pub type Update = KeychainScan; +/// Error indicating that something was wrong with an [`Update`]. +pub type UpdateError = chain_graph::UpdateError; +/// The changeset produced internally by applying an update +pub(crate) type ChangeSet = KeychainChangeSet; + +/// The address index selection strategy to use to derived an address from the wallet's external +/// descriptor. See [`Wallet::get_address`]. If you're unsure which one to use use `WalletIndex::New`. 
+#[derive(Debug)] +pub enum AddressIndex { + /// Return a new address after incrementing the current descriptor index. + New, + /// Return the address for the current descriptor index if it has not been used in a received + /// transaction. Otherwise return a new address as with [`AddressIndex::New`]. + /// + /// Use with caution, if the wallet has not yet detected an address has been used it could + /// return an already used address. This function is primarily meant for situations where the + /// caller is untrusted; for example when deriving donation addresses on-demand for a public + /// web page. + LastUnused, + /// Return the address for a specific descriptor index. Does not change the current descriptor + /// index used by `AddressIndex::New` and `AddressIndex::LastUsed`. + /// + /// Use with caution, if an index is given that is less than the current descriptor index + /// then the returned address may have already been used. + Peek(u32), +} + +/// A derived address and the index it was found at. +/// For convenience this automatically derefs to `Address` +#[derive(Debug, PartialEq, Eq)] +pub struct AddressInfo { + /// Child index of this address + pub index: u32, + /// Address + pub address: Address, + /// Type of keychain + pub keychain: KeychainKind, +} + +impl Deref for AddressInfo { + type Target = Address; + + fn deref(&self) -> &Self::Target { + &self.address + } +} + +impl fmt::Display for AddressInfo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.address) + } +} + +impl Wallet { + /// Creates a wallet that does not persist data. + pub fn new_no_persist( + descriptor: E, + change_descriptor: Option, + network: Network, + ) -> Result { + Self::new(descriptor, change_descriptor, (), network).map_err(|e| match e { + NewError::Descriptor(e) => e, + NewError::Persist(_) => unreachable!("no persistence so it can't fail"), + }) + } +} + +#[derive(Debug)] +/// Error returned from [`Wallet::new`] +pub enum NewError
<P>
{ + /// There was problem with the descriptors passed in + Descriptor(crate::descriptor::DescriptorError), + /// We were unable to load the wallet's data from the persistance backend + Persist(P), +} + +impl
<P> core::fmt::Display for NewError<P>
+where + P: core::fmt::Display, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + NewError::Descriptor(e) => e.fmt(f), + NewError::Persist(e) => { + write!(f, "failed to load wallet from persistance backend: {}", e) + } + } + } +} + +#[cfg(feature = "std")] +impl std::error::Error for NewError
<P>
{} + +impl Wallet { + /// Create a wallet from a `descriptor` (and an optional `change_descriptor`) and load related + /// transaction data from `db`. + pub fn new( + descriptor: E, + change_descriptor: Option, + mut db: D, + network: Network, + ) -> Result> + where + D: persist::PersistBackend, + { + let secp = Secp256k1::new(); + + let mut keychain_tracker = KeychainTracker::default(); + let (descriptor, keymap) = into_wallet_descriptor_checked(descriptor, &secp, network) + .map_err(NewError::Descriptor)?; + keychain_tracker + .txout_index + .add_keychain(KeychainKind::External, descriptor.clone()); + let signers = Arc::new(SignersContainer::build(keymap, &descriptor, &secp)); + let change_signers = match change_descriptor { + Some(desc) => { + let (change_descriptor, change_keymap) = + into_wallet_descriptor_checked(desc, &secp, network) + .map_err(NewError::Descriptor)?; + + let change_signers = Arc::new(SignersContainer::build( + change_keymap, + &change_descriptor, + &secp, + )); + + keychain_tracker + .txout_index + .add_keychain(KeychainKind::Internal, change_descriptor); + + change_signers + } + None => Arc::new(SignersContainer::new()), + }; + + db.load_into_keychain_tracker(&mut keychain_tracker) + .map_err(NewError::Persist)?; + + let persist = persist::Persist::new(db); + + Ok(Wallet { + signers, + change_signers, + network, + persist, + secp, + keychain_tracker, + }) + } + + /// Get the Bitcoin network the wallet is using. + pub fn network(&self) -> Network { + self.network + } + + /// Iterator over all keychains in this wallet + pub fn keychanins(&self) -> &BTreeMap { + self.keychain_tracker.txout_index.keychains() + } + + /// Return a derived address using the external descriptor, see [`AddressIndex`] for + /// available address index selection strategies. If none of the keys in the descriptor are derivable + /// (i.e. does not end with /*) then the same address will always be returned for any [`AddressIndex`]. 
+ pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo + where + D: persist::PersistBackend, + { + self._get_address(address_index, KeychainKind::External) + } + + /// Return a derived address using the internal (change) descriptor. + /// + /// If the wallet doesn't have an internal descriptor it will use the external descriptor. + /// + /// see [`AddressIndex`] for available address index selection strategies. If none of the keys + /// in the descriptor are derivable (i.e. does not end with /*) then the same address will always + /// be returned for any [`AddressIndex`]. + pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo + where + D: persist::PersistBackend, + { + self._get_address(address_index, KeychainKind::Internal) + } + + fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo + where + D: persist::PersistBackend, + { + let keychain = self.map_keychain(keychain); + let txout_index = &mut self.keychain_tracker.txout_index; + let (index, spk) = match address_index { + AddressIndex::New => { + let ((index, spk), changeset) = txout_index.reveal_next_spk(&keychain); + let spk = spk.clone(); + + self.persist.stage(changeset.into()); + self.persist.commit().expect("TODO"); + (index, spk) + } + AddressIndex::LastUnused => { + let index = txout_index.last_revealed_index(&keychain); + match index { + Some(index) if !txout_index.is_used(&(keychain, index)) => ( + index, + txout_index + .spk_at_index(&(keychain, index)) + .expect("must exist") + .clone(), + ), + _ => return self._get_address(AddressIndex::New, keychain), + } + } + AddressIndex::Peek(index) => txout_index + .spks_of_keychain(&keychain) + .take(index as usize + 1) + .last() + .unwrap(), + }; + AddressInfo { + index, + address: Address::from_script(&spk, self.network) + .expect("descriptor must have address form"), + keychain, + } + } + + /// Return whether or not a `script` is part of this wallet (either internal 
or external) + pub fn is_mine(&self, script: &Script) -> bool { + self.keychain_tracker + .txout_index + .index_of_spk(script) + .is_some() + } + + /// Finds how the wallet derived the script pubkey `spk`. + /// + /// Will only return `Some(_)` if the wallet has given out the spk. + pub fn derivation_of_spk(&self, spk: &Script) -> Option<(KeychainKind, u32)> { + self.keychain_tracker.txout_index.index_of_spk(spk).copied() + } + + /// Return the list of unspent outputs of this wallet + pub fn list_unspent(&self) -> Vec { + self.keychain_tracker + .full_utxos() + .map(|(&(keychain, derivation_index), utxo)| LocalUtxo { + outpoint: utxo.outpoint, + txout: utxo.txout, + keychain, + is_spent: false, + derivation_index, + confirmation_time: utxo.chain_position, + }) + .collect() + } + + /// Get all the checkpoints the wallet is currently storing indexed by height. + pub fn checkpoints(&self) -> &BTreeMap { + self.keychain_tracker.chain().checkpoints() + } + + /// Returns the latest checkpoint. + pub fn latest_checkpoint(&self) -> Option { + self.keychain_tracker.chain().latest_checkpoint() + } + + /// Returns a iterators of all the script pubkeys for the `Internal` and External` variants in `KeychainKind`. + /// + /// This is inteded to be used when doing a full scan of your addresses (e.g. after restoring + /// from seed words). You pass the `BTreeMap` of iterators to a blockchain data source (e.g. + /// electrum server) which will go through each address until it reaches a *stop grap*. + /// + /// Note carefully that iterators go over **all** script pubkeys on the keychains (not what + /// script pubkeys the wallet is storing internally). + pub fn spks_of_all_keychains( + &self, + ) -> BTreeMap + Clone> { + self.keychain_tracker.txout_index.spks_of_all_keychains() + } + + /// Gets an iterator over all the script pubkeys in a single keychain. 
+ /// + /// See [`spks_of_all_keychains`] for more documentation + /// + /// [`spks_of_all_keychains`]: Self::spks_of_all_keychains + pub fn spks_of_keychain( + &self, + keychain: KeychainKind, + ) -> impl Iterator + Clone { + self.keychain_tracker + .txout_index + .spks_of_keychain(&keychain) + } + + /// Returns the utxo owned by this wallet corresponding to `outpoint` if it exists in the + /// wallet's database. + pub fn get_utxo(&self, op: OutPoint) -> Option { + self.keychain_tracker + .full_utxos() + .find_map(|(&(keychain, derivation_index), txo)| { + if op == txo.outpoint { + Some(LocalUtxo { + outpoint: txo.outpoint, + txout: txo.txout, + keychain, + is_spent: txo.spent_by.is_none(), + derivation_index, + confirmation_time: txo.chain_position, + }) + } else { + None + } + }) + } + + /// Return a single transactions made and received by the wallet + /// + /// Optionally fill the [`TransactionDetails::transaction`] field with the raw transaction if + /// `include_raw` is `true`. + pub fn get_tx(&self, txid: Txid, include_raw: bool) -> Option { + let (&confirmation_time, tx) = self.keychain_tracker.chain_graph().get_tx_in_chain(txid)?; + let graph = self.keychain_tracker.graph(); + let txout_index = &self.keychain_tracker.txout_index; + + let received = tx + .output + .iter() + .map(|txout| { + if txout_index.index_of_spk(&txout.script_pubkey).is_some() { + txout.value + } else { + 0 + } + }) + .sum(); + + let sent = tx + .input + .iter() + .map(|txin| { + if let Some((_, txout)) = txout_index.txout(txin.previous_output) { + txout.value + } else { + 0 + } + }) + .sum(); + + let inputs = tx + .input + .iter() + .map(|txin| { + graph + .get_txout(txin.previous_output) + .map(|txout| txout.value) + }) + .sum::>(); + let outputs = tx.output.iter().map(|txout| txout.value).sum(); + let fee = inputs.map(|inputs| inputs.saturating_sub(outputs)); + + Some(TransactionDetails { + transaction: if include_raw { Some(tx.clone()) } else { None }, + txid, + received, + sent, 
+ fee, + confirmation_time, + }) + } + + /// Add a new checkpoint to the wallet's internal view of the chain. + /// This stages but does not [`commit`] the change. + /// + /// Returns whether anything changed with the insertion (e.g. `false` if checkpoint was already + /// there). + /// + /// [`commit`]: Self::commit + pub fn insert_checkpoint( + &mut self, + block_id: BlockId, + ) -> Result { + let changeset = self.keychain_tracker.insert_checkpoint(block_id)?; + let changed = changeset.is_empty(); + self.persist.stage(changeset); + Ok(changed) + } + + /// Add a transaction to the wallet's internal view of the chain. + /// This stages but does not [`commit`] the change. + /// + /// There are a number reasons `tx` could be rejected with an `Err(_)`. The most important one + /// is that the transaction is at a height that is greater than [`latest_checkpoint`]. Therefore + /// you should use [`insert_checkpoint`] to insert new checkpoints before manually inserting new + /// transactions. + /// + /// Returns whether anything changed with the transaction insertion (e.g. `false` if the + /// transaction was already inserted at the same position). + /// + /// [`commit`]: Self::commit + /// [`latest_checkpoint`]: Self::latest_checkpoint + /// [`insert_checkpoint`]: Self::insert_checkpoint + pub fn insert_tx( + &mut self, + tx: Transaction, + position: ConfirmationTime, + ) -> Result> { + let changeset = self.keychain_tracker.insert_tx(tx, position)?; + let changed = changeset.is_empty(); + self.persist.stage(changeset); + Ok(changed) + } + + #[deprecated(note = "use Wallet::transactions instead")] + /// Deprecated. use `Wallet::transactions` instead. 
+ pub fn list_transactions(&self, include_raw: bool) -> Vec { + self.keychain_tracker + .chain() + .txids() + .map(|&(_, txid)| self.get_tx(txid, include_raw).expect("must exist")) + .collect() + } + + /// Iterate over the transactions in the wallet in order of ascending confirmation time with + /// unconfirmed transactions last. + pub fn transactions( + &self, + ) -> impl DoubleEndedIterator + '_ { + self.keychain_tracker + .chain_graph() + .transactions_in_chain() + .map(|(pos, tx)| (*pos, tx)) + } + + /// Return the balance, separated into available, trusted-pending, untrusted-pending and immature + /// values. + pub fn get_balance(&self) -> Balance { + self.keychain_tracker.balance(|keychain| match keychain { + KeychainKind::External => false, + KeychainKind::Internal => true, + }) + } + + /// Add an external signer + /// + /// See [the `signer` module](signer) for an example. + pub fn add_signer( + &mut self, + keychain: KeychainKind, + ordering: SignerOrdering, + signer: Arc, + ) { + let signers = match keychain { + KeychainKind::External => Arc::make_mut(&mut self.signers), + KeychainKind::Internal => Arc::make_mut(&mut self.change_signers), + }; + + signers.add_external(signer.id(&self.secp), ordering, signer); + } + + /// Get the signers + /// + /// ## Example + /// + /// ``` + /// # use bdk::{Wallet, KeychainKind}; + /// # use bdk::bitcoin::Network; + /// let wallet = Wallet::new_no_persist("wpkh(tprv8ZgxMBicQKsPe73PBRSmNbTfbcsZnwWhz5eVmhHpi31HW29Z7mc9B4cWGRQzopNUzZUT391DeDJxL2PefNunWyLgqCKRMDkU1s2s8bAfoSk/84'/0'/0'/0/*)", None, Network::Testnet)?; + /// for secret_key in wallet.get_signers(KeychainKind::External).signers().iter().filter_map(|s| s.descriptor_secret_key()) { + /// // secret_key: tprv8ZgxMBicQKsPe73PBRSmNbTfbcsZnwWhz5eVmhHpi31HW29Z7mc9B4cWGRQzopNUzZUT391DeDJxL2PefNunWyLgqCKRMDkU1s2s8bAfoSk/84'/0'/0'/0/* + /// println!("secret_key: {}", secret_key); + /// } + /// + /// Ok::<(), Box>(()) + /// ``` + pub fn get_signers(&self, keychain: 
KeychainKind) -> Arc { + match keychain { + KeychainKind::External => Arc::clone(&self.signers), + KeychainKind::Internal => Arc::clone(&self.change_signers), + } + } + + /// Start building a transaction. + /// + /// This returns a blank [`TxBuilder`] from which you can specify the parameters for the transaction. + /// + /// ## Example + /// + /// ``` + /// # use std::str::FromStr; + /// # use bitcoin::*; + /// # use bdk::*; + /// # let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)"; + /// # let mut wallet = doctest_wallet!(); + /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); + /// let (psbt, details) = { + /// let mut builder = wallet.build_tx(); + /// builder + /// .add_recipient(to_address.script_pubkey(), 50_000); + /// builder.finish()? + /// }; + /// + /// // sign and broadcast ... + /// # Ok::<(), bdk::Error>(()) + /// ``` + /// + /// [`TxBuilder`]: crate::TxBuilder + pub fn build_tx(&mut self) -> TxBuilder<'_, D, DefaultCoinSelectionAlgorithm, CreateTx> { + TxBuilder { + wallet: alloc::rc::Rc::new(core::cell::RefCell::new(self)), + params: TxParams::default(), + coin_selection: DefaultCoinSelectionAlgorithm::default(), + phantom: core::marker::PhantomData, + } + } + + pub(crate) fn create_tx( + &mut self, + coin_selection: Cs, + params: TxParams, + ) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error> + where + D: persist::PersistBackend, + { + let external_descriptor = self + .keychain_tracker + .txout_index + .keychains() + .get(&KeychainKind::External) + .expect("must exist"); + let internal_descriptor = self + .keychain_tracker + .txout_index + .keychains() + .get(&KeychainKind::Internal); + + let external_policy = external_descriptor + .extract_policy(&self.signers, BuildSatisfaction::None, &self.secp)? 
+ .unwrap(); + let internal_policy = internal_descriptor + .as_ref() + .map(|desc| { + Ok::<_, Error>( + desc.extract_policy(&self.change_signers, BuildSatisfaction::None, &self.secp)? + .unwrap(), + ) + }) + .transpose()?; + + // The policy allows spending external outputs, but it requires a policy path that hasn't been + // provided + if params.change_policy != tx_builder::ChangeSpendPolicy::OnlyChange + && external_policy.requires_path() + && params.external_policy_path.is_none() + { + return Err(Error::SpendingPolicyRequired(KeychainKind::External)); + }; + // Same for the internal_policy path, if present + if let Some(internal_policy) = &internal_policy { + if params.change_policy != tx_builder::ChangeSpendPolicy::ChangeForbidden + && internal_policy.requires_path() + && params.internal_policy_path.is_none() + { + return Err(Error::SpendingPolicyRequired(KeychainKind::Internal)); + }; + } + + let external_requirements = external_policy.get_condition( + params + .external_policy_path + .as_ref() + .unwrap_or(&BTreeMap::new()), + )?; + let internal_requirements = internal_policy + .map(|policy| { + Ok::<_, Error>( + policy.get_condition( + params + .internal_policy_path + .as_ref() + .unwrap_or(&BTreeMap::new()), + )?, + ) + }) + .transpose()?; + + let requirements = + external_requirements.merge(&internal_requirements.unwrap_or_default())?; + debug!("Policy requirements: {:?}", requirements); + + let version = match params.version { + Some(tx_builder::Version(0)) => { + return Err(Error::Generic("Invalid version `0`".into())) + } + Some(tx_builder::Version(1)) if requirements.csv.is_some() => { + return Err(Error::Generic( + "TxBuilder requested version `1`, but at least `2` is needed to use OP_CSV" + .into(), + )) + } + Some(tx_builder::Version(x)) => x, + None if requirements.csv.is_some() => 2, + _ => 1, + }; + + // We use a match here instead of a map_or_else as it's way more readable :) + let current_height = match params.current_height { + // If they 
didn't tell us the current height, we assume it's the latest sync height. + None => self + .keychain_tracker + .chain() + .latest_checkpoint() + .and_then(|cp| cp.height.into()) + .map(|height| LockTime::from_height(height).expect("Invalid height")), + h => h, + }; + + let lock_time = match params.locktime { + // When no nLockTime is specified, we try to prevent fee sniping, if possible + None => { + // Fee sniping can be partially prevented by setting the timelock + // to current_height. If we don't know the current_height, + // we default to 0. + let fee_sniping_height = current_height.unwrap_or(LockTime::ZERO); + + // We choose the biggest between the required nlocktime and the fee sniping + // height + match requirements.timelock { + // No requirement, just use the fee_sniping_height + None => fee_sniping_height, + // There's a block-based requirement, but the value is lower than the fee_sniping_height + Some(value @ LockTime::Blocks(_)) if value < fee_sniping_height => fee_sniping_height, + // There's a time-based requirement or a block-based requirement greater + // than the fee_sniping_height use that value + Some(value) => value, + } + } + // Specific nLockTime required and we have no constraints, so just set to that value + Some(x) if requirements.timelock.is_none() => x, + // Specific nLockTime required and it's compatible with the constraints + Some(x) if requirements.timelock.unwrap().is_same_unit(x) && x >= requirements.timelock.unwrap() => x, + // Invalid nLockTime required + Some(x) => return Err(Error::Generic(format!("TxBuilder requested timelock of `{:?}`, but at least `{:?}` is required to spend from this script", x, requirements.timelock.unwrap()))) + }; + + let n_sequence = match (params.rbf, requirements.csv) { + // No RBF or CSV but there's an nLockTime, so the nSequence cannot be final + (None, None) if lock_time != LockTime::ZERO => Sequence::ENABLE_LOCKTIME_NO_RBF, + // No RBF, CSV or nLockTime, make the transaction final + (None, None) => 
Sequence::MAX, + + // No RBF requested, use the value from CSV. Note that this value is by definition + // non-final, so even if a timelock is enabled this nSequence is fine, hence why we + // don't bother checking for it here. The same is true for all the other branches below + (None, Some(csv)) => csv, + + // RBF with a specific value but that value is too high + (Some(tx_builder::RbfValue::Value(rbf)), _) if !rbf.is_rbf() => { + return Err(Error::Generic( + "Cannot enable RBF with a nSequence >= 0xFFFFFFFE".into(), + )) + } + // RBF with a specific value requested, but the value is incompatible with CSV + (Some(tx_builder::RbfValue::Value(rbf)), Some(csv)) + if !check_nsequence_rbf(rbf, csv) => + { + return Err(Error::Generic(format!( + "Cannot enable RBF with nSequence `{:?}` given a required OP_CSV of `{:?}`", + rbf, csv + ))) + } + + // RBF enabled with the default value with CSV also enabled. CSV takes precedence + (Some(tx_builder::RbfValue::Default), Some(csv)) => csv, + // Valid RBF, either default or with a specific value. 
We ignore the `CSV` value + // because we've already checked it before + (Some(rbf), _) => rbf.get_value(), + }; + + let (fee_rate, mut fee_amount) = match params + .fee_policy + .as_ref() + .unwrap_or(&FeePolicy::FeeRate(FeeRate::default())) + { + //FIXME: see https://github.com/bitcoindevkit/bdk/issues/256 + FeePolicy::FeeAmount(fee) => { + if let Some(previous_fee) = params.bumping_fee { + if *fee < previous_fee.absolute { + return Err(Error::FeeTooLow { + required: previous_fee.absolute, + }); + } + } + (FeeRate::from_sat_per_vb(0.0), *fee) + } + FeePolicy::FeeRate(rate) => { + if let Some(previous_fee) = params.bumping_fee { + let required_feerate = FeeRate::from_sat_per_vb(previous_fee.rate + 1.0); + if *rate < required_feerate { + return Err(Error::FeeRateTooLow { + required: required_feerate, + }); + } + } + (*rate, 0) + } + }; + + let mut tx = Transaction { + version, + lock_time: lock_time.into(), + input: vec![], + output: vec![], + }; + + if params.manually_selected_only && params.utxos.is_empty() { + return Err(Error::NoUtxosSelected); + } + + // we keep it as a float while we accumulate it, and only round it at the end + let mut outgoing: u64 = 0; + let mut received: u64 = 0; + + let recipients = params.recipients.iter().map(|(r, v)| (r, *v)); + + for (index, (script_pubkey, value)) in recipients.enumerate() { + if !params.allow_dust + && value.is_dust(script_pubkey) + && !script_pubkey.is_provably_unspendable() + { + return Err(Error::OutputBelowDustLimit(index)); + } + + if self.is_mine(script_pubkey) { + received += value; + } + + let new_out = TxOut { + script_pubkey: script_pubkey.clone(), + value, + }; + + tx.output.push(new_out); + + outgoing += value; + } + + fee_amount += fee_rate.fee_wu(tx.weight()); + + // Segwit transactions' header is 2WU larger than legacy txs' header, + // as they contain a witness marker (1WU) and a witness flag (1WU) (see BIP144). 
+ // At this point we really don't know if the resulting transaction will be segwit + // or legacy, so we just add this 2WU to the fee_amount - overshooting the fee amount + // is better than undershooting it. + // If we pass a fee_amount that is slightly higher than the final fee_amount, we + // end up with a transaction with a slightly higher fee rate than the requested one. + // If, instead, we undershoot, we may end up with a feerate lower than the requested one + // - we might come up with non broadcastable txs! + fee_amount += fee_rate.fee_wu(2); + + if params.change_policy != tx_builder::ChangeSpendPolicy::ChangeAllowed + && internal_descriptor.is_none() + { + return Err(Error::Generic( + "The `change_policy` can be set only if the wallet has a change_descriptor".into(), + )); + } + + let (required_utxos, optional_utxos) = self.preselect_utxos( + params.change_policy, + ¶ms.unspendable, + params.utxos.clone(), + params.drain_wallet, + params.manually_selected_only, + params.bumping_fee.is_some(), // we mandate confirmed transactions if we're bumping the fee + current_height.map(LockTime::to_consensus_u32), + ); + + // get drain script + let drain_script = match params.drain_to { + Some(ref drain_recipient) => drain_recipient.clone(), + None => { + let change_keychain = self.map_keychain(KeychainKind::Internal); + let ((index, spk), changeset) = self + .keychain_tracker + .txout_index + .next_unused_spk(&change_keychain); + let spk = spk.clone(); + self.keychain_tracker + .txout_index + .mark_used(&change_keychain, index); + self.persist.stage(changeset.into()); + self.persist.commit().expect("TODO"); + spk + } + }; + + let coin_selection = coin_selection.coin_select( + required_utxos, + optional_utxos, + fee_rate, + outgoing + fee_amount, + &drain_script, + )?; + fee_amount += coin_selection.fee_amount; + let excess = &coin_selection.excess; + + tx.input = coin_selection + .selected + .iter() + .map(|u| bitcoin::TxIn { + previous_output: u.outpoint(), + 
script_sig: Script::default(), + sequence: n_sequence, + witness: Witness::new(), + }) + .collect(); + + if tx.output.is_empty() { + // Uh oh, our transaction has no outputs. + // We allow this when: + // - We have a drain_to address and the utxos we must spend (this happens, + // for example, when we RBF) + // - We have a drain_to address and drain_wallet set + // Otherwise, we don't know who we should send the funds to, and how much + // we should send! + if params.drain_to.is_some() && (params.drain_wallet || !params.utxos.is_empty()) { + if let NoChange { + dust_threshold, + remaining_amount, + change_fee, + } = excess + { + return Err(Error::InsufficientFunds { + needed: *dust_threshold, + available: remaining_amount.saturating_sub(*change_fee), + }); + } + } else { + return Err(Error::NoRecipients); + } + } + + match excess { + NoChange { + remaining_amount, .. + } => fee_amount += remaining_amount, + Change { amount, fee } => { + if self.is_mine(&drain_script) { + received += amount; + } + fee_amount += fee; + + // create drain output + let drain_output = TxOut { + value: *amount, + script_pubkey: drain_script, + }; + + // TODO: We should pay attention when adding a new output: this might increase + // the lenght of the "number of vouts" parameter by 2 bytes, potentially making + // our feerate too low + tx.output.push(drain_output); + } + }; + + // sort input/outputs according to the chosen algorithm + params.ordering.sort_tx(&mut tx); + + let txid = tx.txid(); + let sent = coin_selection.local_selected_amount(); + let psbt = self.complete_transaction(tx, coin_selection.selected, params)?; + + let transaction_details = TransactionDetails { + transaction: None, + txid, + confirmation_time: ConfirmationTime::Unconfirmed, + received, + sent, + fee: Some(fee_amount), + }; + + Ok((psbt, transaction_details)) + } + + /// Bump the fee of a transaction previously created with this wallet. 
+ /// + /// Returns an error if the transaction is already confirmed or doesn't explicitly signal + /// *replace by fee* (RBF). If the transaction can be fee bumped then it returns a [`TxBuilder`] + /// pre-populated with the inputs and outputs of the original transaction. + /// + /// ## Example + /// + /// ```no_run + /// # // TODO: remove norun -- bumping fee seems to need the tx in the wallet database first. + /// # use std::str::FromStr; + /// # use bitcoin::*; + /// # use bdk::*; + /// # let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)"; + /// # let mut wallet = doctest_wallet!(); + /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); + /// let (mut psbt, _) = { + /// let mut builder = wallet.build_tx(); + /// builder + /// .add_recipient(to_address.script_pubkey(), 50_000) + /// .enable_rbf(); + /// builder.finish()? + /// }; + /// let _ = wallet.sign(&mut psbt, SignOptions::default())?; + /// let tx = psbt.extract_tx(); + /// // broadcast tx but it's taking too long to confirm so we want to bump the fee + /// let (mut psbt, _) = { + /// let mut builder = wallet.build_fee_bump(tx.txid())?; + /// builder + /// .fee_rate(FeeRate::from_sat_per_vb(5.0)); + /// builder.finish()? 
+ /// }; + /// + /// let _ = wallet.sign(&mut psbt, SignOptions::default())?; + /// let fee_bumped_tx = psbt.extract_tx(); + /// // broadcast fee_bumped_tx to replace original + /// # Ok::<(), bdk::Error>(()) + /// ``` + // TODO: support for merging multiple transactions while bumping the fees + pub fn build_fee_bump( + &mut self, + txid: Txid, + ) -> Result, Error> { + let graph = self.keychain_tracker.graph(); + let txout_index = &self.keychain_tracker.txout_index; + let tx_and_height = self.keychain_tracker.chain_graph().get_tx_in_chain(txid); + let mut tx = match tx_and_height { + None => return Err(Error::TransactionNotFound), + Some((ConfirmationTime::Confirmed { .. }, _tx)) => { + return Err(Error::TransactionConfirmed) + } + Some((_, tx)) => tx.clone(), + }; + + if !tx + .input + .iter() + .any(|txin| txin.sequence.to_consensus_u32() <= 0xFFFFFFFD) + { + return Err(Error::IrreplaceableTransaction); + } + + let fee = graph.calculate_fee(&tx).ok_or(Error::FeeRateUnavailable)?; + if fee < 0 { + // It's available but it's wrong so let's say it's unavailable + return Err(Error::FeeRateUnavailable)?; + } + let fee = fee as u64; + let feerate = FeeRate::from_wu(fee, tx.weight()); + + // remove the inputs from the tx and process them + let original_txin = tx.input.drain(..).collect::>(); + let original_utxos = original_txin + .iter() + .map(|txin| -> Result<_, Error> { + let (&confirmation_time, prev_tx) = self + .keychain_tracker + .chain_graph() + .get_tx_in_chain(txin.previous_output.txid) + .ok_or(Error::UnknownUtxo)?; + let txout = &prev_tx.output[txin.previous_output.vout as usize]; + + let weighted_utxo = match txout_index.index_of_spk(&txout.script_pubkey) { + Some(&(keychain, derivation_index)) => { + let satisfaction_weight = self + .get_descriptor_for_keychain(keychain) + .max_satisfaction_weight() + .unwrap(); + WeightedUtxo { + utxo: Utxo::Local(LocalUtxo { + outpoint: txin.previous_output, + txout: txout.clone(), + keychain, + is_spent: true, + 
derivation_index, + confirmation_time, + }), + satisfaction_weight, + } + } + None => { + let satisfaction_weight = + serialize(&txin.script_sig).len() * 4 + serialize(&txin.witness).len(); + WeightedUtxo { + satisfaction_weight, + utxo: Utxo::Foreign { + outpoint: txin.previous_output, + psbt_input: Box::new(psbt::Input { + witness_utxo: Some(txout.clone()), + non_witness_utxo: Some(prev_tx.clone()), + ..Default::default() + }), + }, + } + } + }; + + Ok(weighted_utxo) + }) + .collect::, _>>()?; + + if tx.output.len() > 1 { + let mut change_index = None; + for (index, txout) in tx.output.iter().enumerate() { + let change_type = self.map_keychain(KeychainKind::Internal); + match txout_index.index_of_spk(&txout.script_pubkey) { + Some(&(keychain, _)) if keychain == change_type => change_index = Some(index), + _ => {} + } + } + + if let Some(change_index) = change_index { + tx.output.remove(change_index); + } + } + + let params = TxParams { + // TODO: figure out what rbf option should be? + version: Some(tx_builder::Version(tx.version)), + recipients: tx + .output + .into_iter() + .map(|txout| (txout.script_pubkey, txout.value)) + .collect(), + utxos: original_utxos, + bumping_fee: Some(tx_builder::PreviousFee { + absolute: fee, + rate: feerate.as_sat_per_vb(), + }), + ..Default::default() + }; + + Ok(TxBuilder { + wallet: alloc::rc::Rc::new(core::cell::RefCell::new(self)), + params, + coin_selection: DefaultCoinSelectionAlgorithm::default(), + phantom: core::marker::PhantomData, + }) + } + + /// Sign a transaction with all the wallet's signers, in the order specified by every signer's + /// [`SignerOrdering`]. This function returns the `Result` type with an encapsulated `bool` that has the value true if the PSBT was finalized, or false otherwise. + /// + /// The [`SignOptions`] can be used to tweak the behavior of the software signers, and the way + /// the transaction is finalized at the end. 
Note that it can't be guaranteed that *every* + /// signers will follow the options, but the "software signers" (WIF keys and `xprv`) defined + /// in this library will. + /// + /// ## Example + /// + /// ``` + /// # use std::str::FromStr; + /// # use bitcoin::*; + /// # use bdk::*; + /// # let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)"; + /// # let mut wallet = doctest_wallet!(); + /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); + /// let (mut psbt, _) = { + /// let mut builder = wallet.build_tx(); + /// builder.add_recipient(to_address.script_pubkey(), 50_000); + /// builder.finish()? + /// }; + /// let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + /// assert!(finalized, "we should have signed all the inputs"); + /// # Ok::<(), bdk::Error>(()) + pub fn sign( + &self, + psbt: &mut psbt::PartiallySignedTransaction, + sign_options: SignOptions, + ) -> Result { + // This adds all the PSBT metadata for the inputs, which will help us later figure out how + // to derive our keys + self.update_psbt_with_descriptor(psbt)?; + + // If we aren't allowed to use `witness_utxo`, ensure that every input (except p2tr and finalized ones) + // has the `non_witness_utxo` + if !sign_options.trust_witness_utxo + && psbt + .inputs + .iter() + .filter(|i| i.final_script_witness.is_none() && i.final_script_sig.is_none()) + .filter(|i| i.tap_internal_key.is_none() && i.tap_merkle_root.is_none()) + .any(|i| i.non_witness_utxo.is_none()) + { + return Err(Error::Signer(signer::SignerError::MissingNonWitnessUtxo)); + } + + // If the user hasn't explicitly opted-in, refuse to sign the transaction unless every input + // is using `SIGHASH_ALL` or `SIGHASH_DEFAULT` for taproot + if !sign_options.allow_all_sighashes + && !psbt.inputs.iter().all(|i| { + i.sighash_type.is_none() + || i.sighash_type == Some(EcdsaSighashType::All.into()) + || 
i.sighash_type == Some(SchnorrSighashType::All.into()) + || i.sighash_type == Some(SchnorrSighashType::Default.into()) + }) + { + return Err(Error::Signer(signer::SignerError::NonStandardSighash)); + } + + for signer in self + .signers + .signers() + .iter() + .chain(self.change_signers.signers().iter()) + { + signer.sign_transaction(psbt, &sign_options, &self.secp)?; + } + + // attempt to finalize + if sign_options.try_finalize { + self.finalize_psbt(psbt, sign_options) + } else { + Ok(false) + } + } + + /// Return the spending policies for the wallet's descriptor + pub fn policies(&self, keychain: KeychainKind) -> Result, Error> { + let signers = match keychain { + KeychainKind::External => &self.signers, + KeychainKind::Internal => &self.change_signers, + }; + + match self.public_descriptor(keychain) { + Some(desc) => Ok(desc.extract_policy(signers, BuildSatisfaction::None, &self.secp)?), + None => Ok(None), + } + } + + /// Return the "public" version of the wallet's descriptor, meaning a new descriptor that has + /// the same structure but with every secret key removed + /// + /// This can be used to build a watch-only version of a wallet + pub fn public_descriptor(&self, keychain: KeychainKind) -> Option<&ExtendedDescriptor> { + self.keychain_tracker.txout_index.keychains().get(&keychain) + } + + /// Finalize a PSBT, i.e., for each input determine if sufficient data is available to pass + /// validation and construct the respective `scriptSig` or `scriptWitness`. Please refer to + /// [BIP174](https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki#Input_Finalizer) + /// for further information. + /// + /// Returns `true` if the PSBT could be finalized, and `false` otherwise. + /// + /// The [`SignOptions`] can be used to tweak the behavior of the finalizer. 
+ pub fn finalize_psbt( + &self, + psbt: &mut psbt::PartiallySignedTransaction, + sign_options: SignOptions, + ) -> Result { + let tx = &psbt.unsigned_tx; + let mut finished = true; + + for (n, input) in tx.input.iter().enumerate() { + let psbt_input = &psbt + .inputs + .get(n) + .ok_or(Error::Signer(SignerError::InputIndexOutOfRange))?; + if psbt_input.final_script_sig.is_some() || psbt_input.final_script_witness.is_some() { + continue; + } + let confirmation_height = self + .keychain_tracker + .chain() + .tx_position(input.previous_output.txid) + .map(|conftime| match conftime { + &ConfirmationTime::Confirmed { height, .. } => height, + ConfirmationTime::Unconfirmed => u32::MAX, + }); + let last_sync_height = self + .keychain_tracker + .chain() + .latest_checkpoint() + .map(|block_id| block_id.height); + let current_height = sign_options.assume_height.or(last_sync_height); + + debug!( + "Input #{} - {}, using `confirmation_height` = {:?}, `current_height` = {:?}", + n, input.previous_output, confirmation_height, current_height + ); + + // - Try to derive the descriptor by looking at the txout. 
If it's in our database, we + // know exactly which `keychain` to use, and which derivation index it is + // - If that fails, try to derive it by looking at the psbt input: the complete logic + // is in `src/descriptor/mod.rs`, but it will basically look at `bip32_derivation`, + // `redeem_script` and `witness_script` to determine the right derivation + // - If that also fails, it will try it on the internal descriptor, if present + let desc = psbt + .get_utxo_for(n) + .and_then(|txout| self.get_descriptor_for_txout(&txout)) + .or_else(|| { + self.keychain_tracker + .txout_index + .keychains() + .iter() + .find_map(|(_, desc)| { + desc.derive_from_psbt_input( + psbt_input, + psbt.get_utxo_for(n), + &self.secp, + ) + }) + }); + + match desc { + Some(desc) => { + let mut tmp_input = bitcoin::TxIn::default(); + match desc.satisfy( + &mut tmp_input, + ( + PsbtInputSatisfier::new(psbt, n), + After::new(current_height, false), + Older::new(current_height, confirmation_height, false), + ), + ) { + Ok(_) => { + let psbt_input = &mut psbt.inputs[n]; + psbt_input.final_script_sig = Some(tmp_input.script_sig); + psbt_input.final_script_witness = Some(tmp_input.witness); + if sign_options.remove_partial_sigs { + psbt_input.partial_sigs.clear(); + } + } + Err(e) => { + debug!("satisfy error {:?} for input {}", e, n); + finished = false + } + } + } + None => finished = false, + } + } + + Ok(finished) + } + + /// Return the secp256k1 context used for all signing operations + pub fn secp_ctx(&self) -> &SecpCtx { + &self.secp + } + + /// Returns the descriptor used to create addresses for a particular `keychain`. + pub fn get_descriptor_for_keychain(&self, keychain: KeychainKind) -> &ExtendedDescriptor { + self.public_descriptor(self.map_keychain(keychain)) + .expect("we mapped it to external if it doesn't exist") + } + + /// The derivation index of this wallet. It will return `None` if it has not derived any addresses. 
+ /// Otherwise, it will return the index of the highest address it has derived. + pub fn derivation_index(&self, keychain: KeychainKind) -> Option { + self.keychain_tracker + .txout_index + .last_revealed_index(&keychain) + } + + /// The index of the next address that you would get if you were to ask the wallet for a new address + pub fn next_derivation_index(&self, keychain: KeychainKind) -> u32 { + self.keychain_tracker.txout_index.next_index(&keychain).0 + } + + /// Informs the wallet that you no longer intend to broadcast a tx that was built from it. + /// + /// This frees up the change address used when creating the tx for use in future transactions. + // TODO: Make this free up reserved utxos when that's implemented + pub fn cancel_tx(&mut self, tx: &Transaction) { + let txout_index = &mut self.keychain_tracker.txout_index; + for txout in &tx.output { + if let Some(&(keychain, index)) = txout_index.index_of_spk(&txout.script_pubkey) { + // NOTE: unmark_used will **not** make something unused if it has actually been used + // by a tx in the tracker. It only removes the superficial marking. 
+ txout_index.unmark_used(&keychain, index); + } + } + } + + fn map_keychain(&self, keychain: KeychainKind) -> KeychainKind { + if keychain == KeychainKind::Internal + && self.public_descriptor(KeychainKind::Internal).is_none() + { + KeychainKind::External + } else { + keychain + } + } + + fn get_descriptor_for_txout(&self, txout: &TxOut) -> Option { + let &(keychain, child) = self + .keychain_tracker + .txout_index + .index_of_spk(&txout.script_pubkey)?; + let descriptor = self.get_descriptor_for_keychain(keychain); + Some(descriptor.at_derivation_index(child)) + } + + fn get_available_utxos(&self) -> Vec<(LocalUtxo, usize)> { + self.list_unspent() + .into_iter() + .map(|utxo| { + let keychain = utxo.keychain; + ( + utxo, + self.get_descriptor_for_keychain(keychain) + .max_satisfaction_weight() + .unwrap(), + ) + }) + .collect() + } + + /// Given the options returns the list of utxos that must be used to form the + /// transaction and any further that may be used if needed. + #[allow(clippy::too_many_arguments)] + fn preselect_utxos( + &self, + change_policy: tx_builder::ChangeSpendPolicy, + unspendable: &HashSet, + manually_selected: Vec, + must_use_all_available: bool, + manual_only: bool, + must_only_use_confirmed_tx: bool, + current_height: Option, + ) -> (Vec, Vec) { + // must_spend <- manually selected utxos + // may_spend <- all other available utxos + let mut may_spend = self.get_available_utxos(); + + may_spend.retain(|may_spend| { + !manually_selected + .iter() + .any(|manually_selected| manually_selected.utxo.outpoint() == may_spend.0.outpoint) + }); + let mut must_spend = manually_selected; + + // NOTE: we are intentionally ignoring `unspendable` here. i.e manual + // selection overrides unspendable. 
+ if manual_only { + return (must_spend, vec![]); + } + + let satisfies_confirmed = may_spend + .iter() + .map(|u| { + let txid = u.0.outpoint.txid; + let tx = self.keychain_tracker.chain_graph().get_tx_in_chain(txid); + match tx { + // We don't have the tx in the db for some reason, + // so we can't know for sure if it's mature or not. + // We prefer not to spend it. + None => false, + Some((confirmation_time, tx)) => { + // Whether the UTXO is mature and, if needed, confirmed + let mut spendable = true; + if must_only_use_confirmed_tx && !confirmation_time.is_confirmed() { + return false; + } + if tx.is_coin_base() { + debug_assert!( + confirmation_time.is_confirmed(), + "coinbase must always be confirmed" + ); + if let Some(current_height) = current_height { + match confirmation_time { + ConfirmationTime::Confirmed { height, .. } => { + // https://github.com/bitcoin/bitcoin/blob/c5e67be03bb06a5d7885c55db1f016fbf2333fe3/src/validation.cpp#L373-L375 + spendable &= (current_height.saturating_sub(*height)) + >= COINBASE_MATURITY; + } + ConfirmationTime::Unconfirmed => spendable = false, + } + } + } + spendable + } + } + }) + .collect::>(); + + let mut i = 0; + may_spend.retain(|u| { + let retain = change_policy.is_satisfied_by(&u.0) + && !unspendable.contains(&u.0.outpoint) + && satisfies_confirmed[i]; + i += 1; + retain + }); + + let mut may_spend = may_spend + .into_iter() + .map(|(local_utxo, satisfaction_weight)| WeightedUtxo { + satisfaction_weight, + utxo: Utxo::Local(local_utxo), + }) + .collect(); + + if must_use_all_available { + must_spend.append(&mut may_spend); + } + + (must_spend, may_spend) + } + + fn complete_transaction( + &self, + tx: Transaction, + selected: Vec, + params: TxParams, + ) -> Result { + let mut psbt = psbt::PartiallySignedTransaction::from_unsigned_tx(tx)?; + + if params.add_global_xpubs { + let all_xpubs = self + .keychanins() + .iter() + .flat_map(|(_, desc)| desc.get_extended_keys()) + .collect::>(); + + for xpub in all_xpubs { + 
let origin = match xpub.origin { + Some(origin) => origin, + None if xpub.xkey.depth == 0 => { + (xpub.root_fingerprint(&self.secp), vec![].into()) + } + _ => return Err(Error::MissingKeyOrigin(xpub.xkey.to_string())), + }; + + psbt.xpub.insert(xpub.xkey, origin); + } + } + + let mut lookup_output = selected + .into_iter() + .map(|utxo| (utxo.outpoint(), utxo)) + .collect::>(); + + // add metadata for the inputs + for (psbt_input, input) in psbt.inputs.iter_mut().zip(psbt.unsigned_tx.input.iter()) { + let utxo = match lookup_output.remove(&input.previous_output) { + Some(utxo) => utxo, + None => continue, + }; + + match utxo { + Utxo::Local(utxo) => { + *psbt_input = + match self.get_psbt_input(utxo, params.sighash, params.only_witness_utxo) { + Ok(psbt_input) => psbt_input, + Err(e) => match e { + Error::UnknownUtxo => psbt::Input { + sighash_type: params.sighash, + ..psbt::Input::default() + }, + _ => return Err(e), + }, + } + } + Utxo::Foreign { + psbt_input: foreign_psbt_input, + outpoint, + } => { + let is_taproot = foreign_psbt_input + .witness_utxo + .as_ref() + .map(|txout| txout.script_pubkey.is_v1_p2tr()) + .unwrap_or(false); + if !is_taproot + && !params.only_witness_utxo + && foreign_psbt_input.non_witness_utxo.is_none() + { + return Err(Error::Generic(format!( + "Missing non_witness_utxo on foreign utxo {}", + outpoint + ))); + } + *psbt_input = *foreign_psbt_input; + } + } + } + + self.update_psbt_with_descriptor(&mut psbt)?; + + Ok(psbt) + } + + /// get the corresponding PSBT Input for a LocalUtxo + pub fn get_psbt_input( + &self, + utxo: LocalUtxo, + sighash_type: Option, + only_witness_utxo: bool, + ) -> Result { + // Try to find the prev_script in our db to figure out if this is internal or external, + // and the derivation index + let &(keychain, child) = self + .keychain_tracker + .txout_index + .index_of_spk(&utxo.txout.script_pubkey) + .ok_or(Error::UnknownUtxo)?; + + let mut psbt_input = psbt::Input { + sighash_type, + 
..psbt::Input::default() + }; + + let desc = self.get_descriptor_for_keychain(keychain); + let derived_descriptor = desc.at_derivation_index(child); + + psbt_input + .update_with_descriptor_unchecked(&derived_descriptor) + .map_err(MiniscriptPsbtError::Conversion)?; + + let prev_output = utxo.outpoint; + if let Some(prev_tx) = self.keychain_tracker.graph().get_tx(prev_output.txid) { + if desc.is_witness() || desc.is_taproot() { + psbt_input.witness_utxo = Some(prev_tx.output[prev_output.vout as usize].clone()); + } + if !desc.is_taproot() && (!desc.is_witness() || !only_witness_utxo) { + psbt_input.non_witness_utxo = Some(prev_tx.clone()); + } + } + Ok(psbt_input) + } + + fn update_psbt_with_descriptor( + &self, + psbt: &mut psbt::PartiallySignedTransaction, + ) -> Result<(), Error> { + // We need to borrow `psbt` mutably within the loops, so we have to allocate a vec for all + // the input utxos and outputs + // + // Clippy complains that the collect is not required, but that's wrong + #[allow(clippy::needless_collect)] + let utxos = (0..psbt.inputs.len()) + .filter_map(|i| psbt.get_utxo_for(i).map(|utxo| (true, i, utxo))) + .chain( + psbt.unsigned_tx + .output + .iter() + .enumerate() + .map(|(i, out)| (false, i, out.clone())), + ) + .collect::>(); + + // Try to figure out the keychain and derivation for every input and output + for (is_input, index, out) in utxos.into_iter() { + if let Some(&(keychain, child)) = self + .keychain_tracker + .txout_index + .index_of_spk(&out.script_pubkey) + { + debug!( + "Found descriptor for input #{} {:?}/{}", + index, keychain, child + ); + + let desc = self.get_descriptor_for_keychain(keychain); + let desc = desc.at_derivation_index(child); + + if is_input { + psbt.update_input_with_descriptor(index, &desc) + .map_err(MiniscriptPsbtError::UtxoUpdate)?; + } else { + psbt.update_output_with_descriptor(index, &desc) + .map_err(MiniscriptPsbtError::OutputUpdate)?; + } + } + } + + Ok(()) + } + + /// Return the checksum of the 
public descriptor associated to `keychain` + /// + /// Internally calls [`Self::get_descriptor_for_keychain`] to fetch the right descriptor + pub fn descriptor_checksum(&self, keychain: KeychainKind) -> String { + self.get_descriptor_for_keychain(keychain) + .to_string() + .split_once('#') + .unwrap() + .1 + .to_string() + } + + /// Applies an update to the wallet and stages the changes (but does not [`commit`] them). + /// + /// Usually you create an `update` by interacting with some blockchain data source and inserting + /// transactions related to your wallet into it. + /// + /// [`commit`]: Self::commit + pub fn apply_update(&mut self, update: Update) -> Result<(), UpdateError> + where + D: persist::PersistBackend, + { + let changeset = self.keychain_tracker.apply_update(update)?; + self.persist.stage(changeset); + Ok(()) + } + + /// Commits all curently [`staged`] changed to the persistence backend returning and error when this fails. + /// + /// [`staged`]: Self::staged + pub fn commit(&mut self) -> Result<(), D::WriteError> + where + D: persist::PersistBackend, + { + self.persist.commit() + } + + /// Returns the changes that will be staged with the next call to [`commit`]. + /// + /// [`commit`]: Self::commit + pub fn staged(&self) -> &ChangeSet { + self.persist.staged() + } + + /// Get a reference to the inner [`TxGraph`](bdk_chain::tx_graph::TxGraph). + pub fn as_graph(&self) -> &bdk_chain::tx_graph::TxGraph { + self.keychain_tracker.graph() + } + + /// Get a reference to the inner [`ChainGraph`](bdk_chain::chain_graph::ChainGraph). 
+ pub fn as_chain_graph(&self) -> &bdk_chain::chain_graph::ChainGraph { + self.keychain_tracker.chain_graph() + } +} + +impl AsRef for Wallet { + fn as_ref(&self) -> &bdk_chain::tx_graph::TxGraph { + self.keychain_tracker.graph() + } +} + +impl AsRef> for Wallet { + fn as_ref(&self) -> &bdk_chain::chain_graph::ChainGraph { + self.keychain_tracker.chain_graph() + } +} + +/// Deterministically generate a unique name given the descriptors defining the wallet +/// +/// Compatible with [`wallet_name_from_descriptor`] +pub fn wallet_name_from_descriptor( + descriptor: T, + change_descriptor: Option, + network: Network, + secp: &SecpCtx, +) -> Result +where + T: IntoWalletDescriptor, +{ + //TODO check descriptors contains only public keys + let descriptor = descriptor + .into_wallet_descriptor(secp, network)? + .0 + .to_string(); + let mut wallet_name = calc_checksum(&descriptor[..descriptor.find('#').unwrap()])?; + if let Some(change_descriptor) = change_descriptor { + let change_descriptor = change_descriptor + .into_wallet_descriptor(secp, network)? + .0 + .to_string(); + wallet_name.push_str( + calc_checksum(&change_descriptor[..change_descriptor.find('#').unwrap()])?.as_str(), + ); + } + + Ok(wallet_name) +} + +#[macro_export] +#[doc(hidden)] +/// Macro for getting a wallet for use in a doctest +macro_rules! 
doctest_wallet { + () => {{ + use $crate::bitcoin::{BlockHash, Transaction, PackedLockTime, TxOut, Network, hashes::Hash}; + use $crate::chain::{ConfirmationTime, BlockId}; + use $crate::wallet::{AddressIndex, Wallet}; + let descriptor = "tr([73c5da0a/86'/0'/0']tprv8fMn4hSKPRC1oaCPqxDb1JWtgkpeiQvZhsr8W2xuy3GEMkzoArcAWTfJxYb6Wj8XNNDWEjfYKK4wGQXh3ZUXhDF2NcnsALpWTeSwarJt7Vc/0/*)"; + let change_descriptor = "tr([73c5da0a/86'/0'/0']tprv8fMn4hSKPRC1oaCPqxDb1JWtgkpeiQvZhsr8W2xuy3GEMkzoArcAWTfJxYb6Wj8XNNDWEjfYKK4wGQXh3ZUXhDF2NcnsALpWTeSwarJt7Vc/1/*)"; + + let mut wallet = Wallet::new_no_persist( + descriptor, + Some(change_descriptor), + Network::Regtest, + ) + .unwrap(); + let address = wallet.get_address(AddressIndex::New).address; + let tx = Transaction { + version: 1, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 500_000, + script_pubkey: address.script_pubkey(), + }], + }; + let _ = wallet.insert_checkpoint(BlockId { height: 1_000, hash: BlockHash::all_zeros() }); + let _ = wallet.insert_tx(tx.clone(), ConfirmationTime::Confirmed { + height: 500, + time: 50_000 + }); + + wallet + }} +} diff --git a/src/wallet/signer.rs b/crates/bdk/src/wallet/signer.rs similarity index 99% rename from src/wallet/signer.rs rename to crates/bdk/src/wallet/signer.rs index ff54cfa8a..68dc46450 100644 --- a/src/wallet/signer.rs +++ b/crates/bdk/src/wallet/signer.rs @@ -15,13 +15,12 @@ //! through the [`Wallet::add_signer`](super::Wallet::add_signer) function. //! //! ``` -//! # use std::sync::Arc; -//! # use std::str::FromStr; +//! # use alloc::sync::Arc; +//! # use core::str::FromStr; //! # use bitcoin::secp256k1::{Secp256k1, All}; //! # use bitcoin::*; //! # use bitcoin::util::psbt; //! # use bdk::signer::*; -//! # use bdk::database::*; //! # use bdk::*; //! # #[derive(Debug)] //! # struct CustomHSM; @@ -70,7 +69,7 @@ //! let custom_signer = CustomSigner::connect(); //! //! 
let descriptor = "wpkh(tpubD6NzVbkrYhZ4Xferm7Pz4VnjdcDPFyjVu5K4iZXQ4pVN8Cks4pHVowTBXBKRhX64pkRyJZJN5xAKj4UDNnLPb5p2sSKXhewoYx5GbTdUFWq/*)"; -//! let mut wallet = Wallet::new(descriptor, None, Network::Testnet, MemoryDatabase::default())?; +//! let mut wallet = Wallet::new_no_persist(descriptor, None, Network::Testnet)?; //! wallet.add_signer( //! KeychainKind::External, //! SignerOrdering(200), @@ -80,11 +79,12 @@ //! # Ok::<_, bdk::Error>(()) //! ``` -use std::cmp::Ordering; -use std::collections::BTreeMap; -use std::fmt; -use std::ops::{Bound::Included, Deref}; -use std::sync::Arc; +use crate::collections::BTreeMap; +use alloc::sync::Arc; +use alloc::vec::Vec; +use core::cmp::Ordering; +use core::fmt; +use core::ops::{Bound::Included, Deref}; use bitcoin::blockdata::opcodes; use bitcoin::blockdata::script::Builder as ScriptBuilder; @@ -199,6 +199,7 @@ impl fmt::Display for SignerError { } } +#[cfg(feature = "std")] impl std::error::Error for SignerError {} /// Signing context @@ -560,7 +561,7 @@ fn sign_psbt_schnorr( #[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq)] pub struct SignerOrdering(pub usize); -impl std::default::Default for SignerOrdering { +impl Default for SignerOrdering { fn default() -> Self { SignerOrdering(100) } @@ -1017,8 +1018,8 @@ mod signers_container_tests { use bitcoin::secp256k1::{All, Secp256k1}; use bitcoin::util::bip32; use bitcoin::Network; + use core::str::FromStr; use miniscript::ScriptContext; - use std::str::FromStr; fn is_equal(this: &Arc, that: &Arc) -> bool { let secp = Secp256k1::new(); diff --git a/src/wallet/tx_builder.rs b/crates/bdk/src/wallet/tx_builder.rs similarity index 92% rename from src/wallet/tx_builder.rs rename to crates/bdk/src/wallet/tx_builder.rs index 6d52b8d90..dbd4811c1 100644 --- a/src/wallet/tx_builder.rs +++ b/crates/bdk/src/wallet/tx_builder.rs @@ -19,7 +19,7 @@ //! # use bdk::*; //! # use bdk::wallet::tx_builder::CreateTx; //! 
# let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); -//! # let wallet = doctest_wallet!(); +//! # let mut wallet = doctest_wallet!(); //! // create a TxBuilder from a wallet //! let mut tx_builder = wallet.build_tx(); //! @@ -36,22 +36,25 @@ //! # Ok::<(), bdk::Error>(()) //! ``` -use std::collections::BTreeMap; -use std::collections::HashSet; -use std::default::Default; -use std::marker::PhantomData; +use crate::collections::BTreeMap; +use crate::collections::HashSet; +use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec}; +use bdk_chain::ConfirmationTime; +use core::cell::RefCell; +use core::marker::PhantomData; use bitcoin::util::psbt::{self, PartiallySignedTransaction as Psbt}; use bitcoin::{LockTime, OutPoint, Script, Sequence, Transaction}; use super::coin_selection::{CoinSelectionAlgorithm, DefaultCoinSelectionAlgorithm}; -use crate::{database::BatchDatabase, Error, Utxo, Wallet}; +use super::persist; use crate::{ types::{FeeRate, KeychainKind, LocalUtxo, WeightedUtxo}, TransactionDetails, }; +use crate::{Error, Utxo, Wallet}; /// Context in which the [`TxBuilder`] is valid -pub trait TxBuilderContext: std::fmt::Debug + Default + Clone {} +pub trait TxBuilderContext: core::fmt::Debug + Default + Clone {} /// Marker type to indicate the [`TxBuilder`] is being used to create a new transaction (as opposed /// to bumping the fee of an existing one). 
@@ -78,7 +81,7 @@ impl TxBuilderContext for BumpFee {} /// # use bdk::wallet::tx_builder::*; /// # use bitcoin::*; /// # use core::str::FromStr; -/// # let wallet = doctest_wallet!(); +/// # let mut wallet = doctest_wallet!(); /// # let addr1 = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); /// # let addr2 = addr1.clone(); /// // chaining @@ -116,7 +119,7 @@ impl TxBuilderContext for BumpFee {} /// [`coin_selection`]: Self::coin_selection #[derive(Debug)] pub struct TxBuilder<'a, D, Cs, Ctx> { - pub(crate) wallet: &'a Wallet, + pub(crate) wallet: Rc>>, pub(crate) params: TxParams, pub(crate) coin_selection: Cs, pub(crate) phantom: PhantomData, @@ -161,16 +164,16 @@ pub(crate) enum FeePolicy { FeeAmount(u64), } -impl std::default::Default for FeePolicy { +impl Default for FeePolicy { fn default() -> Self { FeePolicy::FeeRate(FeeRate::default_min_relay_fee()) } } -impl<'a, Cs: Clone, Ctx, D> Clone for TxBuilder<'a, D, Cs, Ctx> { +impl<'a, D, Cs: Clone, Ctx> Clone for TxBuilder<'a, D, Cs, Ctx> { fn clone(&self) -> Self { TxBuilder { - wallet: self.wallet, + wallet: self.wallet.clone(), params: self.params.clone(), coin_selection: self.coin_selection.clone(), phantom: PhantomData, @@ -179,9 +182,7 @@ impl<'a, Cs: Clone, Ctx, D> Clone for TxBuilder<'a, D, Cs, Ctx> { } // methods supported by both contexts, for any CoinSelectionAlgorithm -impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> - TxBuilder<'a, D, Cs, Ctx> -{ +impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D, Cs, Ctx> { /// Set a custom fee rate pub fn fee_rate(&mut self, fee_rate: FeeRate) -> &mut Self { self.params.fee_policy = Some(FeePolicy::FeeRate(fee_rate)); @@ -242,7 +243,7 @@ impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> /// # use bitcoin::*; /// # use bdk::*; /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); - /// # let wallet = doctest_wallet!(); + /// # 
let mut wallet = doctest_wallet!(); /// let mut path = BTreeMap::new(); /// path.insert("aabbccdd".to_string(), vec![0, 1]); /// @@ -274,18 +275,21 @@ impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> /// These have priority over the "unspendable" utxos, meaning that if a utxo is present both in /// the "utxos" and the "unspendable" list, it will be spent. pub fn add_utxos(&mut self, outpoints: &[OutPoint]) -> Result<&mut Self, Error> { - let utxos = outpoints - .iter() - .map(|outpoint| self.wallet.get_utxo(*outpoint)?.ok_or(Error::UnknownUtxo)) - .collect::, _>>()?; - - for utxo in utxos { - let descriptor = self.wallet.get_descriptor_for_keychain(utxo.keychain); - let satisfaction_weight = descriptor.max_satisfaction_weight().unwrap(); - self.params.utxos.push(WeightedUtxo { - satisfaction_weight, - utxo: Utxo::Local(utxo), - }); + { + let wallet = self.wallet.borrow(); + let utxos = outpoints + .iter() + .map(|outpoint| wallet.get_utxo(*outpoint).ok_or(Error::UnknownUtxo)) + .collect::, _>>()?; + + for utxo in utxos { + let descriptor = wallet.get_descriptor_for_keychain(utxo.keychain); + let satisfaction_weight = descriptor.max_satisfaction_weight().unwrap(); + self.params.utxos.push(WeightedUtxo { + satisfaction_weight, + utxo: Utxo::Local(utxo), + }); + } } Ok(self) @@ -503,7 +507,7 @@ impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> /// Overrides the [`DefaultCoinSelectionAlgorithm`](super::coin_selection::DefaultCoinSelectionAlgorithm). /// /// Note that this function consumes the builder and returns it so it is usually best to put this as the first call on the builder. - pub fn coin_selection>( + pub fn coin_selection( self, coin_selection: P, ) -> TxBuilder<'a, D, P, Ctx> { @@ -520,8 +524,13 @@ impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> /// Returns the [`BIP174`] "PSBT" and summary details about the transaction. 
/// /// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki - pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error> { - self.wallet.create_tx(self.coin_selection, self.params) + pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error> + where + D: persist::PersistBackend, + { + self.wallet + .borrow_mut() + .create_tx(self.coin_selection, self.params) } /// Enable signaling RBF @@ -569,7 +578,7 @@ impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> } } -impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm> TxBuilder<'a, D, Cs, CreateTx> { +impl<'a, D, Cs: CoinSelectionAlgorithm> TxBuilder<'a, D, Cs, CreateTx> { /// Replace the recipients already added with a new list pub fn set_recipients(&mut self, recipients: Vec<(Script, u64)>) -> &mut Self { self.params.recipients = recipients; @@ -615,7 +624,7 @@ impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm> TxBuilder<'a, D, Cs, C /// # use bdk::*; /// # use bdk::wallet::tx_builder::CreateTx; /// # let to_address = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); - /// # let wallet = doctest_wallet!(); + /// # let mut wallet = doctest_wallet!(); /// let mut tx_builder = wallet.build_tx(); /// /// tx_builder @@ -640,7 +649,7 @@ impl<'a, D: BatchDatabase, Cs: CoinSelectionAlgorithm> TxBuilder<'a, D, Cs, C } // methods supported only by bump_fee -impl<'a, D: BatchDatabase> TxBuilder<'a, D, DefaultCoinSelectionAlgorithm, BumpFee> { +impl<'a, D> TxBuilder<'a, D, DefaultCoinSelectionAlgorithm, BumpFee> { /// Explicitly tells the wallet that it is allowed to reduce the amount of the output matching this /// `script_pubkey` in order to bump the transaction fee. Without specifying this the wallet /// will attempt to find a change output to shrink instead. 
@@ -695,14 +704,8 @@ impl TxOrdering { TxOrdering::Untouched => {} TxOrdering::Shuffle => { use rand::seq::SliceRandom; - #[cfg(test)] - use rand::SeedableRng; - - #[cfg(not(test))] let mut rng = rand::thread_rng(); - #[cfg(test)] - let mut rng = rand::rngs::StdRng::seed_from_u64(12345); - + tx.input.shuffle(&mut rng); tx.output.shuffle(&mut rng); } TxOrdering::Bip69Lexicographic => { @@ -788,6 +791,7 @@ mod test { }; } + use bdk_chain::ConfirmationTime; use bitcoin::consensus::deserialize; use bitcoin::hashes::hex::FromHex; @@ -813,15 +817,25 @@ mod test { let original_tx = ordering_test_tx!(); let mut tx = original_tx.clone(); - TxOrdering::Shuffle.sort_tx(&mut tx); + (0..40) + .find(|_| { + TxOrdering::Shuffle.sort_tx(&mut tx); + original_tx.input != tx.input + }) + .expect("it should have moved the inputs at least once"); - assert_eq!(original_tx.input, tx.input); - assert_ne!(original_tx.output, tx.output); + let mut tx = original_tx.clone(); + (0..40) + .find(|_| { + TxOrdering::Shuffle.sort_tx(&mut tx); + original_tx.output != tx.output + }) + .expect("it should have moved the outputs at least once"); } #[test] fn test_output_ordering_bip69() { - use std::str::FromStr; + use core::str::FromStr; let original_tx = ordering_test_tx!(); let mut tx = original_tx; @@ -867,6 +881,8 @@ mod test { txout: Default::default(), keychain: KeychainKind::External, is_spent: false, + confirmation_time: ConfirmationTime::Unconfirmed, + derivation_index: 0, }, LocalUtxo { outpoint: OutPoint { @@ -876,6 +892,11 @@ mod test { txout: Default::default(), keychain: KeychainKind::Internal, is_spent: false, + confirmation_time: ConfirmationTime::Confirmed { + height: 32, + time: 42, + }, + derivation_index: 1, }, ] } diff --git a/src/wallet/utils.rs b/crates/bdk/src/wallet/utils.rs similarity index 99% rename from src/wallet/utils.rs rename to crates/bdk/src/wallet/utils.rs index 163d417ee..86f7d2fe4 100644 --- a/src/wallet/utils.rs +++ b/crates/bdk/src/wallet/utils.rs @@ -120,7 
+120,7 @@ mod test { use super::{check_nsequence_rbf, IsDust}; use crate::bitcoin::{Address, Sequence}; - use std::str::FromStr; + use core::str::FromStr; #[test] fn test_is_dust() { diff --git a/crates/bdk/tests/common.rs b/crates/bdk/tests/common.rs new file mode 100644 index 000000000..de9467032 --- /dev/null +++ b/crates/bdk/tests/common.rs @@ -0,0 +1,93 @@ +#![allow(unused)] +use bdk::{wallet::AddressIndex, Wallet}; +use bdk_chain::{BlockId, ConfirmationTime}; +use bitcoin::hashes::Hash; +use bitcoin::{BlockHash, Network, Transaction, TxOut}; + +/// Return a fake wallet that appears to be funded for testing. +pub fn get_funded_wallet_with_change( + descriptor: &str, + change: Option<&str>, +) -> (Wallet, bitcoin::Txid) { + let mut wallet = Wallet::new_no_persist(descriptor, change, Network::Regtest).unwrap(); + let address = wallet.get_address(AddressIndex::New).address; + + let tx = Transaction { + version: 1, + lock_time: bitcoin::PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 50_000, + script_pubkey: address.script_pubkey(), + }], + }; + + wallet + .insert_checkpoint(BlockId { + height: 1_000, + hash: BlockHash::all_zeros(), + }) + .unwrap(); + wallet + .insert_tx( + tx.clone(), + ConfirmationTime::Confirmed { + height: 1_000, + time: 100, + }, + ) + .unwrap(); + + (wallet, tx.txid()) +} + +pub fn get_funded_wallet(descriptor: &str) -> (Wallet, bitcoin::Txid) { + get_funded_wallet_with_change(descriptor, None) +} + +pub fn get_test_wpkh() -> &'static str { + "wpkh(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)" +} + +pub fn get_test_single_sig_csv() -> &'static str { + // and(pk(Alice),older(6)) + "wsh(and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),older(6)))" +} + +pub fn get_test_a_or_b_plus_csv() -> &'static str { + // or(pk(Alice),and(pk(Bob),older(144))) + "wsh(or_d(pk(cRjo6jqfVNP33HhSS76UhXETZsGTZYx8FMFvR9kpbtCSV1PmdZdu),and_v(v:pk(cMnkdebixpXMPfkcNEjjGin7s94hiehAH4mLbYkZoh9KSiNNmqC8),older(144))))" +} + 
+pub fn get_test_single_sig_cltv() -> &'static str { + // and(pk(Alice),after(100000)) + "wsh(and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),after(100000)))" +} + +pub fn get_test_tr_single_sig() -> &'static str { + "tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG)" +} + +pub fn get_test_tr_with_taptree() -> &'static str { + "tr(b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55,{pk(cPZzKuNmpuUjD1e8jUU4PVzy2b5LngbSip8mBsxf4e7rSFZVb4Uh),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})" +} + +pub fn get_test_tr_with_taptree_both_priv() -> &'static str { + "tr(b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55,{pk(cPZzKuNmpuUjD1e8jUU4PVzy2b5LngbSip8mBsxf4e7rSFZVb4Uh),pk(cNaQCDwmmh4dS9LzCgVtyy1e1xjCJ21GUDHe9K98nzb689JvinGV)})" +} + +pub fn get_test_tr_repeated_key() -> &'static str { + "tr(b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55,{and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),after(100)),and_v(v:pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW),after(200))})" +} + +pub fn get_test_tr_single_sig_xprv() -> &'static str { + "tr(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*)" +} + +pub fn get_test_tr_with_taptree_xprv() -> &'static str { + "tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG,{pk(tprv8ZgxMBicQKsPdDArR4xSAECuVxeX1jwwSXR4ApKbkYgZiziDc4LdBy2WvJeGDfUSE4UT4hHhbgEwbdq8ajjUHiKDegkwrNU6V55CxcxonVN/*),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})" +} + +pub fn get_test_tr_dup_keys() -> &'static str { + "tr(cNJmN3fH9DDbDt131fQNkVakkpzawJBSeybCUNmP1BovpmGQ45xG,{pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642),pk(8aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642)})" +} diff --git a/crates/bdk/tests/psbt.rs b/crates/bdk/tests/psbt.rs new file mode 100644 index 000000000..8d399f5fe --- /dev/null +++ 
b/crates/bdk/tests/psbt.rs @@ -0,0 +1,158 @@ +use bdk::bitcoin::TxIn; +use bdk::wallet::AddressIndex; +use bdk::wallet::AddressIndex::New; +use bdk::{psbt, FeeRate, SignOptions}; +use bitcoin::util::psbt::PartiallySignedTransaction as Psbt; +use core::str::FromStr; +mod common; +use common::*; + +// from bip 174 +const PSBT_STR: &str = "cHNidP8BAKACAAAAAqsJSaCMWvfEm4IS9Bfi8Vqz9cM9zxU4IagTn4d6W3vkAAAAAAD+////qwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QBAAAAAP7///8CYDvqCwAAAAAZdqkUdopAu9dAy+gdmI5x3ipNXHE5ax2IrI4kAAAAAAAAGXapFG9GILVT+glechue4O/p+gOcykWXiKwAAAAAAAEHakcwRAIgR1lmF5fAGwNrJZKJSGhiGDR9iYZLcZ4ff89X0eURZYcCIFMJ6r9Wqk2Ikf/REf3xM286KdqGbX+EhtdVRs7tr5MZASEDXNxh/HupccC1AaZGoqg7ECy0OIEhfKaC3Ibi1z+ogpIAAQEgAOH1BQAAAAAXqRQ1RebjO4MsRwUPJNPuuTycA5SLx4cBBBYAFIXRNTfy4mVAWjTbr6nj3aAfuCMIAAAA"; + +#[test] +#[should_panic(expected = "InputIndexOutOfRange")] +fn test_psbt_malformed_psbt_input_legacy() { + let psbt_bip = Psbt::from_str(PSBT_STR).unwrap(); + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let send_to = wallet.get_address(AddressIndex::New); + let mut builder = wallet.build_tx(); + builder.add_recipient(send_to.script_pubkey(), 10_000); + let (mut psbt, _) = builder.finish().unwrap(); + psbt.inputs.push(psbt_bip.inputs[0].clone()); + let options = SignOptions { + trust_witness_utxo: true, + ..Default::default() + }; + let _ = wallet.sign(&mut psbt, options).unwrap(); +} + +#[test] +#[should_panic(expected = "InputIndexOutOfRange")] +fn test_psbt_malformed_psbt_input_segwit() { + let psbt_bip = Psbt::from_str(PSBT_STR).unwrap(); + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let send_to = wallet.get_address(AddressIndex::New); + let mut builder = wallet.build_tx(); + builder.add_recipient(send_to.script_pubkey(), 10_000); + let (mut psbt, _) = builder.finish().unwrap(); + psbt.inputs.push(psbt_bip.inputs[1].clone()); + let options = SignOptions { + trust_witness_utxo: true, + ..Default::default() + }; + let _ = wallet.sign(&mut psbt, 
options).unwrap(); +} + +#[test] +#[should_panic(expected = "InputIndexOutOfRange")] +fn test_psbt_malformed_tx_input() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let send_to = wallet.get_address(AddressIndex::New); + let mut builder = wallet.build_tx(); + builder.add_recipient(send_to.script_pubkey(), 10_000); + let (mut psbt, _) = builder.finish().unwrap(); + psbt.unsigned_tx.input.push(TxIn::default()); + let options = SignOptions { + trust_witness_utxo: true, + ..Default::default() + }; + let _ = wallet.sign(&mut psbt, options).unwrap(); +} + +#[test] +fn test_psbt_sign_with_finalized() { + let psbt_bip = Psbt::from_str(PSBT_STR).unwrap(); + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let send_to = wallet.get_address(AddressIndex::New); + let mut builder = wallet.build_tx(); + builder.add_recipient(send_to.script_pubkey(), 10_000); + let (mut psbt, _) = builder.finish().unwrap(); + + // add a finalized input + psbt.inputs.push(psbt_bip.inputs[0].clone()); + psbt.unsigned_tx + .input + .push(psbt_bip.unsigned_tx.input[0].clone()); + + let _ = wallet.sign(&mut psbt, SignOptions::default()).unwrap(); +} + +#[test] +fn test_psbt_fee_rate_with_witness_utxo() { + use psbt::PsbtUtils; + + let expected_fee_rate = 1.2345; + + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate)); + let (mut psbt, _) = builder.finish().unwrap(); + let fee_amount = psbt.fee_amount(); + assert!(fee_amount.is_some()); + + let unfinalized_fee_rate = psbt.fee_rate().unwrap(); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let finalized_fee_rate = psbt.fee_rate().unwrap(); + 
assert!(finalized_fee_rate.as_sat_per_vb() >= expected_fee_rate); + assert!(finalized_fee_rate.as_sat_per_vb() < unfinalized_fee_rate.as_sat_per_vb()); +} + +#[test] +fn test_psbt_fee_rate_with_nonwitness_utxo() { + use psbt::PsbtUtils; + + let expected_fee_rate = 1.2345; + + let (mut wallet, _) = get_funded_wallet("pkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate)); + let (mut psbt, _) = builder.finish().unwrap(); + let fee_amount = psbt.fee_amount(); + assert!(fee_amount.is_some()); + let unfinalized_fee_rate = psbt.fee_rate().unwrap(); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let finalized_fee_rate = psbt.fee_rate().unwrap(); + assert!(finalized_fee_rate.as_sat_per_vb() >= expected_fee_rate); + assert!(finalized_fee_rate.as_sat_per_vb() < unfinalized_fee_rate.as_sat_per_vb()); +} + +#[test] +fn test_psbt_fee_rate_with_missing_txout() { + use psbt::PsbtUtils; + + let expected_fee_rate = 1.2345; + + let (mut wpkh_wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wpkh_wallet.get_address(New); + let mut builder = wpkh_wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate)); + let (mut wpkh_psbt, _) = builder.finish().unwrap(); + + wpkh_psbt.inputs[0].witness_utxo = None; + wpkh_psbt.inputs[0].non_witness_utxo = None; + assert!(wpkh_psbt.fee_amount().is_none()); + assert!(wpkh_psbt.fee_rate().is_none()); + + let (mut pkh_wallet, _) = 
get_funded_wallet("pkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = pkh_wallet.get_address(New); + let mut builder = pkh_wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + builder.fee_rate(FeeRate::from_sat_per_vb(expected_fee_rate)); + let (mut pkh_psbt, _) = builder.finish().unwrap(); + + pkh_psbt.inputs[0].non_witness_utxo = None; + assert!(pkh_psbt.fee_amount().is_none()); + assert!(pkh_psbt.fee_rate().is_none()); +} diff --git a/crates/bdk/tests/wallet.rs b/crates/bdk/tests/wallet.rs new file mode 100644 index 000000000..9b25223e4 --- /dev/null +++ b/crates/bdk/tests/wallet.rs @@ -0,0 +1,3309 @@ +use assert_matches::assert_matches; +use bdk::descriptor::calc_checksum; +use bdk::signer::{SignOptions, SignerError}; +use bdk::wallet::coin_selection::LargestFirstCoinSelection; +use bdk::wallet::AddressIndex::*; +use bdk::wallet::{AddressIndex, AddressInfo, Balance, Wallet}; +use bdk::Error; +use bdk::FeeRate; +use bdk::KeychainKind; +use bdk_chain::BlockId; +use bdk_chain::COINBASE_MATURITY; +use bdk_chain::{ConfirmationTime, TxHeight}; +use bitcoin::hashes::Hash; +use bitcoin::BlockHash; +use bitcoin::Script; +use bitcoin::{util::psbt, Network}; +use bitcoin::{ + Address, EcdsaSighashType, LockTime, OutPoint, PackedLockTime, SchnorrSighashType, Sequence, + Transaction, TxIn, TxOut, +}; +use core::str::FromStr; + +mod common; +use common::*; + +fn receive_output(wallet: &mut Wallet, value: u64, height: TxHeight) -> OutPoint { + let tx = Transaction { + version: 1, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + script_pubkey: wallet.get_address(LastUnused).script_pubkey(), + value, + }], + }; + + wallet + .insert_tx( + tx.clone(), + match height { + TxHeight::Confirmed(height) => ConfirmationTime::Confirmed { + height, + time: 42_000, + }, + TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, + }, + ) + .unwrap(); + + 
OutPoint { + txid: tx.txid(), + vout: 0, + } +} + +fn receive_output_in_latest_block(wallet: &mut Wallet, value: u64) -> OutPoint { + let height = wallet.latest_checkpoint().map(|id| id.height).into(); + receive_output(wallet, value, height) +} + +// The satisfaction size of a P2WPKH is 112 WU = +// 1 (elements in witness) + 1 (OP_PUSH) + 33 (pk) + 1 (OP_PUSH) + 72 (signature + sighash) + 1*4 (script len) +// On the witness itself, we have to push once for the pk (33WU) and once for signature + sighash (72WU), for +// a total of 105 WU. +// Here, we push just once for simplicity, so we have to add an extra byte for the missing +// OP_PUSH. +const P2WPKH_FAKE_WITNESS_SIZE: usize = 106; + +#[test] +fn test_descriptor_checksum() { + let (wallet, _) = get_funded_wallet(get_test_wpkh()); + let checksum = wallet.descriptor_checksum(KeychainKind::External); + assert_eq!(checksum.len(), 8); + + let raw_descriptor = wallet + .keychanins() + .iter() + .next() + .unwrap() + .1 + .to_string() + .split_once('#') + .unwrap() + .0 + .to_string(); + assert_eq!(calc_checksum(&raw_descriptor).unwrap(), checksum); +} + +#[test] +fn test_get_funded_wallet_balance() { + let (wallet, _) = get_funded_wallet(get_test_wpkh()); + assert_eq!(wallet.get_balance().confirmed, 50000); +} + +macro_rules! 
/// Asserts that `$psbt` pays exactly `$fees` in fees and achieves (roughly)
/// `$fee_rate`.
///
/// Optional trailing markers:
/// * `@dust_change` — relaxes the upper bound on the rate: dropping a dust
///   change output can only *raise* the effective fee rate.
/// * `@add_signature` — appends a fake witness to every input so the measured
///   weight approximates a fully-signed transaction.
///
/// `P2WPKH_FAKE_WITNESS_SIZE` is defined elsewhere in this test module.
macro_rules! assert_fee_rate {
    ($psbt:expr, $fees:expr, $fee_rate:expr $( ,@dust_change $( $dust_change:expr )* )* $( ,@add_signature $( $add_signature:expr )* )* ) => ({
        let psbt = $psbt.clone();
        #[allow(unused_mut)]
        let mut tx = $psbt.clone().extract_tx();
        $(
            $( $add_signature )*
            for txin in &mut tx.input {
                txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature
            }
        )*

        #[allow(unused_mut)]
        #[allow(unused_assignments)]
        let mut dust_change = false;
        $(
            $( $dust_change )*
            dust_change = true;
        )*

        // fee = sum(inputs) - sum(outputs); inputs are read from witness_utxo.
        let fee_amount = psbt
            .inputs
            .iter()
            .fold(0, |acc, i| acc + i.witness_utxo.as_ref().unwrap().value)
            - psbt
                .unsigned_tx
                .output
                .iter()
                .fold(0, |acc, o| acc + o.value);

        assert_eq!(fee_amount, $fees);

        let tx_fee_rate = FeeRate::from_wu($fees, tx.weight());
        let fee_rate = $fee_rate;

        if !dust_change {
            assert!(
                tx_fee_rate >= fee_rate && (tx_fee_rate - fee_rate).as_sat_per_vb().abs() < 0.5,
                "Expected fee rate of {:?}, the tx has {:?}",
                fee_rate,
                tx_fee_rate
            );
        } else {
            assert!(
                tx_fee_rate >= fee_rate,
                "Expected fee rate of at least {:?}, the tx has {:?}",
                fee_rate,
                tx_fee_rate
            );
        }
    });
}

/// Parses `$e` via `FromStr`, panicking on failure; the one-argument form
/// lets the target type be inferred.
macro_rules! from_str {
    ($e:expr, $t:ty) => {{
        use core::str::FromStr;
        <$t>::from_str($e).unwrap()
    }};

    ($e:expr) => {
        from_str!($e, _)
    };
}

#[test]
#[should_panic(expected = "NoRecipients")]
fn test_create_tx_empty_recipients() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    wallet.build_tx().finish().unwrap();
}

#[test]
#[should_panic(expected = "NoUtxosSelected")]
fn test_create_tx_manually_selected_empty_utxos() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .manually_selected_only();
    builder.finish().unwrap();
}

#[test]
#[should_panic(expected = "Invalid version `0`")]
fn test_create_tx_version_0() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .version(0);
    builder.finish().unwrap();
}

#[test]
#[should_panic(
    expected = "TxBuilder requested version `1`, but at least `2` is needed to use OP_CSV"
)]
fn test_create_tx_version_1_csv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_csv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .version(1);
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_custom_version() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .version(42);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.version, 42);
}

#[test]
fn test_create_tx_default_locktime_is_last_sync_height() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());

    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, _) = builder.finish().unwrap();

    // The funded test wallet carries a checkpoint at height 1_000; with no
    // explicit nlocktime or current_height, anti-fee-sniping uses that height.
    assert_eq!(psbt.unsigned_tx.lock_time.0, 1_000);
}

#[test]
fn test_create_tx_fee_sniping_locktime_last_sync() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);

    let (psbt, _) = builder.finish().unwrap();

    // If there's no current_height we're left with using the last sync height
    assert_eq!(
        psbt.unsigned_tx.lock_time.0,
        wallet.latest_checkpoint().unwrap().height
    );
}

#[test]
fn test_create_tx_default_locktime_cltv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_cltv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.lock_time.0, 100_000);
}

#[test]
fn test_create_tx_custom_locktime() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .current_height(630_001)
        .nlocktime(LockTime::from_height(630_000).unwrap());
    let (psbt, _) = builder.finish().unwrap();

    // When we explicitly specify a nlocktime
    // we don't try any fee sniping prevention trick
    // (we ignore the current_height)
    assert_eq!(psbt.unsigned_tx.lock_time.0, 630_000);
}

#[test]
fn test_create_tx_custom_locktime_compatible_with_cltv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_cltv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .nlocktime(LockTime::from_height(630_000).unwrap());
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.lock_time.0, 630_000);
}

#[test]
#[should_panic(
    expected = "TxBuilder requested timelock of `Blocks(Height(50000))`, but at least `Blocks(Height(100000))` is required to spend from this script"
)]
fn test_create_tx_custom_locktime_incompatible_with_cltv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_cltv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .nlocktime(LockTime::from_height(50000).unwrap());
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_no_rbf_csv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_csv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.input[0].sequence, Sequence(6));
}

#[test]
fn test_create_tx_with_default_rbf_csv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_csv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf();
    let (psbt, _) = builder.finish().unwrap();
    // When CSV is enabled it takes precedence over the rbf value (unless forced by the user).
    // It will be set to the OP_CSV value, in this case 6
    assert_eq!(psbt.unsigned_tx.input[0].sequence, Sequence(6));
}

#[test]
#[should_panic(
    expected = "Cannot enable RBF with nSequence `Sequence(3)` given a required OP_CSV of `Sequence(6)`"
)]
fn test_create_tx_with_custom_rbf_csv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_csv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf_with_sequence(Sequence(3));
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_no_rbf_cltv() {
    let (mut wallet, _) = get_funded_wallet(get_test_single_sig_cltv());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.input[0].sequence, Sequence(0xFFFFFFFE));
}

#[test]
#[should_panic(expected = "Cannot enable RBF with a nSequence >= 0xFFFFFFFE")]
fn test_create_tx_invalid_rbf_sequence() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf_with_sequence(Sequence(0xFFFFFFFE));
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_custom_rbf_sequence() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf_with_sequence(Sequence(0xDEADBEEF));
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.input[0].sequence, Sequence(0xDEADBEEF));
}

#[test]
fn test_create_tx_default_sequence() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.input[0].sequence, Sequence(0xFFFFFFFE));
}

#[test]
#[should_panic(
    expected = "The `change_policy` can be set only if the wallet has a change_descriptor"
)]
fn test_create_tx_change_policy_no_internal() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .do_not_spend_change();
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_drain_wallet_and_drain_to() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.output.len(), 1);
    assert_eq!(
        psbt.unsigned_tx.output[0].value,
        50_000 - details.fee.unwrap_or(0)
    );
}

#[test]
fn test_create_tx_drain_wallet_and_drain_to_and_with_recipient() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap();
    let drain_addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 20_000)
        .drain_to(drain_addr.script_pubkey())
        .drain_wallet();
    let (psbt, details) = builder.finish().unwrap();
    let outputs = psbt.unsigned_tx.output;

    assert_eq!(outputs.len(), 2);
    let main_output = outputs
        .iter()
        .find(|x| x.script_pubkey == addr.script_pubkey())
        .unwrap();
    let drain_output = outputs
        .iter()
        .find(|x| x.script_pubkey == drain_addr.script_pubkey())
        .unwrap();
    assert_eq!(main_output.value, 20_000);
    assert_eq!(drain_output.value, 30_000 - details.fee.unwrap_or(0));
}

#[test]
fn test_create_tx_drain_to_and_utxos() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let utxos: Vec<_> = wallet
        .list_unspent()
        .into_iter()
        .map(|u| u.outpoint)
        .collect();
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .add_utxos(&utxos)
        .unwrap();
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.output.len(), 1);
    assert_eq!(
        psbt.unsigned_tx.output[0].value,
        50_000 - details.fee.unwrap_or(0)
    );
}

#[test]
#[should_panic(expected = "NoRecipients")]
fn test_create_tx_drain_to_no_drain_wallet_no_utxos() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let drain_addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(drain_addr.script_pubkey());
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_default_fee_rate() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, details) = builder.finish().unwrap();

    assert_fee_rate!(psbt, details.fee.unwrap_or(0), FeeRate::default(), @add_signature);
}

#[test]
fn test_create_tx_custom_fee_rate() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .fee_rate(FeeRate::from_sat_per_vb(5.0));
    let (psbt, details) = builder.finish().unwrap();

    assert_fee_rate!(psbt, details.fee.unwrap_or(0), FeeRate::from_sat_per_vb(5.0), @add_signature);
}

#[test]
fn test_create_tx_absolute_fee() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .drain_wallet()
        .fee_absolute(100);
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(details.fee.unwrap_or(0), 100);
    assert_eq!(psbt.unsigned_tx.output.len(), 1);
    assert_eq!(
        psbt.unsigned_tx.output[0].value,
        50_000 - details.fee.unwrap_or(0)
    );
}

#[test]
fn test_create_tx_absolute_zero_fee() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .drain_wallet()
        .fee_absolute(0);
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(details.fee.unwrap_or(0), 0);
    assert_eq!(psbt.unsigned_tx.output.len(), 1);
    assert_eq!(
        psbt.unsigned_tx.output[0].value,
        50_000 - details.fee.unwrap_or(0)
    );
}

#[test]
#[should_panic(expected = "InsufficientFunds")]
fn test_create_tx_absolute_high_fee() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .drain_wallet()
        .fee_absolute(60_000);
    let (_psbt, _details) = builder.finish().unwrap();
}

#[test]
fn test_create_tx_add_change() {
    use bdk::wallet::tx_builder::TxOrdering;

    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .ordering(TxOrdering::Untouched);
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.output.len(), 2);
    assert_eq!(psbt.unsigned_tx.output[0].value, 25_000);
    assert_eq!(
        psbt.unsigned_tx.output[1].value,
        25_000 - details.fee.unwrap_or(0)
    );
}

#[test]
fn test_create_tx_skip_change_dust() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 49_800);
    let (psbt, details) = builder.finish().unwrap();

    // 200 sats of would-be change are below dust and get absorbed into the fee.
    assert_eq!(psbt.unsigned_tx.output.len(), 1);
    assert_eq!(psbt.unsigned_tx.output[0].value, 49_800);
    assert_eq!(details.fee.unwrap_or(0), 200);
}

#[test]
#[should_panic(expected = "InsufficientFunds")]
fn test_create_tx_drain_to_dust_amount() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    // very high fee rate, so that the only output would be below dust
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .drain_wallet()
        .fee_rate(FeeRate::from_sat_per_vb(453.0));
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_ordering_respected() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 30_000)
        .add_recipient(addr.script_pubkey(), 10_000)
        .ordering(bdk::wallet::tx_builder::TxOrdering::Bip69Lexicographic);
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.output.len(), 3);
    assert_eq!(
        psbt.unsigned_tx.output[0].value,
        10_000 - details.fee.unwrap_or(0)
    );
    assert_eq!(psbt.unsigned_tx.output[1].value, 10_000);
    assert_eq!(psbt.unsigned_tx.output[2].value, 30_000);
}

#[test]
fn test_create_tx_default_sighash() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 30_000);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.inputs[0].sighash_type, None);
}

#[test]
fn test_create_tx_custom_sighash() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 30_000)
        .sighash(bitcoin::EcdsaSighashType::Single.into());
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(
        psbt.inputs[0].sighash_type,
        Some(bitcoin::EcdsaSighashType::Single.into())
    );
}

#[test]
fn test_create_tx_input_hd_keypaths() {
    use bitcoin::util::bip32::{DerivationPath, Fingerprint};
    use core::str::FromStr;

    let (mut wallet, _) = get_funded_wallet("wpkh([d34db33f/44'/0'/0']tpubDEnoLuPdBep9bzw5LoGYpsxUQYheRQ9gcgrJhJEcdKFB9cWQRyYmkCyRoTqeD4tJYiVVgt6A3rN6rWn9RYhR9sBsGxji29LYWHuKKbdb1ev/0/*)");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.inputs[0].bip32_derivation.len(), 1);
    assert_eq!(
        psbt.inputs[0].bip32_derivation.values().next().unwrap(),
        &(
            Fingerprint::from_str("d34db33f").unwrap(),
            DerivationPath::from_str("m/44'/0'/0'/0/0").unwrap()
        )
    );
}

#[test]
fn test_create_tx_output_hd_keypaths() {
    use bitcoin::util::bip32::{DerivationPath, Fingerprint};
    use core::str::FromStr;

    let (mut wallet, _) = get_funded_wallet("wpkh([d34db33f/44'/0'/0']tpubDEnoLuPdBep9bzw5LoGYpsxUQYheRQ9gcgrJhJEcdKFB9cWQRyYmkCyRoTqeD4tJYiVVgt6A3rN6rWn9RYhR9sBsGxji29LYWHuKKbdb1ev/0/*)");

    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.outputs[0].bip32_derivation.len(), 1);
    let expected_derivation_path = format!("m/44'/0'/0'/0/{}", addr.index);
    assert_eq!(
        psbt.outputs[0].bip32_derivation.values().next().unwrap(),
        &(
            Fingerprint::from_str("d34db33f").unwrap(),
            DerivationPath::from_str(&expected_derivation_path).unwrap()
        )
    );
}

#[test]
fn test_create_tx_set_redeem_script_p2sh() {
    use bitcoin::hashes::hex::FromHex;

    let (mut wallet, _) =
        get_funded_wallet("sh(pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW))");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(
        psbt.inputs[0].redeem_script,
        Some(Script::from(
            Vec::<u8>::from_hex(
                "21032b0558078bec38694a84933d659303e2575dae7e91685911454115bfd64487e3ac"
            )
            .unwrap()
        ))
    );
    assert_eq!(psbt.inputs[0].witness_script, None);
}

#[test]
fn test_create_tx_set_witness_script_p2wsh() {
    use bitcoin::hashes::hex::FromHex;

    let (mut wallet, _) =
        get_funded_wallet("wsh(pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW))");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.inputs[0].redeem_script, None);
    assert_eq!(
        psbt.inputs[0].witness_script,
        Some(Script::from(
            Vec::<u8>::from_hex(
                "21032b0558078bec38694a84933d659303e2575dae7e91685911454115bfd64487e3ac"
            )
            .unwrap()
        ))
    );
}

#[test]
fn test_create_tx_set_redeem_witness_script_p2wsh_p2sh() {
    use bitcoin::hashes::hex::FromHex;

    let (mut wallet, _) =
        get_funded_wallet("sh(wsh(pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)))");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    let script = Script::from(
        Vec::<u8>::from_hex(
            "21032b0558078bec38694a84933d659303e2575dae7e91685911454115bfd64487e3ac",
        )
        .unwrap(),
    );

    assert_eq!(psbt.inputs[0].redeem_script, Some(script.to_v0_p2wsh()));
    assert_eq!(psbt.inputs[0].witness_script, Some(script));
}

#[test]
fn test_create_tx_non_witness_utxo() {
    let (mut wallet, _) =
        get_funded_wallet("sh(pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW))");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert!(psbt.inputs[0].non_witness_utxo.is_some());
    assert!(psbt.inputs[0].witness_utxo.is_none());
}

#[test]
fn test_create_tx_only_witness_utxo() {
    let (mut wallet, _) =
        get_funded_wallet("wsh(pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW))");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .only_witness_utxo()
        .drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert!(psbt.inputs[0].non_witness_utxo.is_none());
    assert!(psbt.inputs[0].witness_utxo.is_some());
}

#[test]
fn test_create_tx_shwpkh_has_witness_utxo() {
    let (mut wallet, _) =
        get_funded_wallet("sh(wpkh(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW))");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert!(psbt.inputs[0].witness_utxo.is_some());
}

#[test]
fn test_create_tx_both_non_witness_utxo_and_witness_utxo_default() {
    let (mut wallet, _) =
        get_funded_wallet("wsh(pk(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW))");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.drain_to(addr.script_pubkey()).drain_wallet();
    let (psbt, _) = builder.finish().unwrap();

    assert!(psbt.inputs[0].non_witness_utxo.is_some());
    assert!(psbt.inputs[0].witness_utxo.is_some());
}

#[test]
fn test_create_tx_add_utxo() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let small_output_tx = Transaction {
        input: vec![],
        output: vec![TxOut {
            value: 25_000,
            script_pubkey: wallet.get_address(New).address.script_pubkey(),
        }],
        version: 0,
        lock_time: PackedLockTime(0),
    };
    wallet
        .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed)
        .unwrap();

    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 30_000)
        .add_utxo(OutPoint {
            txid: small_output_tx.txid(),
            vout: 0,
        })
        .unwrap();
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(
        psbt.unsigned_tx.input.len(),
        2,
        "should add an additional input since 25_000 < 30_000"
    );
    assert_eq!(details.sent, 75_000, "total should be sum of both inputs");
}

#[test]
#[should_panic(expected = "InsufficientFunds")]
fn test_create_tx_manually_selected_insufficient() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let small_output_tx = Transaction {
        input: vec![],
        output: vec![TxOut {
            value: 25_000,
            script_pubkey: wallet.get_address(New).address.script_pubkey(),
        }],
        version: 0,
        lock_time: PackedLockTime(0),
    };

    wallet
        .insert_tx(small_output_tx.clone(), ConfirmationTime::Unconfirmed)
        .unwrap();

    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 30_000)
        .add_utxo(OutPoint {
            txid: small_output_tx.txid(),
            vout: 0,
        })
        .unwrap()
        .manually_selected_only();
    builder.finish().unwrap();
}

#[test]
#[should_panic(expected = "SpendingPolicyRequired(External)")]
fn test_create_tx_policy_path_required() {
    let (mut wallet, _) = get_funded_wallet(get_test_a_or_b_plus_csv());

    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 30_000);
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_policy_path_no_csv() {
    let descriptors = get_test_wpkh();
    let mut wallet = Wallet::new_no_persist(descriptors, None, Network::Regtest).unwrap();

    let tx = Transaction {
        version: 0,
        lock_time: PackedLockTime(0),
        input: vec![],
        output: vec![TxOut {
            value: 50_000,
            script_pubkey: wallet.get_address(New).script_pubkey(),
        }],
    };
    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();

    let external_policy = wallet.policies(KeychainKind::External).unwrap().unwrap();
    let root_id = external_policy.id;
    // child #0 is just the key "A"
    let path = vec![(root_id, vec![0])].into_iter().collect();

    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 30_000)
        .policy_path(path, KeychainKind::External);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.input[0].sequence, Sequence(0xFFFFFFFF));
}

#[test]
fn test_create_tx_policy_path_use_csv() {
    let (mut wallet, _) = get_funded_wallet(get_test_a_or_b_plus_csv());

    let external_policy = wallet.policies(KeychainKind::External).unwrap().unwrap();
    let root_id = external_policy.id;
    // child #1 is or(pk(B),older(144))
    let path = vec![(root_id, vec![1])].into_iter().collect();

    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 30_000)
        .policy_path(path, KeychainKind::External);
    let (psbt, _) = builder.finish().unwrap();

    assert_eq!(psbt.unsigned_tx.input[0].sequence, Sequence(144));
}

#[test]
fn test_create_tx_global_xpubs_with_origin() {
    use bitcoin::hashes::hex::FromHex;
    use bitcoin::util::bip32;

    let (mut wallet, _) = get_funded_wallet("wpkh([73756c7f/48'/0'/0'/2']tpubDCKxNyM3bLgbEX13Mcd8mYxbVg9ajDkWXMh29hMWBurKfVmBfWAM96QVP3zaUcN51HvkZ3ar4VwP82kC8JZhhux8vFQoJintSpVBwpFvyU3/0/*)");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .add_global_xpubs();
    let (psbt, _) = builder.finish().unwrap();

    let key = bip32::ExtendedPubKey::from_str("tpubDCKxNyM3bLgbEX13Mcd8mYxbVg9ajDkWXMh29hMWBurKfVmBfWAM96QVP3zaUcN51HvkZ3ar4VwP82kC8JZhhux8vFQoJintSpVBwpFvyU3").unwrap();
    let fingerprint = bip32::Fingerprint::from_hex("73756c7f").unwrap();
    let path = bip32::DerivationPath::from_str("m/48'/0'/0'/2'").unwrap();

    assert_eq!(psbt.xpub.len(), 1);
    assert_eq!(psbt.xpub.get(&key), Some(&(fingerprint, path)));
}

#[test]
fn test_add_foreign_utxo() {
    let (mut wallet1, _) = get_funded_wallet(get_test_wpkh());
    let (wallet2, _) =
        get_funded_wallet("wpkh(cVbZ8ovhye9AoAHFsqobCf7LxbXDAECy9Kb8TZdfsDYMZGBUyCnm)");

    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let utxo = wallet2.list_unspent().remove(0);
    let foreign_utxo_satisfaction = wallet2
        .get_descriptor_for_keychain(KeychainKind::External)
        .max_satisfaction_weight()
        .unwrap();

    let psbt_input = psbt::Input {
        witness_utxo: Some(utxo.txout.clone()),
        ..Default::default()
    };

    let mut builder = wallet1.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 60_000)
        .only_witness_utxo()
        .add_foreign_utxo(utxo.outpoint, psbt_input, foreign_utxo_satisfaction)
        .unwrap();
    let (mut psbt, details) = builder.finish().unwrap();

    assert_eq!(
        details.sent - details.received,
        10_000 + details.fee.unwrap_or(0),
        "we should have only net spent ~10_000"
    );

    assert!(
        psbt.unsigned_tx
            .input
            .iter()
            .any(|input| input.previous_output == utxo.outpoint),
        "foreign_utxo should be in there"
    );

    let finished = wallet1
        .sign(
            &mut psbt,
            SignOptions {
                trust_witness_utxo: true,
                ..Default::default()
            },
        )
        .unwrap();

    assert!(
        !finished,
        "only one of the inputs should have been signed so far"
    );

    let finished = wallet2
        .sign(
            &mut psbt,
            SignOptions {
                trust_witness_utxo: true,
                ..Default::default()
            },
        )
        .unwrap();
    assert!(finished, "all the inputs should have been signed now");
}

#[test]
#[should_panic(expected = "Generic(\"Foreign utxo missing witness_utxo or non_witness_utxo\")")]
fn test_add_foreign_utxo_invalid_psbt_input() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let outpoint = wallet.list_unspent()[0].outpoint;
    let foreign_utxo_satisfaction = wallet
        .get_descriptor_for_keychain(KeychainKind::External)
        .max_satisfaction_weight()
        .unwrap();

    let mut builder = wallet.build_tx();
    builder
        .add_foreign_utxo(outpoint, psbt::Input::default(), foreign_utxo_satisfaction)
        .unwrap();
}

#[test]
fn test_add_foreign_utxo_where_outpoint_doesnt_match_psbt_input() {
    let (mut wallet1, txid1) = get_funded_wallet(get_test_wpkh());
    let (wallet2, txid2) =
        get_funded_wallet("wpkh(cVbZ8ovhye9AoAHFsqobCf7LxbXDAECy9Kb8TZdfsDYMZGBUyCnm)");

    let utxo2 = wallet2.list_unspent().remove(0);
    let tx1 = wallet1.get_tx(txid1, true).unwrap().transaction.unwrap();
    let tx2 = wallet2.get_tx(txid2, true).unwrap().transaction.unwrap();

    let satisfaction_weight = wallet2
        .get_descriptor_for_keychain(KeychainKind::External)
        .max_satisfaction_weight()
        .unwrap();

    let mut builder = wallet1.build_tx();
    assert!(
        builder
            .add_foreign_utxo(
                utxo2.outpoint,
                psbt::Input {
                    non_witness_utxo: Some(tx1),
                    ..Default::default()
                },
                satisfaction_weight
            )
            .is_err(),
        "should fail when outpoint doesn't match psbt_input"
    );
    assert!(
        builder
            .add_foreign_utxo(
                utxo2.outpoint,
                psbt::Input {
                    non_witness_utxo: Some(tx2),
                    ..Default::default()
                },
                satisfaction_weight
            )
            .is_ok(),
        "should be ok when outpoint does match psbt_input"
    );
}

#[test]
fn test_add_foreign_utxo_only_witness_utxo() {
    let (mut wallet1, _) = get_funded_wallet(get_test_wpkh());
    let (wallet2, txid2) =
        get_funded_wallet("wpkh(cVbZ8ovhye9AoAHFsqobCf7LxbXDAECy9Kb8TZdfsDYMZGBUyCnm)");
    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let utxo2 = wallet2.list_unspent().remove(0);

    let satisfaction_weight = wallet2
        .get_descriptor_for_keychain(KeychainKind::External)
        .max_satisfaction_weight()
        .unwrap();

    let mut builder = wallet1.build_tx();
    builder.add_recipient(addr.script_pubkey(), 60_000);

    {
        let mut builder = builder.clone();
        let psbt_input = psbt::Input {
            witness_utxo: Some(utxo2.txout.clone()),
            ..Default::default()
        };
        builder
            .add_foreign_utxo(utxo2.outpoint, psbt_input, satisfaction_weight)
            .unwrap();
        assert!(
            builder.finish().is_err(),
            "psbt_input with witness_utxo should fail with only witness_utxo"
        );
    }

    {
        let mut builder = builder.clone();
        let psbt_input = psbt::Input {
            witness_utxo: Some(utxo2.txout.clone()),
            ..Default::default()
        };
        builder
            .only_witness_utxo()
            .add_foreign_utxo(utxo2.outpoint, psbt_input, satisfaction_weight)
            .unwrap();
        assert!(
            builder.finish().is_ok(),
            "psbt_input with just witness_utxo should succeed when `only_witness_utxo` is enabled"
        );
    }

    {
        let mut builder = builder.clone();
        let tx2 = wallet2.get_tx(txid2, true).unwrap().transaction.unwrap();
        let psbt_input = psbt::Input {
            non_witness_utxo: Some(tx2),
            ..Default::default()
        };
        builder
            .add_foreign_utxo(utxo2.outpoint, psbt_input, satisfaction_weight)
            .unwrap();
        assert!(
            builder.finish().is_ok(),
            "psbt_input with non_witness_utxo should succeed by default"
        );
    }
}

#[test]
fn test_get_psbt_input() {
    // this should grab a known good utxo and set the input
    let (wallet, _) = get_funded_wallet(get_test_wpkh());
    for utxo in wallet.list_unspent() {
        let psbt_input = wallet.get_psbt_input(utxo, None, false).unwrap();
        assert!(psbt_input.witness_utxo.is_some() || psbt_input.non_witness_utxo.is_some());
    }
}

#[test]
#[should_panic(
    expected = "MissingKeyOrigin(\"tpubDCKxNyM3bLgbEX13Mcd8mYxbVg9ajDkWXMh29hMWBurKfVmBfWAM96QVP3zaUcN51HvkZ3ar4VwP82kC8JZhhux8vFQoJintSpVBwpFvyU3\")"
)]
fn test_create_tx_global_xpubs_origin_missing() {
    let (mut wallet, _) = get_funded_wallet("wpkh(tpubDCKxNyM3bLgbEX13Mcd8mYxbVg9ajDkWXMh29hMWBurKfVmBfWAM96QVP3zaUcN51HvkZ3ar4VwP82kC8JZhhux8vFQoJintSpVBwpFvyU3/0/*)");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .add_global_xpubs();
    builder.finish().unwrap();
}

#[test]
fn test_create_tx_global_xpubs_master_without_origin() {
    use bitcoin::hashes::hex::FromHex;
    use bitcoin::util::bip32;

    let (mut wallet, _) = get_funded_wallet("wpkh(tpubD6NzVbkrYhZ4Y55A58Gv9RSNF5hy84b5AJqYy7sCcjFrkcLpPre8kmgfit6kY1Zs3BLgeypTDBZJM222guPpdz7Cup5yzaMu62u7mYGbwFL/0/*)");
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .add_global_xpubs();
    let (psbt, _) = builder.finish().unwrap();

    let key = bip32::ExtendedPubKey::from_str("tpubD6NzVbkrYhZ4Y55A58Gv9RSNF5hy84b5AJqYy7sCcjFrkcLpPre8kmgfit6kY1Zs3BLgeypTDBZJM222guPpdz7Cup5yzaMu62u7mYGbwFL").unwrap();
    let fingerprint = bip32::Fingerprint::from_hex("997a323b").unwrap();

    assert_eq!(psbt.xpub.len(), 1);
    assert_eq!(
        psbt.xpub.get(&key),
        Some(&(fingerprint, bip32::DerivationPath::default()))
    );
}

#[test]
#[should_panic(expected = "IrreplaceableTransaction")]
fn test_bump_fee_irreplaceable_tx() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, _) = builder.finish().unwrap();

    let tx = psbt.extract_tx();
    let txid = tx.txid();
    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();
    wallet.build_fee_bump(txid).unwrap().finish().unwrap();
}

#[test]
#[should_panic(expected = "TransactionConfirmed")]
fn test_bump_fee_confirmed_tx() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder.add_recipient(addr.script_pubkey(), 25_000);
    let (psbt, _) = builder.finish().unwrap();

    let tx = psbt.extract_tx();
    let txid = tx.txid();

    wallet
        .insert_tx(
            tx,
            ConfirmationTime::Confirmed {
                height: 42,
                time: 42_000,
            },
        )
        .unwrap();

    wallet.build_fee_bump(txid).unwrap().finish().unwrap();
}

#[test]
#[should_panic(expected = "FeeRateTooLow")]
fn test_bump_fee_low_fee_rate() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf();
    let (psbt, _) = builder.finish().unwrap();

    let tx = psbt.extract_tx();
    let txid = tx.txid();

    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();

    let mut builder = wallet.build_fee_bump(txid).unwrap();
    builder.fee_rate(FeeRate::from_sat_per_vb(1.0));
    builder.finish().unwrap();
}

#[test]
#[should_panic(expected = "FeeTooLow")]
fn test_bump_fee_low_abs() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf();
    let (psbt, _) = builder.finish().unwrap();

    let tx = psbt.extract_tx();
    let txid = tx.txid();

    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();

    let mut builder = wallet.build_fee_bump(txid).unwrap();
    builder.fee_absolute(10);
    builder.finish().unwrap();
}

#[test]
#[should_panic(expected = "FeeTooLow")]
fn test_bump_fee_zero_abs() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = wallet.get_address(New);
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf();
    let (psbt, _) = builder.finish().unwrap();

    let tx = psbt.extract_tx();
    let txid = tx.txid();
    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();

    let mut builder = wallet.build_fee_bump(txid).unwrap();
    builder.fee_absolute(0);
    builder.finish().unwrap();
}

#[test]
fn test_bump_fee_reduce_change() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder
        .add_recipient(addr.script_pubkey(), 25_000)
        .enable_rbf();
    let (psbt, original_details) = builder.finish().unwrap();
    let tx = psbt.extract_tx();
    let txid = tx.txid();
    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();

    // Bump by fee rate: the extra fee must come out of the change output.
    let mut builder = wallet.build_fee_bump(txid).unwrap();
    builder.fee_rate(FeeRate::from_sat_per_vb(2.5)).enable_rbf();
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(details.sent, original_details.sent);
    assert_eq!(
        details.received + details.fee.unwrap_or(0),
        original_details.received + original_details.fee.unwrap_or(0)
    );
    assert!(details.fee.unwrap_or(0) > original_details.fee.unwrap_or(0));

    let tx = &psbt.unsigned_tx;
    assert_eq!(tx.output.len(), 2);
    assert_eq!(
        tx.output
            .iter()
            .find(|txout| txout.script_pubkey == addr.script_pubkey())
            .unwrap()
            .value,
        25_000
    );
    assert_eq!(
        tx.output
            .iter()
            .find(|txout| txout.script_pubkey != addr.script_pubkey())
            .unwrap()
            .value,
        details.received
    );

    assert_fee_rate!(psbt, details.fee.unwrap_or(0), FeeRate::from_sat_per_vb(2.5), @add_signature);

    // Bump by absolute fee: same invariants, exact fee of 200 sats.
    let mut builder = wallet.build_fee_bump(txid).unwrap();
    builder.fee_absolute(200);
    builder.enable_rbf();
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(details.sent, original_details.sent);
    assert_eq!(
        details.received + details.fee.unwrap_or(0),
        original_details.received + original_details.fee.unwrap_or(0)
    );
    assert!(
        details.fee.unwrap_or(0) > original_details.fee.unwrap_or(0),
        "{} > {}",
        details.fee.unwrap_or(0),
        original_details.fee.unwrap_or(0)
    );

    let tx = &psbt.unsigned_tx;
    assert_eq!(tx.output.len(), 2);
    assert_eq!(
        tx.output
            .iter()
            .find(|txout| txout.script_pubkey == addr.script_pubkey())
            .unwrap()
            .value,
        25_000
    );
    assert_eq!(
        tx.output
            .iter()
            .find(|txout| txout.script_pubkey != addr.script_pubkey())
            .unwrap()
            .value,
        details.received
    );

    assert_eq!(details.fee.unwrap_or(0), 200);
}

#[test]
fn test_bump_fee_reduce_single_recipient() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .drain_wallet()
        .enable_rbf();
    let (psbt, original_details) = builder.finish().unwrap();
    let tx = psbt.extract_tx();
    let txid = tx.txid();
    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();

    let mut builder = wallet.build_fee_bump(txid).unwrap();
    builder
        .fee_rate(FeeRate::from_sat_per_vb(2.5))
        .allow_shrinking(addr.script_pubkey())
        .unwrap();
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(details.sent, original_details.sent);
    assert!(details.fee.unwrap_or(0) > original_details.fee.unwrap_or(0));

    let tx = &psbt.unsigned_tx;
    assert_eq!(tx.output.len(), 1);
    assert_eq!(tx.output[0].value + details.fee.unwrap_or(0), details.sent);

    assert_fee_rate!(psbt, details.fee.unwrap_or(0), FeeRate::from_sat_per_vb(2.5), @add_signature);
}

#[test]
fn test_bump_fee_absolute_reduce_single_recipient() {
    let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
    let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
    let mut builder = wallet.build_tx();
    builder
        .drain_to(addr.script_pubkey())
        .drain_wallet()
        .enable_rbf();
    let (psbt, original_details) = builder.finish().unwrap();
    let tx = psbt.extract_tx();
    let txid = tx.txid();
    wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();

    let mut builder = wallet.build_fee_bump(txid).unwrap();
    builder
        .allow_shrinking(addr.script_pubkey())
        .unwrap()
        .fee_absolute(300);
    let (psbt, details) = builder.finish().unwrap();

    assert_eq!(details.sent, original_details.sent);
    // NOTE(review): the tail of this test lies beyond the reviewed chunk; the
    // assertions below mirror the fee-rate variant above — confirm against
    // the full file.
    assert!(details.fee.unwrap_or(0) > original_details.fee.unwrap_or(0));

    let tx = &psbt.unsigned_tx;
    assert_eq!(tx.output.len(), 1);
    assert_eq!(tx.output[0].value + details.fee.unwrap_or(0), details.sent);

    assert_eq!(details.fee.unwrap_or(0), 300);
}
assert!(details.fee.unwrap_or(0) > original_details.fee.unwrap_or(0)); + + let tx = &psbt.unsigned_tx; + assert_eq!(tx.output.len(), 1); + assert_eq!(tx.output[0].value + details.fee.unwrap_or(0), details.sent); + + assert_eq!(details.fee.unwrap_or(0), 300); +} + +#[test] +fn test_bump_fee_drain_wallet() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + // receive an extra tx so that our wallet has two utxos. + let tx = Transaction { + version: 1, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 25_000, + script_pubkey: wallet.get_address(New).script_pubkey(), + }], + }; + wallet + .insert_tx( + tx.clone(), + ConfirmationTime::Confirmed { + height: wallet.latest_checkpoint().unwrap().height, + time: 42_000, + }, + ) + .unwrap(); + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + + let mut builder = wallet.build_tx(); + builder + .drain_to(addr.script_pubkey()) + .add_utxo(OutPoint { + txid: tx.txid(), + vout: 0, + }) + .unwrap() + .manually_selected_only() + .enable_rbf(); + let (psbt, original_details) = builder.finish().unwrap(); + let tx = psbt.extract_tx(); + let txid = tx.txid(); + wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + assert_eq!(original_details.sent, 25_000); + + // for the new feerate, it should be enough to reduce the output, but since we specify + // `drain_wallet` we expect to spend everything + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder + .drain_wallet() + .allow_shrinking(addr.script_pubkey()) + .unwrap() + .fee_rate(FeeRate::from_sat_per_vb(5.0)); + let (_, details) = builder.finish().unwrap(); + + assert_eq!(details.sent, 75_000); +} + +#[test] +#[should_panic(expected = "InsufficientFunds")] +fn test_bump_fee_remove_output_manually_selected_only() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + // receive an extra tx so that our wallet has two utxos. 
then we manually pick only one of + // them, and make sure that `bump_fee` doesn't try to add more. This fails because we've + // told the wallet it's not allowed to add more inputs AND it can't reduce the value of the + // existing output. In other words, bump_fee + manually_selected_only is always an error + // unless you've also set "allow_shrinking" OR there is a change output. + let init_tx = Transaction { + version: 1, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + script_pubkey: wallet.get_address(New).script_pubkey(), + value: 25_000, + }], + }; + wallet + .insert_tx(init_tx.clone(), wallet.transactions().last().unwrap().0) + .unwrap(); + let outpoint = OutPoint { + txid: init_tx.txid(), + vout: 0, + }; + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx(); + builder + .drain_to(addr.script_pubkey()) + .add_utxo(outpoint) + .unwrap() + .manually_selected_only() + .enable_rbf(); + let (psbt, original_details) = builder.finish().unwrap(); + let tx = psbt.extract_tx(); + let txid = tx.txid(); + wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + assert_eq!(original_details.sent, 25_000); + + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder + .manually_selected_only() + .fee_rate(FeeRate::from_sat_per_vb(255.0)); + builder.finish().unwrap(); +} + +#[test] +fn test_bump_fee_add_input() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let init_tx = Transaction { + version: 1, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + script_pubkey: wallet.get_address(New).script_pubkey(), + value: 25_000, + }], + }; + wallet + .insert_tx(init_tx, wallet.transactions().last().unwrap().0) + .unwrap(); + + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection); + builder + .add_recipient(addr.script_pubkey(), 45_000) 
+ .enable_rbf(); + let (psbt, original_details) = builder.finish().unwrap(); + let tx = psbt.extract_tx(); + let txid = tx.txid(); + wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder.fee_rate(FeeRate::from_sat_per_vb(50.0)); + let (psbt, details) = builder.finish().unwrap(); + + assert_eq!(details.sent, original_details.sent + 25_000); + assert_eq!(details.fee.unwrap_or(0) + details.received, 30_000); + + let tx = &psbt.unsigned_tx; + assert_eq!(tx.input.len(), 2); + assert_eq!(tx.output.len(), 2); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey == addr.script_pubkey()) + .unwrap() + .value, + 45_000 + ); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey != addr.script_pubkey()) + .unwrap() + .value, + details.received + ); + + assert_fee_rate!(psbt, details.fee.unwrap_or(0), FeeRate::from_sat_per_vb(50.0), @add_signature); +} + +#[test] +fn test_bump_fee_absolute_add_input() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + receive_output_in_latest_block(&mut wallet, 25_000); + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection); + builder + .add_recipient(addr.script_pubkey(), 45_000) + .enable_rbf(); + let (psbt, original_details) = builder.finish().unwrap(); + let tx = psbt.extract_tx(); + let txid = tx.txid(); + wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder.fee_absolute(6_000); + let (psbt, details) = builder.finish().unwrap(); + + assert_eq!(details.sent, original_details.sent + 25_000); + assert_eq!(details.fee.unwrap_or(0) + details.received, 30_000); + + let tx = &psbt.unsigned_tx; + assert_eq!(tx.input.len(), 2); + assert_eq!(tx.output.len(), 2); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey == 
addr.script_pubkey()) + .unwrap() + .value, + 45_000 + ); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey != addr.script_pubkey()) + .unwrap() + .value, + details.received + ); + + assert_eq!(details.fee.unwrap_or(0), 6_000); +} + +#[test] +fn test_bump_fee_no_change_add_input_and_change() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let op = receive_output_in_latest_block(&mut wallet, 25_000); + + // initially make a tx without change by using `drain_to` + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx(); + builder + .drain_to(addr.script_pubkey()) + .add_utxo(op) + .unwrap() + .manually_selected_only() + .enable_rbf(); + let (psbt, original_details) = builder.finish().unwrap(); + + let tx = psbt.extract_tx(); + let txid = tx.txid(); + wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + + // now bump the fees without using `allow_shrinking`. the wallet should add an + // extra input and a change output, and leave the original output untouched + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder.fee_rate(FeeRate::from_sat_per_vb(50.0)); + let (psbt, details) = builder.finish().unwrap(); + + let original_send_all_amount = original_details.sent - original_details.fee.unwrap_or(0); + assert_eq!(details.sent, original_details.sent + 50_000); + assert_eq!( + details.received, + 75_000 - original_send_all_amount - details.fee.unwrap_or(0) + ); + + let tx = &psbt.unsigned_tx; + assert_eq!(tx.input.len(), 2); + assert_eq!(tx.output.len(), 2); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey == addr.script_pubkey()) + .unwrap() + .value, + original_send_all_amount + ); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey != addr.script_pubkey()) + .unwrap() + .value, + 75_000 - original_send_all_amount - details.fee.unwrap_or(0) + ); + + assert_fee_rate!(psbt, details.fee.unwrap_or(0), 
FeeRate::from_sat_per_vb(50.0), @add_signature);
+}
+
+#[test]
+fn test_bump_fee_add_input_change_dust() {
+ let (mut wallet, _) = get_funded_wallet(get_test_wpkh());
+ receive_output_in_latest_block(&mut wallet, 25_000);
+ let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
+ let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection);
+ builder
+ .add_recipient(addr.script_pubkey(), 45_000)
+ .enable_rbf();
+ let (psbt, original_details) = builder.finish().unwrap();
+ let mut tx = psbt.extract_tx();
+ for txin in &mut tx.input {
+ txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // to get realistic weight
+ }
+ let original_tx_weight = tx.weight();
+ assert_eq!(tx.input.len(), 1);
+ assert_eq!(tx.output.len(), 2);
+ let txid = tx.txid();
+ wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap();
+
+ let mut builder = wallet.build_fee_bump(txid).unwrap();
+ // We set a fee high enough that during rbf we are forced to add
+ // a new input and also that we have to remove the change
+ // that we had previously
+
+ // We calculate the new weight as:
+ // original weight
+ // + extra input weight: 160 WU = (32 (prevout) + 4 (vout) + 4 (nsequence)) * 4
+ // + input satisfaction weight: 112 WU = 106 (witness) + 2 (witness len) + (1 (script len)) * 4
+ // - change output weight: 124 WU = (8 (value) + 1 (script len) + 22 (script)) * 4
+ let new_tx_weight = original_tx_weight + 160 + 112 - 124;
+ // two inputs (50k, 25k) and one output (45k) - epsilon
+ // We use epsilon here to avoid asking for a slightly too high feerate
+ let fee_abs = 50_000 + 25_000 - 45_000 - 10;
+ builder.fee_rate(FeeRate::from_wu(fee_abs, new_tx_weight));
+ let (psbt, details) = builder.finish().unwrap();
+
+ assert_eq!(
+ original_details.received,
+ 5_000 - original_details.fee.unwrap_or(0)
+ );
+
+ assert_eq!(details.sent, original_details.sent + 25_000);
+ assert_eq!(details.fee.unwrap_or(0), 30_000);
+ assert_eq!(details.received, 0);
+
+ let 
tx = &psbt.unsigned_tx; + assert_eq!(tx.input.len(), 2); + assert_eq!(tx.output.len(), 1); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey == addr.script_pubkey()) + .unwrap() + .value, + 45_000 + ); + + assert_fee_rate!(psbt, details.fee.unwrap_or(0), FeeRate::from_sat_per_vb(140.0), @dust_change, @add_signature); +} + +#[test] +fn test_bump_fee_force_add_input() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let incoming_op = receive_output_in_latest_block(&mut wallet, 25_000); + + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection); + builder + .add_recipient(addr.script_pubkey(), 45_000) + .enable_rbf(); + let (psbt, original_details) = builder.finish().unwrap(); + let mut tx = psbt.extract_tx(); + let txid = tx.txid(); + for txin in &mut tx.input { + txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature + } + wallet + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed) + .unwrap(); + // the new fee_rate is low enough that just reducing the change would be fine, but we force + // the addition of an extra input with `add_utxo()` + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder + .add_utxo(incoming_op) + .unwrap() + .fee_rate(FeeRate::from_sat_per_vb(5.0)); + let (psbt, details) = builder.finish().unwrap(); + + assert_eq!(details.sent, original_details.sent + 25_000); + assert_eq!(details.fee.unwrap_or(0) + details.received, 30_000); + + let tx = &psbt.unsigned_tx; + assert_eq!(tx.input.len(), 2); + assert_eq!(tx.output.len(), 2); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey == addr.script_pubkey()) + .unwrap() + .value, + 45_000 + ); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey != addr.script_pubkey()) + .unwrap() + .value, + details.received + ); + + assert_fee_rate!(psbt, details.fee.unwrap_or(0), 
FeeRate::from_sat_per_vb(5.0), @add_signature); +} + +#[test] +fn test_bump_fee_absolute_force_add_input() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let incoming_op = receive_output_in_latest_block(&mut wallet, 25_000); + + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx().coin_selection(LargestFirstCoinSelection); + builder + .add_recipient(addr.script_pubkey(), 45_000) + .enable_rbf(); + let (psbt, original_details) = builder.finish().unwrap(); + let mut tx = psbt.extract_tx(); + let txid = tx.txid(); + // skip saving the new utxos, we know they can't be used anyways + for txin in &mut tx.input { + txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature + } + wallet + .insert_tx(tx.clone(), ConfirmationTime::Unconfirmed) + .unwrap(); + + // the new fee_rate is low enough that just reducing the change would be fine, but we force + // the addition of an extra input with `add_utxo()` + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder.add_utxo(incoming_op).unwrap().fee_absolute(250); + let (psbt, details) = builder.finish().unwrap(); + + assert_eq!(details.sent, original_details.sent + 25_000); + assert_eq!(details.fee.unwrap_or(0) + details.received, 30_000); + + let tx = &psbt.unsigned_tx; + assert_eq!(tx.input.len(), 2); + assert_eq!(tx.output.len(), 2); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey == addr.script_pubkey()) + .unwrap() + .value, + 45_000 + ); + assert_eq!( + tx.output + .iter() + .find(|txout| txout.script_pubkey != addr.script_pubkey()) + .unwrap() + .value, + details.received + ); + + assert_eq!(details.fee.unwrap_or(0), 250); +} + +#[test] +#[should_panic(expected = "InsufficientFunds")] +fn test_bump_fee_unconfirmed_inputs_only() { + // We try to bump the fee, but: + // - We can't reduce the change, as we have no change + // - All our UTXOs are unconfirmed + // So, we fail with "InsufficientFunds", 
as per RBF rule 2: + // The replacement transaction may only include an unconfirmed input + // if that input was included in one of the original transactions. + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx(); + builder + .drain_wallet() + .drain_to(addr.script_pubkey()) + .enable_rbf(); + let (psbt, __details) = builder.finish().unwrap(); + // Now we receive one transaction with 0 confirmations. We won't be able to use that for + // fee bumping, as it's still unconfirmed! + receive_output(&mut wallet, 25_000, TxHeight::Unconfirmed); + let mut tx = psbt.extract_tx(); + let txid = tx.txid(); + for txin in &mut tx.input { + txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature + } + wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder.fee_rate(FeeRate::from_sat_per_vb(25.0)); + builder.finish().unwrap(); +} + +#[test] +fn test_bump_fee_unconfirmed_input() { + // We create a tx draining the wallet and spending one confirmed + // and one unconfirmed UTXO. We check that we can fee bump normally + // (BIP125 rule 2 only apply to newly added unconfirmed input, you can + // always fee bump with an unconfirmed input if it was included in the + // original transaction) + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + // We receive a tx with 0 confirmations, which will be used as an input + // in the drain tx. 
+ receive_output(&mut wallet, 25_000, TxHeight::Unconfirmed); + let mut builder = wallet.build_tx(); + builder + .drain_wallet() + .drain_to(addr.script_pubkey()) + .enable_rbf(); + let (psbt, _) = builder.finish().unwrap(); + let mut tx = psbt.extract_tx(); + let txid = tx.txid(); + for txin in &mut tx.input { + txin.witness.push([0x00; P2WPKH_FAKE_WITNESS_SIZE]); // fake signature + } + wallet.insert_tx(tx, ConfirmationTime::Unconfirmed).unwrap(); + + let mut builder = wallet.build_fee_bump(txid).unwrap(); + builder + .fee_rate(FeeRate::from_sat_per_vb(15.0)) + .allow_shrinking(addr.script_pubkey()) + .unwrap(); + builder.finish().unwrap(); +} + +#[test] +fn test_fee_amount_negative_drain_val() { + // While building the transaction, bdk would calculate the drain_value + // as + // current_delta - fee_amount - drain_fee + // using saturating_sub, meaning that if the result would end up negative, + // it'll remain to zero instead. + // This caused a bug in master where we would calculate the wrong fee + // for a transaction. 
+ // See https://github.com/bitcoindevkit/bdk/issues/660 + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let send_to = Address::from_str("tb1ql7w62elx9ucw4pj5lgw4l028hmuw80sndtntxt").unwrap(); + let fee_rate = FeeRate::from_sat_per_vb(2.01); + let incoming_op = receive_output_in_latest_block(&mut wallet, 8859); + + let mut builder = wallet.build_tx(); + builder + .add_recipient(send_to.script_pubkey(), 8630) + .add_utxo(incoming_op) + .unwrap() + .enable_rbf() + .fee_rate(fee_rate); + let (psbt, details) = builder.finish().unwrap(); + + assert!(psbt.inputs.len() == 1); + assert_fee_rate!(psbt, details.fee.unwrap_or(0), fee_rate, @add_signature); +} + +#[test] +fn test_sign_single_xprv() { + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let extracted = psbt.extract_tx(); + assert_eq!(extracted.input[0].witness.len(), 2); +} + +#[test] +fn test_sign_single_xprv_with_master_fingerprint_and_path() { + let (mut wallet, _) = get_funded_wallet("wpkh([d34db33f/84h/1h/0h]tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let extracted = psbt.extract_tx(); + assert_eq!(extracted.input[0].witness.len(), 2); +} + +#[test] +fn test_sign_single_xprv_bip44_path() { + let (mut wallet, _) = 
get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/44'/0'/0'/0/*)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let extracted = psbt.extract_tx(); + assert_eq!(extracted.input[0].witness.len(), 2); +} + +#[test] +fn test_sign_single_xprv_sh_wpkh() { + let (mut wallet, _) = get_funded_wallet("sh(wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*))"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let extracted = psbt.extract_tx(); + assert_eq!(extracted.input[0].witness.len(), 2); +} + +#[test] +fn test_sign_single_wif() { + let (mut wallet, _) = + get_funded_wallet("wpkh(cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let extracted = psbt.extract_tx(); + assert_eq!(extracted.input[0].witness.len(), 2); +} + +#[test] +fn test_sign_single_xprv_no_hd_keypaths() { + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut 
psbt, _) = builder.finish().unwrap(); + + psbt.inputs[0].bip32_derivation.clear(); + assert_eq!(psbt.inputs[0].bip32_derivation.len(), 0); + + let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); + assert!(finalized); + + let extracted = psbt.extract_tx(); + assert_eq!(extracted.input[0].witness.len(), 2); +} + +#[test] +fn test_include_output_redeem_witness_script() { + let (mut wallet, _) = get_funded_wallet("sh(wsh(multi(1,cVpPVruEDdmutPzisEsYvtST1usBR3ntr8pXSyt6D2YYqXRyPcFW,cRjo6jqfVNP33HhSS76UhXETZsGTZYx8FMFvR9kpbtCSV1PmdZdu)))"); + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx(); + builder + .add_recipient(addr.script_pubkey(), 45_000) + .include_output_redeem_witness_script(); + let (psbt, _) = builder.finish().unwrap(); + + // p2sh-p2wsh transaction should contain both witness and redeem scripts + assert!(psbt + .outputs + .iter() + .any(|output| output.redeem_script.is_some() && output.witness_script.is_some())); +} + +#[test] +fn test_signing_only_one_of_multiple_inputs() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx(); + builder + .add_recipient(addr.script_pubkey(), 45_000) + .include_output_redeem_witness_script(); + let (mut psbt, _) = builder.finish().unwrap(); + + // add another input to the psbt that is at least passable. 
+ let dud_input = bitcoin::util::psbt::Input { + witness_utxo: Some(TxOut { + value: 100_000, + script_pubkey: miniscript::Descriptor::::from_str( + "wpkh(025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357)", + ) + .unwrap() + .script_pubkey(), + }), + ..Default::default() + }; + + psbt.inputs.push(dud_input); + psbt.unsigned_tx.input.push(bitcoin::TxIn::default()); + let is_final = wallet + .sign( + &mut psbt, + SignOptions { + trust_witness_utxo: true, + ..Default::default() + }, + ) + .unwrap(); + assert!( + !is_final, + "shouldn't be final since we can't sign one of the inputs" + ); + assert!( + psbt.inputs[0].final_script_witness.is_some(), + "should finalized input it signed" + ) +} + +#[test] +fn test_remove_partial_sigs_after_finalize_sign_option() { + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + + for remove_partial_sigs in &[true, false] { + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let mut psbt = builder.finish().unwrap().0; + + assert!(wallet + .sign( + &mut psbt, + SignOptions { + remove_partial_sigs: *remove_partial_sigs, + ..Default::default() + }, + ) + .unwrap()); + + psbt.inputs.iter().for_each(|input| { + if *remove_partial_sigs { + assert!(input.partial_sigs.is_empty()) + } else { + assert!(!input.partial_sigs.is_empty()) + } + }); + } +} + +#[test] +fn test_try_finalize_sign_option() { + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + + for try_finalize in &[true, false] { + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let mut psbt = builder.finish().unwrap().0; + + let finalized = wallet + .sign( + &mut psbt, + 
SignOptions { + try_finalize: *try_finalize, + ..Default::default() + }, + ) + .unwrap(); + + psbt.inputs.iter().for_each(|input| { + if *try_finalize { + assert!(finalized); + assert!(input.final_script_sig.is_some()); + assert!(input.final_script_witness.is_some()); + } else { + assert!(!finalized); + assert!(input.final_script_sig.is_none()); + assert!(input.final_script_witness.is_none()); + } + }); + } +} + +#[test] +fn test_sign_nonstandard_sighash() { + let sighash = EcdsaSighashType::NonePlusAnyoneCanPay; + + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder + .drain_to(addr.script_pubkey()) + .sighash(sighash.into()) + .drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + let result = wallet.sign(&mut psbt, Default::default()); + assert!( + result.is_err(), + "Signing should have failed because the TX uses non-standard sighashes" + ); + assert_matches!( + result, + Err(bdk::Error::Signer(SignerError::NonStandardSighash)), + "Signing failed with the wrong error type" + ); + + // try again after opting-in + let result = wallet.sign( + &mut psbt, + SignOptions { + allow_all_sighashes: true, + ..Default::default() + }, + ); + assert!(result.is_ok(), "Signing should have worked"); + assert!( + result.unwrap(), + "Should finalize the input since we can produce signatures" + ); + + let extracted = psbt.extract_tx(); + assert_eq!( + *extracted.input[0].witness.to_vec()[0].last().unwrap(), + sighash.to_u32() as u8, + "The signature should have been made with the right sighash" + ); +} + +#[test] +fn test_unused_address() { + let mut wallet = Wallet::new_no_persist("wpkh(tpubEBr4i6yk5nf5DAaJpsi9N2pPYBeJ7fZ5Z9rmN4977iYLCGco1VyjB9tvvuvYtfZzjD5A8igzgw3HeWeeKFmanHYqksqZXYXGsw5zjnj7KM9/*)", + None, Network::Testnet).unwrap(); + + assert_eq!( + 
wallet.get_address(LastUnused).to_string(), + "tb1q6yn66vajcctph75pvylgkksgpp6nq04ppwct9a" + ); + assert_eq!( + wallet.get_address(LastUnused).to_string(), + "tb1q6yn66vajcctph75pvylgkksgpp6nq04ppwct9a" + ); +} + +#[test] +fn test_next_unused_address() { + let descriptor = "wpkh(tpubEBr4i6yk5nf5DAaJpsi9N2pPYBeJ7fZ5Z9rmN4977iYLCGco1VyjB9tvvuvYtfZzjD5A8igzgw3HeWeeKFmanHYqksqZXYXGsw5zjnj7KM9/*)"; + let mut wallet = Wallet::new_no_persist(descriptor, None, Network::Testnet).unwrap(); + assert_eq!(wallet.derivation_index(KeychainKind::External), None); + + assert_eq!( + wallet.get_address(LastUnused).to_string(), + "tb1q6yn66vajcctph75pvylgkksgpp6nq04ppwct9a" + ); + assert_eq!(wallet.derivation_index(KeychainKind::External), Some(0)); + assert_eq!( + wallet.get_address(LastUnused).to_string(), + "tb1q6yn66vajcctph75pvylgkksgpp6nq04ppwct9a" + ); + assert_eq!(wallet.derivation_index(KeychainKind::External), Some(0)); + + // use the above address + receive_output_in_latest_block(&mut wallet, 25_000); + + assert_eq!( + wallet.get_address(LastUnused).to_string(), + "tb1q4er7kxx6sssz3q7qp7zsqsdx4erceahhax77d7" + ); + assert_eq!(wallet.derivation_index(KeychainKind::External), Some(1)); +} + +#[test] +fn test_peek_address_at_index() { + let mut wallet = Wallet::new_no_persist("wpkh(tpubEBr4i6yk5nf5DAaJpsi9N2pPYBeJ7fZ5Z9rmN4977iYLCGco1VyjB9tvvuvYtfZzjD5A8igzgw3HeWeeKFmanHYqksqZXYXGsw5zjnj7KM9/*)", + None, Network::Testnet).unwrap(); + + assert_eq!( + wallet.get_address(Peek(1)).to_string(), + "tb1q4er7kxx6sssz3q7qp7zsqsdx4erceahhax77d7" + ); + + assert_eq!( + wallet.get_address(Peek(0)).to_string(), + "tb1q6yn66vajcctph75pvylgkksgpp6nq04ppwct9a" + ); + + assert_eq!( + wallet.get_address(Peek(2)).to_string(), + "tb1qzntf2mqex4ehwkjlfdyy3ewdlk08qkvkvrz7x2" + ); + + // current new address is not affected + assert_eq!( + wallet.get_address(New).to_string(), + "tb1q6yn66vajcctph75pvylgkksgpp6nq04ppwct9a" + ); + + assert_eq!( + wallet.get_address(New).to_string(), + 
"tb1q4er7kxx6sssz3q7qp7zsqsdx4erceahhax77d7" + ); +} + +#[test] +fn test_peek_address_at_index_not_derivable() { + let mut wallet = Wallet::new_no_persist("wpkh(tpubEBr4i6yk5nf5DAaJpsi9N2pPYBeJ7fZ5Z9rmN4977iYLCGco1VyjB9tvvuvYtfZzjD5A8igzgw3HeWeeKFmanHYqksqZXYXGsw5zjnj7KM9/1)", + None, Network::Testnet).unwrap(); + + assert_eq!( + wallet.get_address(Peek(1)).to_string(), + "tb1q4er7kxx6sssz3q7qp7zsqsdx4erceahhax77d7" + ); + + assert_eq!( + wallet.get_address(Peek(0)).to_string(), + "tb1q4er7kxx6sssz3q7qp7zsqsdx4erceahhax77d7" + ); + + assert_eq!( + wallet.get_address(Peek(2)).to_string(), + "tb1q4er7kxx6sssz3q7qp7zsqsdx4erceahhax77d7" + ); +} + +#[test] +fn test_returns_index_and_address() { + let mut wallet = Wallet::new_no_persist("wpkh(tpubEBr4i6yk5nf5DAaJpsi9N2pPYBeJ7fZ5Z9rmN4977iYLCGco1VyjB9tvvuvYtfZzjD5A8igzgw3HeWeeKFmanHYqksqZXYXGsw5zjnj7KM9/*)", + None, Network::Testnet).unwrap(); + + // new index 0 + assert_eq!( + wallet.get_address(New), + AddressInfo { + index: 0, + address: Address::from_str("tb1q6yn66vajcctph75pvylgkksgpp6nq04ppwct9a").unwrap(), + keychain: KeychainKind::External, + } + ); + + // new index 1 + assert_eq!( + wallet.get_address(New), + AddressInfo { + index: 1, + address: Address::from_str("tb1q4er7kxx6sssz3q7qp7zsqsdx4erceahhax77d7").unwrap(), + keychain: KeychainKind::External, + } + ); + + // peek index 25 + assert_eq!( + wallet.get_address(Peek(25)), + AddressInfo { + index: 25, + address: Address::from_str("tb1qsp7qu0knx3sl6536dzs0703u2w2ag6ppl9d0c2").unwrap(), + keychain: KeychainKind::External, + } + ); + + // new index 2 + assert_eq!( + wallet.get_address(New), + AddressInfo { + index: 2, + address: Address::from_str("tb1qzntf2mqex4ehwkjlfdyy3ewdlk08qkvkvrz7x2").unwrap(), + keychain: KeychainKind::External, + } + ); +} + +#[test] +fn test_sending_to_bip350_bech32m_address() { + let (mut wallet, _) = get_funded_wallet(get_test_wpkh()); + let addr = Address::from_str("tb1pqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesf3hn0c") 
+ .unwrap(); + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 45_000); + builder.finish().unwrap(); +} + +#[test] +fn test_get_address() { + use bdk::descriptor::template::Bip84; + let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap(); + let mut wallet = Wallet::new_no_persist( + Bip84(key, KeychainKind::External), + Some(Bip84(key, KeychainKind::Internal)), + Network::Regtest, + ) + .unwrap(); + + assert_eq!( + wallet.get_address(AddressIndex::New), + AddressInfo { + index: 0, + address: Address::from_str("bcrt1qrhgaqu0zvf5q2d0gwwz04w0dh0cuehhqvzpp4w").unwrap(), + keychain: KeychainKind::External, + } + ); + + assert_eq!( + wallet.get_internal_address(AddressIndex::New), + AddressInfo { + index: 0, + address: Address::from_str("bcrt1q0ue3s5y935tw7v3gmnh36c5zzsaw4n9c9smq79").unwrap(), + keychain: KeychainKind::Internal, + } + ); + + let mut wallet = + Wallet::new_no_persist(Bip84(key, KeychainKind::External), None, Network::Regtest).unwrap(); + + assert_eq!( + wallet.get_internal_address(AddressIndex::New), + AddressInfo { + index: 0, + address: Address::from_str("bcrt1qrhgaqu0zvf5q2d0gwwz04w0dh0cuehhqvzpp4w").unwrap(), + keychain: KeychainKind::External, + }, + "when there's no internal descriptor it should just use external" + ); +} + +#[test] +fn test_get_address_no_reuse_single_descriptor() { + use bdk::descriptor::template::Bip84; + use std::collections::HashSet; + + let key = bitcoin::util::bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap(); + let mut wallet = + Wallet::new_no_persist(Bip84(key, KeychainKind::External), None, Network::Regtest).unwrap(); + + let mut used_set = HashSet::new(); + + (0..3).for_each(|_| { + let external_addr = wallet.get_address(AddressIndex::New).address; + 
assert!(used_set.insert(external_addr)); + + let internal_addr = wallet.get_internal_address(AddressIndex::New).address; + assert!(used_set.insert(internal_addr)); + }); +} + +#[test] +fn test_taproot_psbt_populate_tap_key_origins() { + let (mut wallet, _) = get_funded_wallet(get_test_tr_single_sig_xprv()); + let addr = wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (psbt, _) = builder.finish().unwrap(); + + assert_eq!( + psbt.inputs[0] + .tap_key_origins + .clone() + .into_iter() + .collect::>(), + vec![( + from_str!("b96d3a3dc76a4fc74e976511b23aecb78e0754c23c0ed7a6513e18cbbc7178e9"), + (vec![], (from_str!("f6a5cb8b"), from_str!("m/0"))) + )], + "Wrong input tap_key_origins" + ); + assert_eq!( + psbt.outputs[0] + .tap_key_origins + .clone() + .into_iter() + .collect::>(), + vec![( + from_str!("e9b03068cf4a2621d4f81e68f6c4216e6bd260fe6edf6acc55c8d8ae5aeff0a8"), + (vec![], (from_str!("f6a5cb8b"), from_str!("m/1"))) + )], + "Wrong output tap_key_origins" + ); +} + +#[test] +fn test_taproot_psbt_populate_tap_key_origins_repeated_key() { + let (mut wallet, _) = get_funded_wallet(get_test_tr_repeated_key()); + let addr = wallet.get_address(AddressIndex::New); + + let path = vec![("e5mmg3xh".to_string(), vec![0])] + .into_iter() + .collect(); + + let mut builder = wallet.build_tx(); + builder + .add_recipient(addr.script_pubkey(), 25_000) + .policy_path(path, KeychainKind::External); + let (psbt, _) = builder.finish().unwrap(); + + let mut input_key_origins = psbt.inputs[0] + .tap_key_origins + .clone() + .into_iter() + .collect::>(); + input_key_origins.sort(); + + assert_eq!( + input_key_origins, + vec![ + ( + from_str!("b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55"), + ( + vec![], + (FromStr::from_str("871fd295").unwrap(), vec![].into()) + ) + ), + ( + from_str!("2b0558078bec38694a84933d659303e2575dae7e91685911454115bfd64487e3"), + ( + vec![ + from_str!( + 
"858ad7a7d7f270e2c490c4d6ba00c499e46b18fdd59ea3c2c47d20347110271e" + ), + from_str!( + "f6e927ad4492c051fe325894a4f5f14538333b55a35f099876be42009ec8f903" + ), + ], + (FromStr::from_str("ece52657").unwrap(), vec![].into()) + ) + ) + ], + "Wrong input tap_key_origins" + ); + + let mut output_key_origins = psbt.outputs[0] + .tap_key_origins + .clone() + .into_iter() + .collect::>(); + output_key_origins.sort(); + + assert_eq!( + input_key_origins, output_key_origins, + "Wrong output tap_key_origins" + ); +} + +#[test] +fn test_taproot_psbt_input_tap_tree() { + use bdk::bitcoin::psbt::serialize::Deserialize; + use bdk::bitcoin::psbt::TapTree; + use bitcoin::hashes::hex::FromHex; + use bitcoin::util::taproot; + + let (mut wallet, _) = get_funded_wallet(get_test_tr_with_taptree()); + let addr = wallet.get_address(AddressIndex::Peek(0)); + + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (psbt, _) = builder.finish().unwrap(); + + assert_eq!( + psbt.inputs[0].tap_merkle_root, + Some( + FromHex::from_hex("61f81509635053e52d9d1217545916167394490da2287aca4693606e43851986") + .unwrap() + ), + ); + assert_eq!( + psbt.inputs[0].tap_scripts.clone().into_iter().collect::>(), + vec![ + (taproot::ControlBlock::from_slice(&Vec::::from_hex("c0b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55b7ef769a745e625ed4b9a4982a4dc08274c59187e73e6f07171108f455081cb2").unwrap()).unwrap(), (from_str!("208aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642ac"), taproot::LeafVersion::TapScript)), + (taproot::ControlBlock::from_slice(&Vec::::from_hex("c0b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55b9a515f7be31a70186e3c5937ee4a70cc4b4e1efe876c1d38e408222ffc64834").unwrap()).unwrap(), (from_str!("2051494dc22e24a32fe9dcfbd7e85faf345fa1df296fb49d156e859ef345201295ac"), taproot::LeafVersion::TapScript)), + ], + ); + assert_eq!( + psbt.inputs[0].tap_internal_key, + Some(from_str!( + 
"b511bd5771e47ee27558b1765e87b541668304ec567721c7b880edc0a010da55" + )) + ); + + // Since we are creating an output to the same address as the input, assert that the + // internal_key is the same + assert_eq!( + psbt.inputs[0].tap_internal_key, + psbt.outputs[0].tap_internal_key + ); + + assert_eq!( + psbt.outputs[0].tap_tree, + Some(TapTree::deserialize(&Vec::::from_hex("01c022208aee2b8120a5f157f1223f72b5e62b825831a27a9fdf427db7cc697494d4a642ac01c0222051494dc22e24a32fe9dcfbd7e85faf345fa1df296fb49d156e859ef345201295ac",).unwrap()).unwrap()) + ); +} + +#[test] +fn test_taproot_sign_missing_witness_utxo() { + let (mut wallet, _) = get_funded_wallet(get_test_tr_single_sig()); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + let witness_utxo = psbt.inputs[0].witness_utxo.take(); + + let result = wallet.sign( + &mut psbt, + SignOptions { + allow_all_sighashes: true, + ..Default::default() + }, + ); + assert_matches!( + result, + Err(Error::Signer(SignerError::MissingWitnessUtxo)), + "Signing should have failed with the correct error because the witness_utxo is missing" + ); + + // restore the witness_utxo + psbt.inputs[0].witness_utxo = witness_utxo; + + let result = wallet.sign( + &mut psbt, + SignOptions { + allow_all_sighashes: true, + ..Default::default() + }, + ); + + assert_matches!( + result, + Ok(true), + "Should finalize the input since we can produce signatures" + ); +} + +#[test] +fn test_taproot_sign_using_non_witness_utxo() { + let (mut wallet, prev_txid) = get_funded_wallet(get_test_tr_single_sig()); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder.drain_to(addr.script_pubkey()).drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + psbt.inputs[0].witness_utxo = None; + psbt.inputs[0].non_witness_utxo = wallet.get_tx(prev_txid, true).unwrap().transaction; + assert!( + 
psbt.inputs[0].non_witness_utxo.is_some(), + "Previous tx should be present in the database" + ); + + let result = wallet.sign(&mut psbt, Default::default()); + assert!(result.is_ok(), "Signing should have worked"); + assert!( + result.unwrap(), + "Should finalize the input since we can produce signatures" + ); +} + +#[test] +fn test_taproot_foreign_utxo() { + let (mut wallet1, _) = get_funded_wallet(get_test_wpkh()); + let (wallet2, _) = get_funded_wallet(get_test_tr_single_sig()); + + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let utxo = wallet2.list_unspent().remove(0); + let psbt_input = wallet2.get_psbt_input(utxo.clone(), None, false).unwrap(); + let foreign_utxo_satisfaction = wallet2 + .get_descriptor_for_keychain(KeychainKind::External) + .max_satisfaction_weight() + .unwrap(); + + assert!( + psbt_input.non_witness_utxo.is_none(), + "`non_witness_utxo` should never be populated for taproot" + ); + + let mut builder = wallet1.build_tx(); + builder + .add_recipient(addr.script_pubkey(), 60_000) + .add_foreign_utxo(utxo.outpoint, psbt_input, foreign_utxo_satisfaction) + .unwrap(); + let (psbt, details) = builder.finish().unwrap(); + + assert_eq!( + details.sent - details.received, + 10_000 + details.fee.unwrap_or(0), + "we should have only net spent ~10_000" + ); + + assert!( + psbt.unsigned_tx + .input + .iter() + .any(|input| input.previous_output == utxo.outpoint), + "foreign_utxo should be in there" + ); +} + +fn test_spend_from_wallet(mut wallet: Wallet) { + let addr = wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (mut psbt, _) = builder.finish().unwrap(); + + assert!( + wallet.sign(&mut psbt, Default::default()).unwrap(), + "Unable to finalize tx" + ); +} + +// #[test] +// fn test_taproot_key_spend() { +// let (mut wallet, _) = get_funded_wallet(get_test_tr_single_sig()); +// test_spend_from_wallet(wallet); + +// let (mut 
wallet, _) = get_funded_wallet(get_test_tr_single_sig_xprv()); +// test_spend_from_wallet(wallet); +// } + +#[test] +fn test_taproot_no_key_spend() { + let (mut wallet, _) = get_funded_wallet(get_test_tr_with_taptree_both_priv()); + let addr = wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (mut psbt, _) = builder.finish().unwrap(); + + assert!( + wallet + .sign( + &mut psbt, + SignOptions { + sign_with_tap_internal_key: false, + ..Default::default() + }, + ) + .unwrap(), + "Unable to finalize tx" + ); + + assert!(psbt.inputs.iter().all(|i| i.tap_key_sig.is_none())); +} + +#[test] +fn test_taproot_script_spend() { + let (wallet, _) = get_funded_wallet(get_test_tr_with_taptree()); + test_spend_from_wallet(wallet); + + let (wallet, _) = get_funded_wallet(get_test_tr_with_taptree_xprv()); + test_spend_from_wallet(wallet); +} + +#[test] +fn test_taproot_script_spend_sign_all_leaves() { + use bdk::signer::TapLeavesOptions; + let (mut wallet, _) = get_funded_wallet(get_test_tr_with_taptree_both_priv()); + let addr = wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (mut psbt, _) = builder.finish().unwrap(); + + assert!( + wallet + .sign( + &mut psbt, + SignOptions { + tap_leaves_options: TapLeavesOptions::All, + ..Default::default() + }, + ) + .unwrap(), + "Unable to finalize tx" + ); + + assert!(psbt + .inputs + .iter() + .all(|i| i.tap_script_sigs.len() == i.tap_scripts.len())); +} + +#[test] +fn test_taproot_script_spend_sign_include_some_leaves() { + use bdk::signer::TapLeavesOptions; + use bitcoin::util::taproot::TapLeafHash; + + let (mut wallet, _) = get_funded_wallet(get_test_tr_with_taptree_both_priv()); + let addr = wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (mut psbt, _) = 
builder.finish().unwrap(); + let mut script_leaves: Vec<_> = psbt.inputs[0] + .tap_scripts + .clone() + .values() + .map(|(script, version)| TapLeafHash::from_script(script, *version)) + .collect(); + let included_script_leaves = vec![script_leaves.pop().unwrap()]; + let excluded_script_leaves = script_leaves; + + assert!( + wallet + .sign( + &mut psbt, + SignOptions { + tap_leaves_options: TapLeavesOptions::Include(included_script_leaves.clone()), + ..Default::default() + }, + ) + .unwrap(), + "Unable to finalize tx" + ); + + assert!(psbt.inputs[0] + .tap_script_sigs + .iter() + .all(|s| included_script_leaves.contains(&s.0 .1) + && !excluded_script_leaves.contains(&s.0 .1))); +} + +#[test] +fn test_taproot_script_spend_sign_exclude_some_leaves() { + use bdk::signer::TapLeavesOptions; + use bitcoin::util::taproot::TapLeafHash; + + let (mut wallet, _) = get_funded_wallet(get_test_tr_with_taptree_both_priv()); + let addr = wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (mut psbt, _) = builder.finish().unwrap(); + let mut script_leaves: Vec<_> = psbt.inputs[0] + .tap_scripts + .clone() + .values() + .map(|(script, version)| TapLeafHash::from_script(script, *version)) + .collect(); + let included_script_leaves = vec![script_leaves.pop().unwrap()]; + let excluded_script_leaves = script_leaves; + + assert!( + wallet + .sign( + &mut psbt, + SignOptions { + tap_leaves_options: TapLeavesOptions::Exclude(excluded_script_leaves.clone()), + ..Default::default() + }, + ) + .unwrap(), + "Unable to finalize tx" + ); + + assert!(psbt.inputs[0] + .tap_script_sigs + .iter() + .all(|s| included_script_leaves.contains(&s.0 .1) + && !excluded_script_leaves.contains(&s.0 .1))); +} + +#[test] +fn test_taproot_script_spend_sign_no_leaves() { + use bdk::signer::TapLeavesOptions; + let (mut wallet, _) = get_funded_wallet(get_test_tr_with_taptree_both_priv()); + let addr = 
wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (mut psbt, _) = builder.finish().unwrap(); + + wallet + .sign( + &mut psbt, + SignOptions { + tap_leaves_options: TapLeavesOptions::None, + ..Default::default() + }, + ) + .unwrap(); + + assert!(psbt.inputs.iter().all(|i| i.tap_script_sigs.is_empty())); +} + +#[test] +fn test_taproot_sign_derive_index_from_psbt() { + let (mut wallet, _) = get_funded_wallet(get_test_tr_single_sig_xprv()); + + let addr = wallet.get_address(AddressIndex::New); + + let mut builder = wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 25_000); + let (mut psbt, _) = builder.finish().unwrap(); + + // re-create the wallet with an empty db + let wallet_empty = + Wallet::new_no_persist(get_test_tr_single_sig_xprv(), None, Network::Regtest).unwrap(); + + // signing with an empty db means that we will only look at the psbt to infer the + // derivation index + assert!( + wallet_empty.sign(&mut psbt, Default::default()).unwrap(), + "Unable to finalize tx" + ); +} + +#[test] +fn test_taproot_sign_explicit_sighash_all() { + let (mut wallet, _) = get_funded_wallet(get_test_tr_single_sig()); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder + .drain_to(addr.script_pubkey()) + .sighash(SchnorrSighashType::All.into()) + .drain_wallet(); + let (mut psbt, _) = builder.finish().unwrap(); + + let result = wallet.sign(&mut psbt, Default::default()); + assert!( + result.is_ok(), + "Signing should work because SIGHASH_ALL is safe" + ) +} + +#[test] +fn test_taproot_sign_non_default_sighash() { + let sighash = SchnorrSighashType::NonePlusAnyoneCanPay; + + let (mut wallet, _) = get_funded_wallet(get_test_tr_single_sig()); + let addr = wallet.get_address(New); + let mut builder = wallet.build_tx(); + builder + .drain_to(addr.script_pubkey()) + .sighash(sighash.into()) + .drain_wallet(); + let (mut psbt, _) = 
builder.finish().unwrap(); + + let witness_utxo = psbt.inputs[0].witness_utxo.take(); + + let result = wallet.sign(&mut psbt, Default::default()); + assert!( + result.is_err(), + "Signing should have failed because the TX uses non-standard sighashes" + ); + assert_matches!( + result, + Err(Error::Signer(SignerError::NonStandardSighash)), + "Signing failed with the wrong error type" + ); + + // try again after opting-in + let result = wallet.sign( + &mut psbt, + SignOptions { + allow_all_sighashes: true, + ..Default::default() + }, + ); + assert!( + result.is_err(), + "Signing should have failed because the witness_utxo is missing" + ); + assert_matches!( + result, + Err(Error::Signer(SignerError::MissingWitnessUtxo)), + "Signing failed with the wrong error type" + ); + + // restore the witness_utxo + psbt.inputs[0].witness_utxo = witness_utxo; + + let result = wallet.sign( + &mut psbt, + SignOptions { + allow_all_sighashes: true, + ..Default::default() + }, + ); + + assert!(result.is_ok(), "Signing should have worked"); + assert!( + result.unwrap(), + "Should finalize the input since we can produce signatures" + ); + + let extracted = psbt.extract_tx(); + assert_eq!( + *extracted.input[0].witness.to_vec()[0].last().unwrap(), + sighash as u8, + "The signature should have been made with the right sighash" + ); +} + +#[test] +fn test_spend_coinbase() { + let descriptor = get_test_wpkh(); + let mut wallet = Wallet::new_no_persist(descriptor, None, Network::Regtest).unwrap(); + + let confirmation_height = 5; + wallet + .insert_checkpoint(BlockId { + height: confirmation_height, + hash: BlockHash::all_zeros(), + }) + .unwrap(); + let coinbase_tx = Transaction { + version: 1, + lock_time: bitcoin::PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::null(), + ..Default::default() + }], + output: vec![TxOut { + value: 25_000, + script_pubkey: wallet.get_address(New).address.script_pubkey(), + }], + }; + wallet + .insert_tx( + coinbase_tx, + 
ConfirmationTime::Confirmed { + height: confirmation_height, + time: 30_000, + }, + ) + .unwrap(); + + let not_yet_mature_time = confirmation_height + COINBASE_MATURITY - 1; + let maturity_time = confirmation_height + COINBASE_MATURITY; + + let balance = wallet.get_balance(); + assert_eq!( + balance, + Balance { + immature: 25_000, + trusted_pending: 0, + untrusted_pending: 0, + confirmed: 0 + } + ); + + // We try to create a transaction, only to notice that all + // our funds are unspendable + let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap(); + let mut builder = wallet.build_tx(); + builder + .add_recipient(addr.script_pubkey(), balance.immature / 2) + .current_height(confirmation_height); + assert!(matches!( + builder.finish(), + Err(Error::InsufficientFunds { + needed: _, + available: 0 + }) + )); + + // Still unspendable... + let mut builder = wallet.build_tx(); + builder + .add_recipient(addr.script_pubkey(), balance.immature / 2) + .current_height(not_yet_mature_time); + assert_matches!( + builder.finish(), + Err(Error::InsufficientFunds { + needed: _, + available: 0 + }) + ); + + wallet + .insert_checkpoint(BlockId { + height: maturity_time, + hash: BlockHash::all_zeros(), + }) + .unwrap(); + let balance = wallet.get_balance(); + assert_eq!( + balance, + Balance { + immature: 0, + trusted_pending: 0, + untrusted_pending: 0, + confirmed: 25_000 + } + ); + let mut builder = wallet.build_tx(); + builder + .add_recipient(addr.script_pubkey(), balance.confirmed / 2) + .current_height(maturity_time); + builder.finish().unwrap(); +} + +#[test] +fn test_allow_dust_limit() { + let (mut wallet, _) = get_funded_wallet(get_test_single_sig_cltv()); + + let addr = wallet.get_address(New); + + let mut builder = wallet.build_tx(); + + builder.add_recipient(addr.script_pubkey(), 0); + + assert_matches!(builder.finish(), Err(Error::OutputBelowDustLimit(0))); + + let mut builder = wallet.build_tx(); + + builder + .allow_dust(true) + 
.add_recipient(addr.script_pubkey(), 0); + + assert!(builder.finish().is_ok()); +} + +#[test] +fn test_fee_rate_sign_no_grinding_high_r() { + // Our goal is to obtain a transaction with a signature with high-R (71 bytes + // instead of 70). We then check that our fee rate and fee calculation is + // alright. + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let fee_rate = FeeRate::from_sat_per_vb(1.0); + let mut builder = wallet.build_tx(); + let mut data = vec![0]; + builder + .drain_to(addr.script_pubkey()) + .drain_wallet() + .fee_rate(fee_rate) + .add_data(&data); + let (mut psbt, details) = builder.finish().unwrap(); + let (op_return_vout, _) = psbt + .unsigned_tx + .output + .iter() + .enumerate() + .find(|(_n, i)| i.script_pubkey.is_op_return()) + .unwrap(); + + let mut sig_len: usize = 0; + // We try to sign many different times until we find a longer signature (71 bytes) + while sig_len < 71 { + // Changing the OP_RETURN data will make the signature change (but not the fee, until + // data[0] is small enough) + data[0] += 1; + psbt.unsigned_tx.output[op_return_vout].script_pubkey = Script::new_op_return(&data); + // Clearing the previous signature + psbt.inputs[0].partial_sigs.clear(); + // Signing + wallet + .sign( + &mut psbt, + SignOptions { + remove_partial_sigs: false, + try_finalize: false, + allow_grinding: false, + ..Default::default() + }, + ) + .unwrap(); + // We only have one key in the partial_sigs map, this is a trick to retrieve it + let key = psbt.inputs[0].partial_sigs.keys().next().unwrap(); + sig_len = psbt.inputs[0].partial_sigs[key].sig.serialize_der().len(); + } + // Actually finalizing the transaction... 
+ wallet + .sign( + &mut psbt, + SignOptions { + remove_partial_sigs: false, + allow_grinding: false, + ..Default::default() + }, + ) + .unwrap(); + // ...and checking that everything is fine + assert_fee_rate!(psbt, details.fee.unwrap_or(0), fee_rate); +} + +#[test] +fn test_fee_rate_sign_grinding_low_r() { + // Our goal is to obtain a transaction with a signature with low-R (70 bytes) + // by setting the `allow_grinding` signing option as true. + // We then check that our fee rate and fee calculation is alright and that our + // signature is 70 bytes. + let (mut wallet, _) = get_funded_wallet("wpkh(tprv8ZgxMBicQKsPd3EupYiPRhaMooHKUHJxNsTfYuScep13go8QFfHdtkG9nRkFGb7busX4isf6X9dURGCoKgitaApQ6MupRhZMcELAxTBRJgS/*)"); + let addr = wallet.get_address(New); + let fee_rate = FeeRate::from_sat_per_vb(1.0); + let mut builder = wallet.build_tx(); + builder + .drain_to(addr.script_pubkey()) + .drain_wallet() + .fee_rate(fee_rate); + let (mut psbt, details) = builder.finish().unwrap(); + + wallet + .sign( + &mut psbt, + SignOptions { + remove_partial_sigs: false, + allow_grinding: true, + ..Default::default() + }, + ) + .unwrap(); + + let key = psbt.inputs[0].partial_sigs.keys().next().unwrap(); + let sig_len = psbt.inputs[0].partial_sigs[key].sig.serialize_der().len(); + assert_eq!(sig_len, 70); + assert_fee_rate!(psbt, details.fee.unwrap_or(0), fee_rate); +} + +// #[cfg(feature = "test-hardware-signer")] +// #[test] +// fn test_hardware_signer() { +// use std::sync::Arc; +// +// use bdk::signer::SignerOrdering; +// use bdk::wallet::hardwaresigner::HWISigner; +// use hwi::types::HWIChain; +// use hwi::HWIClient; +// +// let mut devices = HWIClient::enumerate().unwrap(); +// if devices.is_empty() { +// panic!("No devices found!"); +// } +// let device = devices.remove(0).unwrap(); +// let client = HWIClient::get_client(&device, true, HWIChain::Regtest).unwrap(); +// let descriptors = client.get_descriptors::(None).unwrap(); +// let custom_signer = 
HWISigner::from_device(&device, HWIChain::Regtest).unwrap(); +// +// let (mut wallet, _) = get_funded_wallet(&descriptors.internal[0]); +// wallet.add_signer( +// KeychainKind::External, +// SignerOrdering(200), +// Arc::new(custom_signer), +// ); +// +// let addr = wallet.get_address(LastUnused); +// let mut builder = wallet.build_tx(); +// builder.drain_to(addr.script_pubkey()).drain_wallet(); +// let (mut psbt, _) = builder.finish().unwrap(); +// +// let finalized = wallet.sign(&mut psbt, Default::default()).unwrap(); +// assert!(finalized); +// } + +#[test] +fn test_taproot_load_descriptor_duplicated_keys() { + // Added after issue https://github.com/bitcoindevkit/bdk/issues/760 + // + // Having the same key in multiple taproot leaves is safe and should be accepted by BDK + + let (mut wallet, _) = get_funded_wallet(get_test_tr_dup_keys()); + let addr = wallet.get_address(New); + + assert_eq!( + addr.to_string(), + "bcrt1pvysh4nmh85ysrkpwtrr8q8gdadhgdejpy6f9v424a8v9htjxjhyqw9c5s5" + ); +} + +#[test] +/// The wallet should re-use previously allocated change addresses when the tx using them is cancelled +fn test_tx_cancellation() { + macro_rules! 
new_tx { + ($wallet:expr) => {{ + let addr = Address::from_str("2N4eQYCbKUHCCTUjBJeHcJp9ok6J2GZsTDt").unwrap(); + let mut builder = $wallet.build_tx(); + builder.add_recipient(addr.script_pubkey(), 10_000); + + let (psbt, _) = builder.finish().unwrap(); + + psbt + }}; + } + + let (mut wallet, _) = + get_funded_wallet_with_change(get_test_wpkh(), Some(get_test_tr_single_sig_xprv())); + + let psbt1 = new_tx!(wallet); + let change_derivation_1 = psbt1 + .unsigned_tx + .output + .iter() + .find_map(|txout| wallet.derivation_of_spk(&txout.script_pubkey)) + .unwrap(); + assert_eq!(change_derivation_1, (KeychainKind::Internal, 0)); + + let psbt2 = new_tx!(wallet); + + let change_derivation_2 = psbt2 + .unsigned_tx + .output + .iter() + .find_map(|txout| wallet.derivation_of_spk(&txout.script_pubkey)) + .unwrap(); + assert_eq!(change_derivation_2, (KeychainKind::Internal, 1)); + + wallet.cancel_tx(&psbt1.extract_tx()); + + let psbt3 = new_tx!(wallet); + let change_derivation_3 = psbt3 + .unsigned_tx + .output + .iter() + .find_map(|txout| wallet.derivation_of_spk(&txout.script_pubkey)) + .unwrap(); + assert_eq!(change_derivation_3, (KeychainKind::Internal, 0)); + + let psbt3 = new_tx!(wallet); + let change_derivation_3 = psbt3 + .unsigned_tx + .output + .iter() + .find_map(|txout| wallet.derivation_of_spk(&txout.script_pubkey)) + .unwrap(); + assert_eq!(change_derivation_3, (KeychainKind::Internal, 2)); + + wallet.cancel_tx(&psbt3.extract_tx()); + + let psbt3 = new_tx!(wallet); + let change_derivation_4 = psbt3 + .unsigned_tx + .output + .iter() + .find_map(|txout| wallet.derivation_of_spk(&txout.script_pubkey)) + .unwrap(); + assert_eq!(change_derivation_4, (KeychainKind::Internal, 2)); +} diff --git a/crates/chain/Cargo.toml b/crates/chain/Cargo.toml new file mode 100644 index 000000000..c3d0d77dc --- /dev/null +++ b/crates/chain/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "bdk_chain" +version = "0.3.1" +edition = "2021" +rust-version = "1.57" +homepage = 
"https://bitcoindevkit.org" +repository = "https://github.com/bitcoindevkit/bdk" +documentation = "https://docs.rs/bdk_chain" +description = "Collection of core structures for Bitcoin Dev Kit." +license = "MIT OR Apache-2.0" +readme = "../README.md" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bitcoin = { version = "0.29" } +serde_crate = { package = "serde", version = "1", optional = true, features = ["derive"] } + +# Use hashbrown as a feature flag to have HashSet and HashMap from it. +# note version 0.13 breaks outs MSRV. +hashbrown = { version = "0.12", optional = true, features = ["serde"] } +miniscript = { version = "9.0.0", optional = true } + +[dev-dependencies] +rand = "0.8" + +[features] +default = ["std", "miniscript"] +std = [] +serde = ["serde_crate", "bitcoin/serde" ] diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs new file mode 100644 index 000000000..51b1e3b2e --- /dev/null +++ b/crates/chain/src/chain_data.rs @@ -0,0 +1,218 @@ +use bitcoin::{hashes::Hash, BlockHash, OutPoint, TxOut, Txid}; + +use crate::{ + sparse_chain::{self, ChainPosition}, + COINBASE_MATURITY, +}; + +/// Represents the height in which a transaction is confirmed at. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +pub enum TxHeight { + Confirmed(u32), + Unconfirmed, +} + +impl Default for TxHeight { + fn default() -> Self { + Self::Unconfirmed + } +} + +impl core::fmt::Display for TxHeight { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Confirmed(h) => core::write!(f, "confirmed_at({})", h), + Self::Unconfirmed => core::write!(f, "unconfirmed"), + } + } +} + +impl From> for TxHeight { + fn from(opt: Option) -> Self { + match opt { + Some(h) => Self::Confirmed(h), + None => Self::Unconfirmed, + } + } +} + +impl From for Option { + fn from(height: TxHeight) -> Self { + match height { + TxHeight::Confirmed(h) => Some(h), + TxHeight::Unconfirmed => None, + } + } +} + +impl crate::sparse_chain::ChainPosition for TxHeight { + fn height(&self) -> TxHeight { + *self + } + + fn max_ord_of_height(height: TxHeight) -> Self { + height + } + + fn min_ord_of_height(height: TxHeight) -> Self { + height + } +} + +impl TxHeight { + pub fn is_confirmed(&self) -> bool { + matches!(self, Self::Confirmed(_)) + } +} + +/// Block height and timestamp in which a transaction is confirmed in. +#[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +pub enum ConfirmationTime { + Confirmed { height: u32, time: u64 }, + Unconfirmed, +} + +impl sparse_chain::ChainPosition for ConfirmationTime { + fn height(&self) -> TxHeight { + match self { + ConfirmationTime::Confirmed { height, .. 
} => TxHeight::Confirmed(*height), + ConfirmationTime::Unconfirmed => TxHeight::Unconfirmed, + } + } + + fn max_ord_of_height(height: TxHeight) -> Self { + match height { + TxHeight::Confirmed(height) => Self::Confirmed { + height, + time: u64::MAX, + }, + TxHeight::Unconfirmed => Self::Unconfirmed, + } + } + + fn min_ord_of_height(height: TxHeight) -> Self { + match height { + TxHeight::Confirmed(height) => Self::Confirmed { + height, + time: u64::MIN, + }, + TxHeight::Unconfirmed => Self::Unconfirmed, + } + } +} + +impl ConfirmationTime { + pub fn is_confirmed(&self) -> bool { + matches!(self, Self::Confirmed { .. }) + } +} + +/// A reference to a block in the cannonical chain. +#[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +pub struct BlockId { + /// The height the block was confirmed at + pub height: u32, + /// The hash of the block + pub hash: BlockHash, +} + +impl Default for BlockId { + fn default() -> Self { + Self { + height: Default::default(), + hash: BlockHash::from_inner([0u8; 32]), + } + } +} + +impl From<(u32, BlockHash)> for BlockId { + fn from((height, hash): (u32, BlockHash)) -> Self { + Self { height, hash } + } +} + +impl From for (u32, BlockHash) { + fn from(block_id: BlockId) -> Self { + (block_id.height, block_id.hash) + } +} + +impl From<(&u32, &BlockHash)> for BlockId { + fn from((height, hash): (&u32, &BlockHash)) -> Self { + Self { + height: *height, + hash: *hash, + } + } +} + +/// A `TxOut` with as much data as we can retreive about it +#[derive(Debug, Clone, PartialEq)] +pub struct FullTxOut { + /// The location of the `TxOut` + pub outpoint: OutPoint, + /// The `TxOut` + pub txout: TxOut, + /// The position of the transaction in `outpoint` in the overall chain. + pub chain_position: I, + /// The txid and chain position of the transaction (if any) that has spent this output. 
+ pub spent_by: Option<(I, Txid)>, + /// Whether this output is on a coinbase transaction + pub is_on_coinbase: bool, +} + +impl FullTxOut { + /// Whether the utxo is/was/will be spendable at `height`. + /// + /// It is spendable if it is not an immature coinbase output and no spending tx has been + /// confirmed by that heigt. + pub fn is_spendable_at(&self, height: u32) -> bool { + if !self.is_mature(height) { + return false; + } + + if self.chain_position.height() > TxHeight::Confirmed(height) { + return false; + } + + match &self.spent_by { + Some((spending_height, _)) => spending_height.height() > TxHeight::Confirmed(height), + None => true, + } + } + + pub fn is_mature(&self, height: u32) -> bool { + if self.is_on_coinbase { + let tx_height = match self.chain_position.height() { + TxHeight::Confirmed(tx_height) => tx_height, + TxHeight::Unconfirmed => { + debug_assert!(false, "coinbase tx can never be unconfirmed"); + return false; + } + }; + let age = height.saturating_sub(tx_height); + if age + 1 < COINBASE_MATURITY { + return false; + } + } + + true + } +} + +// TOOD: make test diff --git a/crates/chain/src/chain_graph.rs b/crates/chain/src/chain_graph.rs new file mode 100644 index 000000000..8d1eda663 --- /dev/null +++ b/crates/chain/src/chain_graph.rs @@ -0,0 +1,638 @@ +//! Module for structures that combine the features of [`sparse_chain`] and [`tx_graph`]. +use crate::{ + collections::HashSet, + sparse_chain::{self, ChainPosition, SparseChain}, + tx_graph::{self, TxGraph}, + BlockId, ForEachTxOut, FullTxOut, TxHeight, +}; +use alloc::{string::ToString, vec::Vec}; +use bitcoin::{OutPoint, Transaction, TxOut, Txid}; +use core::fmt::Debug; + +/// A consistent combination of a [`SparseChain

`] and a [`TxGraph`]. +/// +/// `SparseChain` only keeps track of transaction ids and their position in the chain but you often +/// want to store the full transactions as well. Additionally you want to make sure that everything +/// in the chain is consistent with the full transaction data. `ChainGraph` enforces these two +/// invariants: +/// +/// 1. Every transaction that is in the chain is also in the graph (you always have the full +/// transaction). +/// 2. No transactions in the chain conflict with each other i.e. they don't double spend each +/// other or have ancestors that double spend each other. +/// +/// Note that the `ChainGraph` guarantees a 1:1 mapping between transactions in the `chain` and +/// `graph` but not the other way around. Transactions may fall out of the *chain* (via re-org or +/// mempool eviction) but will remain in the *graph*. +#[derive(Clone, Debug, PartialEq)] +pub struct ChainGraph

{ + chain: SparseChain

, + graph: TxGraph, +} + +impl

Default for ChainGraph

{ + fn default() -> Self { + Self { + chain: Default::default(), + graph: Default::default(), + } + } +} + +impl

AsRef> for ChainGraph

{ + fn as_ref(&self) -> &SparseChain

{ + &self.chain + } +} + +impl

AsRef for ChainGraph

{ + fn as_ref(&self) -> &TxGraph { + &self.graph + } +} + +impl

AsRef> for ChainGraph

{ + fn as_ref(&self) -> &ChainGraph

{ + self + } +} + +impl

ChainGraph

{ + /// Returns a reference to the internal [`SparseChain`]. + pub fn chain(&self) -> &SparseChain

{ + &self.chain + } + + /// Returns a reference to the internal [`TxGraph`]. + pub fn graph(&self) -> &TxGraph { + &self.graph + } +} + +impl

ChainGraph

+where + P: ChainPosition, +{ + /// Create a new chain graph from a `chain` and a `graph`. + /// + /// There are two reasons this can return an `Err`: + /// + /// 1. There is a transaction in the `chain` that does not have its corresponding full + /// transaction in `graph`. + /// 2. The `chain` has two transactions that allegedly in it but they conflict in the `graph` + /// (so could not possibly be in the same chain). + pub fn new(chain: SparseChain

, graph: TxGraph) -> Result> { + let mut missing = HashSet::default(); + for (pos, txid) in chain.txids() { + if let Some(tx) = graph.get_tx(*txid) { + let conflict = graph + .walk_conflicts(tx, |_, txid| Some((chain.tx_position(txid)?.clone(), txid))) + .next(); + if let Some((conflict_pos, conflict)) = conflict { + return Err(NewError::Conflict { + a: (pos.clone(), *txid), + b: (conflict_pos, conflict), + }); + } + } else { + missing.insert(*txid); + } + } + + if !missing.is_empty() { + return Err(NewError::Missing(missing)); + } + + Ok(Self { chain, graph }) + } + + /// Take an update in the form of a [`SparseChain

`][`SparseChain`] and attempt to turn it + /// into a chain graph by filling in full transactions from `self` and from `new_txs`. This + /// returns a `ChainGraph>` where the [`Cow<'a, T>`] will borrow the transaction if it + /// got it from `self`. + /// + /// This is useful when interacting with services like an electrum server which returns a list + /// of txids and heights when calling [`script_get_history`] which can easily be inserted into a + /// [`SparseChain`][`SparseChain`]. From there you need to figure out which full + /// transactions you are missing in your chain graph and form `new_txs`. You then use + /// `inflate_update` to turn this into an update `ChainGraph>` and finally + /// use [`determine_changeset`] to generate the changeset from it. + /// + /// [`SparseChain`]: crate::sparse_chain::SparseChain + /// [`Cow<'a, T>`]: std::borrow::Cow + /// [`script_get_history`]: https://docs.rs/electrum-client/latest/electrum_client/trait.ElectrumApi.html#tymethod.script_get_history + /// [`determine_changeset`]: Self::determine_changeset + pub fn inflate_update( + &self, + update: SparseChain

, + new_txs: impl IntoIterator, + ) -> Result, NewError

> { + let mut inflated_chain = SparseChain::default(); + let mut inflated_graph = TxGraph::default(); + + for (height, hash) in update.checkpoints().clone().into_iter() { + let _ = inflated_chain + .insert_checkpoint(BlockId { height, hash }) + .expect("must insert"); + } + + // [TODO] @evanlinjin: These need better comments + // - copy transactions that have changed positions into the graph + // - add new transactions to inflated chain + for (pos, txid) in update.txids() { + match self.chain.tx_position(*txid) { + Some(original_pos) => { + if original_pos != pos { + let tx = self + .graph + .get_tx(*txid) + .expect("tx must exist as it is referenced in sparsechain") + .clone(); + let _ = inflated_chain + .insert_tx(*txid, pos.clone()) + .expect("must insert since this was already in update"); + let _ = inflated_graph.insert_tx(tx); + } + } + None => { + let _ = inflated_chain + .insert_tx(*txid, pos.clone()) + .expect("must insert since this was already in update"); + } + } + } + + for tx in new_txs { + let _ = inflated_graph.insert_tx(tx); + } + + ChainGraph::new(inflated_chain, inflated_graph) + } + + /// Sets the checkpoint limit. + /// + /// Refer to [`SparseChain::checkpoint_limit`] for more. + pub fn checkpoint_limit(&self) -> Option { + self.chain.checkpoint_limit() + } + + /// Sets the checkpoint limit. + /// + /// Refer to [`SparseChain::set_checkpoint_limit`] for more. + pub fn set_checkpoint_limit(&mut self, limit: Option) { + self.chain.set_checkpoint_limit(limit) + } + + /// Determines the changes required to invalidate checkpoints `from_height` (inclusive) and + /// above. Displaced transactions will have their positions moved to [`TxHeight::Unconfirmed`]. + pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet

{ + ChangeSet { + chain: self.chain.invalidate_checkpoints_preview(from_height), + ..Default::default() + } + } + + /// Invalidate checkpoints `from_height` (inclusive) and above. Displaced transactions will be + /// re-positioned to [`TxHeight::Unconfirmed`]. + /// + /// This is equivalent to calling [`Self::invalidate_checkpoints_preview`] and + /// [`Self::apply_changeset`] in sequence. + pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet

+ where + ChangeSet

: Clone, + { + let changeset = self.invalidate_checkpoints_preview(from_height); + self.apply_changeset(changeset.clone()); + changeset + } + + /// Get a transaction that is currently in the underlying [`SparseChain`]. + /// + /// This does not necessarily mean that it is *confirmed* in the blockchain, it might just be in + /// the unconfirmed transaction list within the [`SparseChain`]. + pub fn get_tx_in_chain(&self, txid: Txid) -> Option<(&P, &Transaction)> { + let position = self.chain.tx_position(txid)?; + let full_tx = self.graph.get_tx(txid).expect("must exist"); + Some((position, full_tx)) + } + + /// Determines the changes required to insert a transaction into the inner [`ChainGraph`] and + /// [`SparseChain`] at the given `position`. + /// + /// If inserting it into the chain `position` will result in conflicts, the returned + /// [`ChangeSet`] should evict conflicting transactions. + pub fn insert_tx_preview( + &self, + tx: Transaction, + pos: P, + ) -> Result, InsertTxError

> { + let mut changeset = ChangeSet { + chain: self.chain.insert_tx_preview(tx.txid(), pos)?, + graph: self.graph.insert_tx_preview(tx), + }; + self.fix_conflicts(&mut changeset)?; + Ok(changeset) + } + + /// Inserts [`Transaction`] at given chain position. + /// + /// This is equivalent to calling [`Self::insert_tx_preview`] and [`Self::apply_changeset`] in + /// sequence. + pub fn insert_tx(&mut self, tx: Transaction, pos: P) -> Result, InsertTxError

> { + let changeset = self.insert_tx_preview(tx, pos)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Determines the changes required to insert a [`TxOut`] into the internal [`TxGraph`]. + pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> ChangeSet

{ + ChangeSet { + chain: Default::default(), + graph: self.graph.insert_txout_preview(outpoint, txout), + } + } + + /// Inserts a [`TxOut`] into the internal [`TxGraph`]. + /// + /// This is equivalent to calling [`Self::insert_txout_preview`] and [`Self::apply_changeset`] + /// in sequence. + pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet

{ + let changeset = self.insert_txout_preview(outpoint, txout); + self.apply_changeset(changeset.clone()); + changeset + } + + /// Determines the changes required to insert a `block_id` (a height and block hash) into the + /// chain. + /// + /// If a checkpoint already exists at that height with a different hash this will return + /// an error. + pub fn insert_checkpoint_preview( + &self, + block_id: BlockId, + ) -> Result, InsertCheckpointError> { + self.chain + .insert_checkpoint_preview(block_id) + .map(|chain_changeset| ChangeSet { + chain: chain_changeset, + ..Default::default() + }) + } + + /// Inserts checkpoint into [`Self`]. + /// + /// This is equivalent to calling [`Self::insert_checkpoint_preview`] and + /// [`Self::apply_changeset`] in sequence. + pub fn insert_checkpoint( + &mut self, + block_id: BlockId, + ) -> Result, InsertCheckpointError> { + let changeset = self.insert_checkpoint_preview(block_id)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Calculates the difference between self and `update` in the form of a [`ChangeSet`]. + pub fn determine_changeset( + &self, + update: &ChainGraph

, + ) -> Result, UpdateError

> { + let chain_changeset = self + .chain + .determine_changeset(&update.chain) + .map_err(UpdateError::Chain)?; + + let mut changeset = ChangeSet { + chain: chain_changeset, + graph: self.graph.determine_additions(&update.graph), + }; + + self.fix_conflicts(&mut changeset)?; + Ok(changeset) + } + + /// Given a transaction, return an iterator of `txid`s that conflict with it (spends at least + /// one of the same inputs). This includes all descendants of conflicting transactions. + /// + /// This method only returns conflicts that exist in the [`SparseChain`] as transactions that + /// are not included in [`SparseChain`] are already considered as evicted. + pub fn tx_conflicts_in_chain<'a>( + &'a self, + tx: &'a Transaction, + ) -> impl Iterator + 'a { + self.graph.walk_conflicts(tx, move |_, conflict_txid| { + self.chain + .tx_position(conflict_txid) + .map(|conflict_pos| (conflict_pos, conflict_txid)) + }) + } + + /// Fix changeset conflicts. + /// + /// **WARNING:** If there are any missing full txs, conflict resolution will not be complete. In + /// debug mode, this will result in panic. + fn fix_conflicts(&self, changeset: &mut ChangeSet

) -> Result<(), UnresolvableConflict

> { + let mut chain_conflicts = vec![]; + + for (&txid, pos_change) in &changeset.chain.txids { + let pos = match pos_change { + Some(pos) => { + // Ignore txs that are still in the chain -- we only care about new ones + if self.chain.tx_position(txid).is_some() { + continue; + } + pos + } + // Ignore txids that are being delted by the change (they can't conflict) + None => continue, + }; + + let mut full_tx = self.graph.get_tx(txid); + + if full_tx.is_none() { + full_tx = changeset.graph.tx.iter().find(|tx| tx.txid() == txid) + } + + debug_assert!(full_tx.is_some(), "should have full tx at this point"); + + let full_tx = match full_tx { + Some(full_tx) => full_tx, + None => continue, + }; + + for (conflict_pos, conflict_txid) in self.tx_conflicts_in_chain(full_tx) { + chain_conflicts.push((pos.clone(), txid, conflict_pos, conflict_txid)) + } + } + + for (update_pos, update_txid, conflicting_pos, conflicting_txid) in chain_conflicts { + // We have found a tx that conflicts with our update txid. Only allow this when the + // conflicting tx will be positioned as "unconfirmed" after the update is applied. + // If so, we will modify the changeset to evict the conflicting txid. 
+ + // determine the position of the conflicting txid after current changeset is applied + let conflicting_new_pos = changeset + .chain + .txids + .get(&conflicting_txid) + .map(Option::as_ref) + .unwrap_or(Some(conflicting_pos)); + + match conflicting_new_pos { + None => { + // conflicting txid will be deleted, can ignore + } + Some(existing_new_pos) => match existing_new_pos.height() { + TxHeight::Confirmed(_) => { + // the new postion of the conflicting tx is "confirmed", therefore cannot be + // evicted, return error + return Err(UnresolvableConflict { + already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid), + update_tx: (update_pos, update_txid), + }); + } + TxHeight::Unconfirmed => { + // the new position of the conflicting tx is "unconfirmed", therefore it can + // be evicted + changeset.chain.txids.insert(conflicting_txid, None); + } + }, + }; + } + + Ok(()) + } + + /// Applies `changeset` to `self`. + /// + /// **Warning** this method assumes the changeset is assumed to be correctly formed. If it isn't + /// then the chain graph may not behave correctly in the future and may panic unexpectedly. + pub fn apply_changeset(&mut self, changeset: ChangeSet

) { + self.chain.apply_changeset(changeset.chain); + self.graph.apply_additions(changeset.graph); + } + + /// Applies the `update` chain graph. Note this is shorthand for calling + /// [`Self::determine_changeset()`] and [`Self::apply_changeset()`] in sequence. + pub fn apply_update(&mut self, update: ChainGraph

) -> Result, UpdateError

> { + let changeset = self.determine_changeset(&update)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Get the full transaction output at an outpoint if it exists in the chain and the graph. + pub fn full_txout(&self, outpoint: OutPoint) -> Option> { + self.chain.full_txout(&self.graph, outpoint) + } + + /// Iterate over the full transactions and their position in the chain ordered by their position + /// in ascending order. + pub fn transactions_in_chain(&self) -> impl DoubleEndedIterator { + self.chain + .txids() + .map(move |(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist"))) + } + + /// Finds the transaction in the chain that spends `outpoint` given the input/output + /// relationships in `graph`. Note that the transaction including `outpoint` does not need to be + /// in the `graph` or the `chain` for this to return `Some(_)`. + pub fn spent_by(&self, outpoint: OutPoint) -> Option<(&P, Txid)> { + self.chain.spent_by(&self.graph, outpoint) + } + + /// Whether the chain graph contains any data whatsoever. + pub fn is_empty(&self) -> bool { + self.chain.is_empty() && self.graph.is_empty() + } +} + +/// Represents changes to [`ChainGraph`]. +/// +/// This is essentially a combination of [`sparse_chain::ChangeSet`] and [`tx_graph::Additions`]. +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde( + crate = "serde_crate", + bound( + deserialize = "P: serde::Deserialize<'de>", + serialize = "P: serde::Serialize" + ) + ) +)] +#[must_use] +pub struct ChangeSet

{ + pub chain: sparse_chain::ChangeSet

, + pub graph: tx_graph::Additions, +} + +impl

ChangeSet

{ + /// Returns `true` if this [`ChangeSet`] records no changes. + pub fn is_empty(&self) -> bool { + self.chain.is_empty() && self.graph.is_empty() + } + + /// Returns `true` if this [`ChangeSet`] contains transaction evictions. + pub fn contains_eviction(&self) -> bool { + self.chain + .txids + .iter() + .any(|(_, new_pos)| new_pos.is_none()) + } + + /// Appends the changes in `other` into self such that applying `self` afterwards has the same + /// effect as sequentially applying the original `self` and `other`. + pub fn append(&mut self, other: ChangeSet

) + where + P: ChainPosition, + { + self.chain.append(other.chain); + self.graph.append(other.graph); + } +} + +impl

Default for ChangeSet

{ + fn default() -> Self { + Self { + chain: Default::default(), + graph: Default::default(), + } + } +} + +impl

ForEachTxOut for ChainGraph

{ + fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) { + self.graph.for_each_txout(f) + } +} + +impl

ForEachTxOut for ChangeSet

{ + fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) { + self.graph.for_each_txout(f) + } +} + +/// Error that may occur when calling [`ChainGraph::new`]. +#[derive(Clone, Debug, PartialEq)] +pub enum NewError

{ + /// Two transactions within the sparse chain conflicted with each other + Conflict { a: (P, Txid), b: (P, Txid) }, + /// One or more transactions in the chain were not in the graph + Missing(HashSet), +} + +impl core::fmt::Display for NewError

{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + NewError::Conflict { a, b } => write!( + f, + "Unable to inflate sparse chain to chain graph since transactions {:?} and {:?}", + a, b + ), + NewError::Missing(missing) => write!( + f, + "missing full transactions for {}", + missing + .iter() + .map(|txid| txid.to_string()) + .collect::>() + .join(", ") + ), + } + } +} + +#[cfg(feature = "std")] +impl std::error::Error for NewError

{} + +/// Error that may occur when inserting a transaction. +/// +/// Refer to [`ChainGraph::insert_tx_preview`] and [`ChainGraph::insert_tx`]. +#[derive(Clone, Debug, PartialEq)] +pub enum InsertTxError

{ + Chain(sparse_chain::InsertTxError

), + UnresolvableConflict(UnresolvableConflict

), +} + +impl core::fmt::Display for InsertTxError

{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + InsertTxError::Chain(inner) => core::fmt::Display::fmt(inner, f), + InsertTxError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f), + } + } +} + +impl

From> for InsertTxError

{ + fn from(inner: sparse_chain::InsertTxError

) -> Self { + Self::Chain(inner) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for InsertTxError

{} + +/// A nice alias of [`sparse_chain::InsertCheckpointError`]. +pub type InsertCheckpointError = sparse_chain::InsertCheckpointError; + +/// Represents an update failure. +#[derive(Clone, Debug, PartialEq)] +pub enum UpdateError

{ + /// The update chain was inconsistent with the existing chain + Chain(sparse_chain::UpdateError

), + /// A transaction in the update spent the same input as an already confirmed transaction + UnresolvableConflict(UnresolvableConflict

), +} + +impl core::fmt::Display for UpdateError

{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + UpdateError::Chain(inner) => core::fmt::Display::fmt(inner, f), + UpdateError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f), + } + } +} + +impl

From> for UpdateError

{ + fn from(inner: sparse_chain::UpdateError

) -> Self { + Self::Chain(inner) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for UpdateError

{} + +/// Represents an unresolvable conflict between an update's transaction and an +/// already-confirmed transaction. +#[derive(Clone, Debug, PartialEq)] +pub struct UnresolvableConflict

{ + pub already_confirmed_tx: (P, Txid), + pub update_tx: (P, Txid), +} + +impl core::fmt::Display for UnresolvableConflict

{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let Self { + already_confirmed_tx, + update_tx, + } = self; + write!(f, "update transaction {} at height {:?} conflicts with an already confirmed transaction {} at height {:?}", + update_tx.1, update_tx.0, already_confirmed_tx.1, already_confirmed_tx.0) + } +} + +impl

From> for UpdateError

{ + fn from(inner: UnresolvableConflict

) -> Self { + Self::UnresolvableConflict(inner) + } +} + +impl

From> for InsertTxError

{ + fn from(inner: UnresolvableConflict

) -> Self { + Self::UnresolvableConflict(inner) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for UnresolvableConflict

{} diff --git a/crates/chain/src/descriptor_ext.rs b/crates/chain/src/descriptor_ext.rs new file mode 100644 index 000000000..e74928b85 --- /dev/null +++ b/crates/chain/src/descriptor_ext.rs @@ -0,0 +1,16 @@ +use crate::miniscript::{Descriptor, DescriptorPublicKey}; + +/// A trait to extend the functionality of a miniscript descriptor. +pub trait DescriptorExt { + /// Returns the minimum value (in satoshis) that an output should have to be broadcastable. + fn dust_value(&self) -> u64; +} + +impl DescriptorExt for Descriptor { + fn dust_value(&self) -> u64 { + self.at_derivation_index(0) + .script_pubkey() + .dust_value() + .to_sat() + } +} diff --git a/crates/chain/src/example_utils.rs b/crates/chain/src/example_utils.rs new file mode 100644 index 000000000..8077e2118 --- /dev/null +++ b/crates/chain/src/example_utils.rs @@ -0,0 +1,30 @@ +#![allow(unused)] +use alloc::vec::Vec; +use bitcoin::{ + consensus, + hashes::{hex::FromHex, Hash}, + Transaction, +}; + +use crate::BlockId; + +pub const RAW_TX_1: &str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100"; +pub const RAW_TX_2: &str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100"; 
+pub const RAW_TX_3: &str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100"; +pub const RAW_TX_4: &str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100"; + +pub fn tx_from_hex(s: &str) -> Transaction { + let raw = Vec::from_hex(s).expect("data must be in hex"); + consensus::deserialize(raw.as_slice()).expect("must deserialize") +} + +pub fn new_hash(s: &str) -> H { + ::hash(s.as_bytes()) +} + +pub fn new_block_id(height: u32, hash: &str) -> BlockId { + BlockId { + height, + hash: new_hash(hash), + } +} diff --git a/crates/chain/src/keychain.rs b/crates/chain/src/keychain.rs new file mode 100644 index 000000000..f24938260 --- /dev/null +++ b/crates/chain/src/keychain.rs @@ -0,0 +1,309 @@ +//! Module for keychain based structures. +//! +//! A keychain here is a set of application defined indexes for a minscript descriptor where we can +//! derive script pubkeys at a particular derivation index. The application's index is simply +//! anything that implements `Ord`. +//! +//! [`KeychainTxOutIndex`] indexes script pubkeys of keychains and scans in relevant outpoints (that +//! has a `txout` containing an indexed script pubkey). 
Internally, this uses [`SpkTxOutIndex`], but +//! also maintains "revealed" and "lookahead" index count per keychain. +//! +//! [`KeychainTracker`] combines [`ChainGraph`] and [`KeychainTxOutIndex`] and enforces atomic +//! changes between both these structures. [`KeychainScan`] is a structure used to update to +//! [`KeychainTracker`] and changes made on a [`KeychainTracker`] are reported by +//! [`KeychainChangeSet`]s. +//! +//! [`SpkTxOutIndex`]: crate::SpkTxOutIndex +use crate::{ + chain_graph::{self, ChainGraph}, + collections::BTreeMap, + sparse_chain::ChainPosition, + tx_graph::TxGraph, + ForEachTxOut, +}; + +#[cfg(feature = "miniscript")] +pub mod persist; +#[cfg(feature = "miniscript")] +pub use persist::*; +#[cfg(feature = "miniscript")] +mod tracker; +#[cfg(feature = "miniscript")] +pub use tracker::*; +#[cfg(feature = "miniscript")] +mod txout_index; +#[cfg(feature = "miniscript")] +pub use txout_index::*; + +/// Represents updates to the derivation index of a [`KeychainTxOutIndex`]. +/// +/// It can be applied to [`KeychainTxOutIndex`] with [`apply_additions`]. [`DerivationAdditions] are +/// monotone in that they will never decrease the revealed derivation index. +/// +/// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex +/// [`apply_additions`]: crate::keychain::KeychainTxOutIndex::apply_additions +#[derive(Clone, Debug, PartialEq)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde( + crate = "serde_crate", + bound( + deserialize = "K: Ord + serde::Deserialize<'de>", + serialize = "K: Ord + serde::Serialize" + ) + ) +)] +#[must_use] +pub struct DerivationAdditions(pub BTreeMap); + +impl DerivationAdditions { + /// Returns whether the additions are empty. + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Get the inner map of keychain to its new derivation index. 
+ pub fn as_inner(&self) -> &BTreeMap { + &self.0 + } +} + +impl DerivationAdditions { + /// Append another [`DerivationAdditions`] into self. + /// + /// If keychain already exists, increases the index when other's index > self's index. + /// If keychain did not exist, append the new keychain. + pub fn append(&mut self, mut other: Self) { + self.0.iter_mut().for_each(|(key, index)| { + if let Some(other_index) = other.0.remove(key) { + *index = other_index.max(*index); + } + }); + + self.0.append(&mut other.0); + } +} + +impl Default for DerivationAdditions { + fn default() -> Self { + Self(Default::default()) + } +} + +impl AsRef> for DerivationAdditions { + fn as_ref(&self) -> &BTreeMap { + &self.0 + } +} + +#[derive(Clone, Debug, PartialEq)] +/// An update that includes the last active indexes of each keychain. +pub struct KeychainScan { + /// The update data in the form of a chain that could be applied + pub update: ChainGraph

, + /// The last active indexes of each keychain + pub last_active_indices: BTreeMap, +} + +impl Default for KeychainScan { + fn default() -> Self { + Self { + update: Default::default(), + last_active_indices: Default::default(), + } + } +} + +impl From> for KeychainScan { + fn from(update: ChainGraph

) -> Self { + KeychainScan { + update, + last_active_indices: Default::default(), + } + } +} + +/// Represents changes to a [`KeychainTracker`]. +/// +/// This is essentially a combination of [`DerivationAdditions`] and [`chain_graph::ChangeSet`]. +#[derive(Clone, Debug)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde( + crate = "serde_crate", + bound( + deserialize = "K: Ord + serde::Deserialize<'de>, P: serde::Deserialize<'de>", + serialize = "K: Ord + serde::Serialize, P: serde::Serialize" + ) + ) +)] +#[must_use] +pub struct KeychainChangeSet { + /// The changes in local keychain derivation indices + pub derivation_indices: DerivationAdditions, + /// The changes that have occurred in the blockchain + pub chain_graph: chain_graph::ChangeSet

, +} + +impl Default for KeychainChangeSet { + fn default() -> Self { + Self { + chain_graph: Default::default(), + derivation_indices: Default::default(), + } + } +} + +impl KeychainChangeSet { + /// Returns whether the [`KeychainChangeSet`] is empty (no changes recorded). + pub fn is_empty(&self) -> bool { + self.chain_graph.is_empty() && self.derivation_indices.is_empty() + } + + /// Appends the changes in `other` into `self` such that applying `self` afterwards has the same + /// effect as sequentially applying the original `self` and `other`. + /// + /// Note the derivation indices cannot be decreased so `other` will only change the derivation + /// index for a keychain if it's entry is higher than the one in `self`. + pub fn append(&mut self, other: KeychainChangeSet) + where + K: Ord, + P: ChainPosition, + { + self.derivation_indices.append(other.derivation_indices); + self.chain_graph.append(other.chain_graph); + } +} + +impl From> for KeychainChangeSet { + fn from(changeset: chain_graph::ChangeSet

) -> Self { + Self { + chain_graph: changeset, + ..Default::default() + } + } +} + +impl From> for KeychainChangeSet { + fn from(additions: DerivationAdditions) -> Self { + Self { + derivation_indices: additions, + ..Default::default() + } + } +} + +impl AsRef for KeychainScan { + fn as_ref(&self) -> &TxGraph { + self.update.graph() + } +} + +impl ForEachTxOut for KeychainChangeSet { + fn for_each_txout(&self, f: impl FnMut((bitcoin::OutPoint, &bitcoin::TxOut))) { + self.chain_graph.for_each_txout(f) + } +} + +/// Balance differentiated in various categories. +#[derive(Debug, PartialEq, Eq, Clone, Default)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate",) +)] +pub struct Balance { + /// All coinbase outputs not yet matured + pub immature: u64, + /// Unconfirmed UTXOs generated by a wallet tx + pub trusted_pending: u64, + /// Unconfirmed UTXOs received from an external wallet + pub untrusted_pending: u64, + /// Confirmed and immediately spendable balance + pub confirmed: u64, +} + +impl Balance { + /// Get sum of trusted_pending and confirmed coins. + /// + /// This is the balance you can spend right now that shouldn't get cancelled via another party + /// double spending it. + pub fn trusted_spendable(&self) -> u64 { + self.confirmed + self.trusted_pending + } + + /// Get the whole balance visible to the wallet. 
+ pub fn total(&self) -> u64 { + self.confirmed + self.trusted_pending + self.untrusted_pending + self.immature + } +} + +impl core::fmt::Display for Balance { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!( + f, + "{{ immature: {}, trusted_pending: {}, untrusted_pending: {}, confirmed: {} }}", + self.immature, self.trusted_pending, self.untrusted_pending, self.confirmed + ) + } +} + +impl core::ops::Add for Balance { + type Output = Self; + + fn add(self, other: Self) -> Self { + Self { + immature: self.immature + other.immature, + trusted_pending: self.trusted_pending + other.trusted_pending, + untrusted_pending: self.untrusted_pending + other.untrusted_pending, + confirmed: self.confirmed + other.confirmed, + } + } +} + +#[cfg(test)] +mod test { + use crate::TxHeight; + + use super::*; + #[test] + fn append_keychain_derivation_indices() { + #[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)] + enum Keychain { + One, + Two, + Three, + Four, + } + let mut lhs_di = BTreeMap::::default(); + let mut rhs_di = BTreeMap::::default(); + lhs_di.insert(Keychain::One, 7); + lhs_di.insert(Keychain::Two, 0); + rhs_di.insert(Keychain::One, 3); + rhs_di.insert(Keychain::Two, 5); + lhs_di.insert(Keychain::Three, 3); + rhs_di.insert(Keychain::Four, 4); + let mut lhs = KeychainChangeSet { + derivation_indices: DerivationAdditions(lhs_di), + chain_graph: chain_graph::ChangeSet::::default(), + }; + + let rhs = KeychainChangeSet { + derivation_indices: DerivationAdditions(rhs_di), + chain_graph: chain_graph::ChangeSet::::default(), + }; + + lhs.append(rhs); + + // Exiting index doesn't update if new index in `other` is lower than `self` + assert_eq!(lhs.derivation_indices.0.get(&Keychain::One), Some(&7)); + // Existing index updates if new index in `other` is higher than `self. 
+ assert_eq!(lhs.derivation_indices.0.get(&Keychain::Two), Some(&5)); + // Existing index unchanged, if keychain doesn't exist in `other` + assert_eq!(lhs.derivation_indices.0.get(&Keychain::Three), Some(&3)); + // New keychain gets added if keychain is in `other`, but not in `self`. + assert_eq!(lhs.derivation_indices.0.get(&Keychain::Four), Some(&4)); + } +} diff --git a/crates/chain/src/keychain/persist.rs b/crates/chain/src/keychain/persist.rs new file mode 100644 index 000000000..94c9faf25 --- /dev/null +++ b/crates/chain/src/keychain/persist.rs @@ -0,0 +1,108 @@ +//! Persistence for changes made to a [`KeychainTracker`]. +//! +//! BDK's [`KeychainTracker`] needs somewhere to persist changes it makes during operation. +//! Operations like giving out a new address are crucial to persist so that next time the +//! application is loaded it can find transactions related to that address. +//! +//! Note that the [`KeychainTracker`] does not read this persisted data during operation since it +//! always has a copy in memory. +//! +//! [`KeychainTracker`]: crate::keychain::KeychainTracker + +use crate::{keychain, sparse_chain::ChainPosition}; + +/// `Persist` wraps a [`PersistBackend`] to create a convenient staging area for changes before they +/// are persisted. Not all changes made to the [`KeychainTracker`] need to be written to disk right +/// away so you can use [`Persist::stage`] to *stage* it first and then [`Persist::commit`] to +/// finally write it to disk. +/// +/// [`KeychainTracker`]: keychain::KeychainTracker +#[derive(Debug)] +pub struct Persist { + backend: B, + stage: keychain::KeychainChangeSet, +} + +impl Persist { + /// Create a new `Persist` from a [`PersistBackend`]. + pub fn new(backend: B) -> Self { + Self { + backend, + stage: Default::default(), + } + } + + /// Stage a `changeset` to later persistence with [`commit`]. 
+ /// + /// [`commit`]: Self::commit + pub fn stage(&mut self, changeset: keychain::KeychainChangeSet) + where + K: Ord, + P: ChainPosition, + { + self.stage.append(changeset) + } + + /// Get the changes that haven't been commited yet + pub fn staged(&self) -> &keychain::KeychainChangeSet { + &self.stage + } + + /// Commit the staged changes to the underlying persistence backend. + /// + /// Retuns a backend defined error if this fails + pub fn commit(&mut self) -> Result<(), B::WriteError> + where + B: PersistBackend, + { + self.backend.append_changeset(&self.stage)?; + self.stage = Default::default(); + Ok(()) + } +} + +/// A persistence backend for [`Persist`]. +pub trait PersistBackend { + /// The error the backend returns when it fails to write. + type WriteError: core::fmt::Debug; + + /// The error the backend returns when it fails to load. + type LoadError: core::fmt::Debug; + + /// Appends a new changeset to the persistance backend. + /// + /// It is up to the backend what it does with this. It could store every changeset in a list or + /// it insert the actual changes to a more structured database. All it needs to guarantee is + /// that [`load_into_keychain_tracker`] restores a keychain tracker to what it should be if all + /// changesets had been applied sequentially. + /// + /// [`load_into_keychain_tracker`]: Self::load_into_keychain_tracker + fn append_changeset( + &mut self, + changeset: &keychain::KeychainChangeSet, + ) -> Result<(), Self::WriteError>; + + /// Applies all the changesets the backend has received to `tracker`. 
+ fn load_into_keychain_tracker( + &mut self, + tracker: &mut keychain::KeychainTracker, + ) -> Result<(), Self::LoadError>; +} + +impl PersistBackend for () { + type WriteError = (); + type LoadError = (); + + fn append_changeset( + &mut self, + _changeset: &keychain::KeychainChangeSet, + ) -> Result<(), Self::WriteError> { + Ok(()) + } + fn load_into_keychain_tracker( + &mut self, + _tracker: &mut keychain::KeychainTracker, + ) -> Result<(), Self::LoadError> { + Ok(()) + } +} diff --git a/crates/chain/src/keychain/tracker.rs b/crates/chain/src/keychain/tracker.rs new file mode 100644 index 000000000..e75d299fb --- /dev/null +++ b/crates/chain/src/keychain/tracker.rs @@ -0,0 +1,308 @@ +use bitcoin::Transaction; +use miniscript::{Descriptor, DescriptorPublicKey}; + +use crate::{ + chain_graph::{self, ChainGraph}, + collections::*, + keychain::{KeychainChangeSet, KeychainScan, KeychainTxOutIndex}, + sparse_chain::{self, SparseChain}, + tx_graph::TxGraph, + BlockId, FullTxOut, TxHeight, +}; + +use super::{Balance, DerivationAdditions}; + +/// A convenient combination of a [`KeychainTxOutIndex`] and a [`ChainGraph`]. +/// +/// The [`KeychainTracker`] atomically updates its [`KeychainTxOutIndex`] whenever new chain data is +/// incorporated into its internal [`ChainGraph`]. +#[derive(Clone, Debug)] +pub struct KeychainTracker { + /// Index between script pubkeys to transaction outputs + pub txout_index: KeychainTxOutIndex, + chain_graph: ChainGraph
<K, P>
, +} + +impl KeychainTracker +where + P: sparse_chain::ChainPosition, + K: Ord + Clone + core::fmt::Debug, +{ + /// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses for it. + /// This is just shorthand for calling [`KeychainTxOutIndex::add_keychain`] on the internal + /// `txout_index`. + /// + /// Adding a keychain means you will be able to derive new script pubkeys under that keychain + /// and the tracker will discover transaction outputs with those script pubkeys. + pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor) { + self.txout_index.add_keychain(keychain, descriptor) + } + + /// Get the internal map of keychains to their descriptors. This is just shorthand for calling + /// [`KeychainTxOutIndex::keychains`] on the internal `txout_index`. + pub fn keychains(&mut self) -> &BTreeMap> { + self.txout_index.keychains() + } + + /// Get the checkpoint limit of the internal [`SparseChain`]. + /// + /// Refer to [`SparseChain::checkpoint_limit`] for more. + pub fn checkpoint_limit(&self) -> Option { + self.chain_graph.checkpoint_limit() + } + + /// Set the checkpoint limit of the internal [`SparseChain`]. + /// + /// Refer to [`SparseChain::set_checkpoint_limit`] for more. + pub fn set_checkpoint_limit(&mut self, limit: Option) { + self.chain_graph.set_checkpoint_limit(limit) + } + + /// Determines the resultant [`KeychainChangeSet`] if the given [`KeychainScan`] is applied. + /// + /// Internally, we call [`ChainGraph::determine_changeset`] and also determine the additions of + /// [`KeychainTxOutIndex`]. + pub fn determine_changeset( + &self, + scan: &KeychainScan, + ) -> Result, chain_graph::UpdateError
<P>
> { + // TODO: `KeychainTxOutIndex::determine_additions` + let mut derivation_indices = scan.last_active_indices.clone(); + derivation_indices.retain(|keychain, index| { + match self.txout_index.last_revealed_index(keychain) { + Some(existing) => *index > existing, + None => true, + } + }); + + Ok(KeychainChangeSet { + derivation_indices: DerivationAdditions(derivation_indices), + chain_graph: self.chain_graph.determine_changeset(&scan.update)?, + }) + } + + /// Directly applies a [`KeychainScan`] on [`KeychainTracker`]. + /// + /// This is equivilant to calling [`determine_changeset`] and [`apply_changeset`] in sequence. + /// + /// [`determine_changeset`]: Self::determine_changeset + /// [`apply_changeset`]: Self::apply_changeset + pub fn apply_update( + &mut self, + scan: KeychainScan, + ) -> Result, chain_graph::UpdateError
<P>
> { + let changeset = self.determine_changeset(&scan)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Applies the changes in `changeset` to [`KeychainTracker`]. + /// + /// Internally, this calls [`KeychainTxOutIndex::apply_additions`] and + /// [`ChainGraph::apply_changeset`] in sequence. + pub fn apply_changeset(&mut self, changeset: KeychainChangeSet) { + let KeychainChangeSet { + derivation_indices, + chain_graph, + } = changeset; + self.txout_index.apply_additions(derivation_indices); + let _ = self.txout_index.scan(&chain_graph); + self.chain_graph.apply_changeset(chain_graph) + } + + /// Iterates through [`FullTxOut`]s that are considered to exist in our representation of the + /// blockchain/mempool. + /// + /// In other words, these are `txout`s of confirmed and in-mempool transactions, based on our + /// view of the blockchain/mempool. + pub fn full_txouts(&self) -> impl Iterator)> + '_ { + self.txout_index + .txouts() + .filter_map(move |(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?))) + } + + /// Iterates through [`FullTxOut`]s that are unspent outputs. + /// + /// Refer to [`full_txouts`] for more. + /// + /// [`full_txouts`]: Self::full_txouts + pub fn full_utxos(&self) -> impl Iterator)> + '_ { + self.full_txouts() + .filter(|(_, txout)| txout.spent_by.is_none()) + } + + /// Returns a reference to the internal [`ChainGraph`]. + pub fn chain_graph(&self) -> &ChainGraph
<K, P>
{ + &self.chain_graph + } + + /// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]). + pub fn graph(&self) -> &TxGraph { + self.chain_graph().graph() + } + + /// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]). + pub fn chain(&self) -> &SparseChain
<P>
{ + self.chain_graph().chain() + } + + /// Determines the changes as result of inserting `block_id` (a height and block hash) into the + /// tracker. + /// + /// The caller is responsible for guaranteeing that a block exists at that height. If a + /// checkpoint already exists at that height with a different hash this will return an error. + /// Otherwise it will return `Ok(true)` if the checkpoint didn't already exist or `Ok(false)` + /// if it did. + /// + /// **Warning**: This function modifies the internal state of the tracker. You are responsible + /// for persisting these changes to disk if you need to restore them. + pub fn insert_checkpoint_preview( + &self, + block_id: BlockId, + ) -> Result, chain_graph::InsertCheckpointError> { + Ok(KeychainChangeSet { + chain_graph: self.chain_graph.insert_checkpoint_preview(block_id)?, + ..Default::default() + }) + } + + /// Directly insert a `block_id` into the tracker. + /// + /// This is equivalent of calling [`insert_checkpoint_preview`] and [`apply_changeset`] in + /// sequence. + /// + /// [`insert_checkpoint_preview`]: Self::insert_checkpoint_preview + /// [`apply_changeset`]: Self::apply_changeset + pub fn insert_checkpoint( + &mut self, + block_id: BlockId, + ) -> Result, chain_graph::InsertCheckpointError> { + let changeset = self.insert_checkpoint_preview(block_id)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Determines the changes as result of inserting a transaction into the inner [`ChainGraph`] + /// and optionally into the inner chain at `position`. + /// + /// **Warning**: This function modifies the internal state of the chain graph. You are + /// responsible for persisting these changes to disk if you need to restore them. + pub fn insert_tx_preview( + &self, + tx: Transaction, + pos: P, + ) -> Result, chain_graph::InsertTxError
<P>
> { + Ok(KeychainChangeSet { + chain_graph: self.chain_graph.insert_tx_preview(tx, pos)?, + ..Default::default() + }) + } + + /// Directly insert a transaction into the inner [`ChainGraph`] and optionally into the inner + /// chain at `position`. + /// + /// This is equivilant of calling [`insert_tx_preview`] and [`apply_changeset`] in sequence. + /// + /// [`insert_tx_preview`]: Self::insert_tx_preview + /// [`apply_changeset`]: Self::apply_changeset + pub fn insert_tx( + &mut self, + tx: Transaction, + pos: P, + ) -> Result, chain_graph::InsertTxError
<P>
> { + let changeset = self.insert_tx_preview(tx, pos)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Returns the *balance* of the keychain i.e. the value of unspent transaction outputs tracked. + /// + /// The caller provides a `should_trust` predicate which must decide whether the value of + /// unconfirmed outputs on this keychain are guaranteed to be realized or not. For example: + /// + /// - For an *internal* (change) keychain `should_trust` should in general be `true` since even if + /// you lose an internal output due to eviction you will always gain back the value from whatever output the + /// unconfirmed transaction was spending (since that output is presumeably from your wallet). + /// - For an *external* keychain you might want `should_trust` to return `false` since someone may cancel (by double spending) + /// a payment made to addresses on that keychain. + /// + /// When in doubt set `should_trust` to return false. This doesn't do anything other than change + /// where the unconfirmed output's value is accounted for in `Balance`. 
+ pub fn balance(&self, mut should_trust: impl FnMut(&K) -> bool) -> Balance { + let mut immature = 0; + let mut trusted_pending = 0; + let mut untrusted_pending = 0; + let mut confirmed = 0; + let last_sync_height = self.chain().latest_checkpoint().map(|latest| latest.height); + for ((keychain, _), utxo) in self.full_utxos() { + let chain_position = &utxo.chain_position; + + match chain_position.height() { + TxHeight::Confirmed(_) => { + if utxo.is_on_coinbase { + if utxo.is_mature( + last_sync_height + .expect("since it's confirmed we must have a checkpoint"), + ) { + confirmed += utxo.txout.value; + } else { + immature += utxo.txout.value; + } + } else { + confirmed += utxo.txout.value; + } + } + TxHeight::Unconfirmed => { + if should_trust(keychain) { + trusted_pending += utxo.txout.value; + } else { + untrusted_pending += utxo.txout.value; + } + } + } + } + + Balance { + immature, + trusted_pending, + untrusted_pending, + confirmed, + } + } + + /// Returns the balance of all spendable confirmed unspent outputs of this tracker at a + /// particular height. + pub fn balance_at(&self, height: u32) -> u64 { + self.full_txouts() + .filter(|(_, full_txout)| full_txout.is_spendable_at(height)) + .map(|(_, full_txout)| full_txout.txout.value) + .sum() + } +} + +impl Default for KeychainTracker { + fn default() -> Self { + Self { + txout_index: Default::default(), + chain_graph: Default::default(), + } + } +} + +impl AsRef> for KeychainTracker { + fn as_ref(&self) -> &SparseChain
<P>
{ + self.chain_graph.chain() + } +} + +impl AsRef for KeychainTracker { + fn as_ref(&self) -> &TxGraph { + self.chain_graph.graph() + } +} + +impl AsRef> for KeychainTracker { + fn as_ref(&self) -> &ChainGraph
<K, P>
{ + &self.chain_graph + } +} diff --git a/crates/chain/src/keychain/txout_index.rs b/crates/chain/src/keychain/txout_index.rs new file mode 100644 index 000000000..16ee49fd1 --- /dev/null +++ b/crates/chain/src/keychain/txout_index.rs @@ -0,0 +1,591 @@ +use crate::{ + collections::*, + miniscript::{Descriptor, DescriptorPublicKey}, + ForEachTxOut, SpkTxOutIndex, +}; +use alloc::{borrow::Cow, vec::Vec}; +use bitcoin::{secp256k1::Secp256k1, OutPoint, Script, TxOut}; +use core::{fmt::Debug, ops::Deref}; + +use super::DerivationAdditions; + +/// Maximum [BIP32](https://bips.xyz/32) derivation index. +pub const BIP32_MAX_INDEX: u32 = (1 << 31) - 1; + +/// A convenient wrapper around [`SpkTxOutIndex`] that relates script pubkeys to miniscript public +/// [`Descriptor`]s. +/// +/// Descriptors are referenced by the provided keychain generic (`K`). +/// +/// Script pubkeys for a descriptor are revealed chronologically from index 0. I.e. If the last +/// revealed index of a descriptor is 5, scripts of indices 0 to 4 are guaranteed to already be +/// revealed. In addition to revealed scripts, we have a `lookahead` parameter for each keychain +/// which defines the number of script pubkeys to store ahead of the last revealed index. +/// +/// Methods that could update the last revealed index will return [`DerivationAdditions`] to report +/// these changes. This can be persisted for future recovery. 
+/// +/// ## Synopsis +/// +/// ``` +/// use bdk_chain::keychain::KeychainTxOutIndex; +/// # use bdk_chain::{ miniscript::{Descriptor, DescriptorPublicKey} }; +/// # use core::str::FromStr; +/// +/// // imagine our service has internal and external addresses but also addresses for users +/// #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)] +/// enum MyKeychain { +/// External, +/// Internal, +/// MyAppUser { +/// user_id: u32 +/// } +/// } +/// +/// let mut txout_index = KeychainTxOutIndex::::default(); +/// +/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only(); +/// # let (external_descriptor,_) = Descriptor::::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap(); +/// # let (internal_descriptor,_) = Descriptor::::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap(); +/// # let descriptor_for_user_42 = external_descriptor.clone(); +/// txout_index.add_keychain(MyKeychain::External, external_descriptor); +/// txout_index.add_keychain(MyKeychain::Internal, internal_descriptor); +/// txout_index.add_keychain(MyKeychain::MyAppUser { user_id: 42 }, descriptor_for_user_42); +/// +/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 }); +/// ``` +/// +/// [`Ord`]: core::cmp::Ord +/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex +/// [`Descriptor`]: crate::miniscript::Descriptor +#[derive(Clone, Debug)] +pub struct KeychainTxOutIndex { + inner: SpkTxOutIndex<(K, u32)>, + // descriptors of each keychain + keychains: BTreeMap>, + // last stored indexes + last_revealed: BTreeMap, + // lookahead settings for each keychain + lookahead: BTreeMap, +} + +impl Default for KeychainTxOutIndex { + fn default() -> Self { + Self { + inner: SpkTxOutIndex::default(), + 
keychains: BTreeMap::default(), + last_revealed: BTreeMap::default(), + lookahead: BTreeMap::default(), + } + } +} + +impl Deref for KeychainTxOutIndex { + type Target = SpkTxOutIndex<(K, u32)>; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +impl KeychainTxOutIndex { + /// Scans an object for relevant outpoints, which are stored and indexed internally. + /// + /// If the matched script pubkey is part of the lookahead, the last stored index is updated for + /// the script pubkey's keychain and the [`DerivationAdditions`] returned will reflect the + /// change. + /// + /// Typically this method is used in two situations: + /// + /// 1. After loading transaction data from disk you may scan over all the txouts to restore all + /// your txouts. + /// 2. When getting new data from the chain you usually scan it before incorporating it into + /// your chain state (i.e. `SparseChain`, `ChainGraph`). + /// + /// See [`ForEachTxout`] for the types that support this. + /// + /// [`ForEachTxout`]: crate::ForEachTxOut + pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> DerivationAdditions { + let mut additions = DerivationAdditions::::default(); + txouts.for_each_txout(|(op, txout)| additions.append(self.scan_txout(op, txout))); + additions + } + + /// Scan a single outpoint for a matching script pubkey. + /// + /// If it matches the index will store and index it. + pub fn scan_txout(&mut self, op: OutPoint, txout: &TxOut) -> DerivationAdditions { + match self.inner.scan_txout(op, txout).cloned() { + Some((keychain, index)) => self.reveal_to_target(&keychain, index).1, + None => DerivationAdditions::default(), + } + } + + /// Return a reference to the internal [`SpkTxOutIndex`]. + pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> { + &self.inner + } + + /// Return a reference to the internal map of keychain to descriptors. 
+ pub fn keychains(&self) -> &BTreeMap> { + &self.keychains + } + + /// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses for it. + /// + /// Adding a keychain means you will be able to derive new script pubkeys under that keychain + /// and the txout index will discover transaction outputs with those script pubkeys. + /// + /// # Panics + /// + /// This will panic if a different `descriptor` is introduced to the same `keychain`. + pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor) { + let old_descriptor = &*self.keychains.entry(keychain).or_insert(descriptor.clone()); + assert_eq!( + &descriptor, old_descriptor, + "keychain already contains a different descriptor" + ); + } + + /// Return the lookahead setting for each keychain. + /// + /// Refer to [`set_lookahead`] for a deeper explanation on `lookahead`. + /// + /// [`set_lookahead`]: Self::set_lookahead + pub fn lookaheads(&self) -> &BTreeMap { + &self.lookahead + } + + /// Convenience method to call [`set_lookahead`] for all keychains. + /// + /// [`set_lookahead`]: Self::set_lookahead + pub fn set_lookahead_for_all(&mut self, lookahead: u32) { + for keychain in &self.keychains.keys().cloned().collect::>() { + self.lookahead.insert(keychain.clone(), lookahead); + self.replenish_lookahead(keychain); + } + } + + /// Set the lookahead count for `keychain`. + /// + /// The lookahead is the number of scripts to cache ahead of the last stored script index. This + /// is useful during a scan via [`scan`] or [`scan_txout`]. + /// + /// # Panics + /// + /// This will panic if `keychain` does not exist. + /// + /// [`scan`]: Self::scan + /// [`scan_txout`]: Self::scan_txout + pub fn set_lookahead(&mut self, keychain: &K, lookahead: u32) { + self.lookahead.insert(keychain.clone(), lookahead); + self.replenish_lookahead(keychain); + } + + /// Convenience method to call [`lookahead_to_target`] for multiple keychains. 
+ /// + /// [`lookahead_to_target`]: Self::lookahead_to_target + pub fn lookahead_to_target_multi(&mut self, target_indexes: BTreeMap) { + for (keychain, target_index) in target_indexes { + self.lookahead_to_target(&keychain, target_index) + } + } + + /// Store lookahead scripts until `target_index`. + /// + /// This does not change the `lookahead` setting. + pub fn lookahead_to_target(&mut self, keychain: &K, target_index: u32) { + let next_index = self.next_store_index(keychain); + if let Some(temp_lookahead) = target_index.checked_sub(next_index).filter(|&v| v > 0) { + let old_lookahead = self.lookahead.insert(keychain.clone(), temp_lookahead); + self.replenish_lookahead(keychain); + + // revert + match old_lookahead { + Some(lookahead) => self.lookahead.insert(keychain.clone(), lookahead), + None => self.lookahead.remove(keychain), + }; + } + } + + fn replenish_lookahead(&mut self, keychain: &K) { + let descriptor = self.keychains.get(keychain).expect("keychain must exist"); + let next_store_index = self.next_store_index(keychain); + let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1); + let lookahead = self.lookahead.get(keychain).map_or(0, |v| *v); + + for (new_index, new_spk) in range_descriptor_spks( + Cow::Borrowed(descriptor), + next_store_index..next_reveal_index + lookahead, + ) { + let _inserted = self + .inner + .insert_spk((keychain.clone(), new_index), new_spk); + debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index); + } + } + + fn next_store_index(&self, keychain: &K) -> u32 { + self.inner() + .all_spks() + .range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX)) + .last() + .map_or(0, |((_, v), _)| *v + 1) + } + + /// Generates script pubkey iterators for every `keychain`. The iterators iterate over all + /// derivable script pubkeys. 
+ pub fn spks_of_all_keychains( + &self, + ) -> BTreeMap + Clone> { + self.keychains + .iter() + .map(|(keychain, descriptor)| { + ( + keychain.clone(), + range_descriptor_spks(Cow::Owned(descriptor.clone()), 0..), + ) + }) + .collect() + } + + /// Generates a script pubkey iterator for the given `keychain`'s descriptor (if exists). The + /// iterator iterates over all derivable scripts of the keychain's descriptor. + /// + /// # Panics + /// + /// This will panic if `keychain` does not exist. + pub fn spks_of_keychain(&self, keychain: &K) -> impl Iterator + Clone { + let descriptor = self + .keychains + .get(keychain) + .expect("keychain must exist") + .clone(); + range_descriptor_spks(Cow::Owned(descriptor), 0..) + } + + /// Convenience method to get [`revealed_spks_of_keychain`] of all keychains. + /// + /// [`revealed_spks_of_keychain`]: Self::revealed_spks_of_keychain + pub fn revealed_spks_of_all_keychains( + &self, + ) -> BTreeMap + Clone> { + self.keychains + .keys() + .map(|keychain| (keychain.clone(), self.revealed_spks_of_keychain(keychain))) + .collect() + } + + /// Iterates over the script pubkeys revealed by this index under `keychain`. + pub fn revealed_spks_of_keychain( + &self, + keychain: &K, + ) -> impl DoubleEndedIterator + Clone { + let next_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1); + self.inner + .all_spks() + .range((keychain.clone(), u32::MIN)..(keychain.clone(), next_index)) + .map(|((_, derivation_index), spk)| (*derivation_index, spk)) + } + + /// Get the next derivation index for `keychain`. This is the index after the last revealed + /// derivation index. + /// + /// The second field in the returned tuple represents whether the next derivation index is new. + /// There are two scenarios where the next derivation index is reused (not new): + /// + /// 1. The keychain's descriptor has no wildcard, and a script has already been revealed. + /// 2. 
The number of revealed scripts has already reached 2^31 (refer to BIP-32). + /// + /// Not checking the second field of the tuple may result in address reuse. + /// + /// # Panics + /// + /// Panics if the `keychain` does not exist. + pub fn next_index(&self, keychain: &K) -> (u32, bool) { + let descriptor = self.keychains.get(keychain).expect("keychain must exist"); + let last_index = self.last_revealed.get(keychain).cloned(); + + // we can only get the next index if wildcard exists + let has_wildcard = descriptor.has_wildcard(); + + match last_index { + // if there is no index, next_index is always 0 + None => (0, true), + // descriptors without wildcards can only have one index + Some(_) if !has_wildcard => (0, false), + // derivation index must be < 2^31 (BIP-32) + Some(index) if index > BIP32_MAX_INDEX => { + unreachable!("index is out of bounds") + } + Some(index) if index == BIP32_MAX_INDEX => (index, false), + // get next derivation index + Some(index) => (index + 1, true), + } + } + + /// Get the last derivation index that is revealed for each keychain. + /// + /// Keychains with no revealed indices will not be included in the returned [`BTreeMap`]. + pub fn last_revealed_indices(&self) -> &BTreeMap { + &self.last_revealed + } + + /// Get the last derivation index revealed for `keychain`. + pub fn last_revealed_index(&self, keychain: &K) -> Option { + self.last_revealed.get(keychain).cloned() + } + + /// Convenience method to call [`Self::reveal_to_target`] on multiple keychains. 
+ pub fn reveal_to_target_multi( + &mut self, + keychains: &BTreeMap, + ) -> ( + BTreeMap>, + DerivationAdditions, + ) { + let mut additions = DerivationAdditions::default(); + let mut spks = BTreeMap::new(); + + for (keychain, &index) in keychains { + let (new_spks, new_additions) = self.reveal_to_target(keychain, index); + if !new_additions.is_empty() { + spks.insert(keychain.clone(), new_spks); + additions.append(new_additions); + } + } + + (spks, additions) + } + + /// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the + /// `target_index`. + /// + /// If the `target_index` cannot be reached (due to the descriptor having no wildcard, and/or + /// the `target_index` is in the hardened index range), this method will do a best-effort and + /// reveal up to the last possible index. + /// + /// This returns an iterator of newly revealed indices (along side their scripts), and a + /// [`DerivationAdditions`] which reports updates to the latest revealed index. If no new script + /// pubkeys are revealed, both of these will be empty. + /// + /// # Panics + /// + /// Panics if `keychain` does not exist. 
+ pub fn reveal_to_target( + &mut self, + keychain: &K, + target_index: u32, + ) -> (impl Iterator, DerivationAdditions) { + let descriptor = self.keychains.get(keychain).expect("keychain must exist"); + let has_wildcard = descriptor.has_wildcard(); + + let target_index = if has_wildcard { target_index } else { 0 }; + let next_store_index = self.next_store_index(keychain); + let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1); + let lookahead = self.lookahead.get(keychain).map_or(0, |v| *v); + + // if we are able to reveal new indexes, the latest revealed index goes here + let mut revealed_index = None; + + // if target is already surpassed, we have nothing to reveal + if next_reveal_index <= target_index + // if target is already stored (due to lookahead), this can be our new revealed index + && target_index < next_reveal_index + lookahead + { + revealed_index = Some(target_index); + } + + // we range over indexes that are not stored + let range = next_reveal_index + lookahead..=target_index + lookahead; + + for (new_index, new_spk) in range_descriptor_spks(Cow::Borrowed(descriptor), range) { + // no need to store if already stored + if new_index >= next_store_index { + let _inserted = self + .inner + .insert_spk((keychain.clone(), new_index), new_spk); + debug_assert!(_inserted, "must not have existing spk",); + } + + // everything after `target_index` is stored for lookahead only + if new_index <= target_index { + revealed_index = Some(new_index); + } + } + + match revealed_index { + Some(index) => { + let _old_index = self.last_revealed.insert(keychain.clone(), index); + debug_assert!(_old_index < Some(index)); + ( + range_descriptor_spks( + Cow::Owned(descriptor.clone()), + next_reveal_index..index + 1, + ), + DerivationAdditions(core::iter::once((keychain.clone(), index)).collect()), + ) + } + None => ( + range_descriptor_spks( + Cow::Owned(descriptor.clone()), + next_reveal_index..next_reveal_index, + ), + 
DerivationAdditions::default(), + ), + } + } + + /// Attempts to reveal the next script pubkey for `keychain`. + /// + /// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a + /// [`DerivationAdditions`] which represents changes in the last revealed index (if any). + /// + /// When a new script cannot be revealed, we return the last revealed script and an empty + /// [`DerivationAdditions`]. There are two scenarios when a new script pubkey cannot be derived: + /// + /// 1. The descriptor has no wildcard and already has one script revealed. + /// 2. The descriptor has already revealed scripts up to the numeric bound. + /// + /// # Panics + /// + /// Panics if the `keychain` does not exist. + pub fn reveal_next_spk(&mut self, keychain: &K) -> ((u32, &Script), DerivationAdditions) { + let (next_index, _) = self.next_index(keychain); + let additions = self.reveal_to_target(keychain, next_index).1; + let script = self + .inner + .spk_at_index(&(keychain.clone(), next_index)) + .expect("script must already be stored"); + ((next_index, script), additions) + } + + /// Gets the next unused script pubkey in the keychain. I.e. the script pubkey with the lowest + /// index that has not been used yet. + /// + /// This will derive and reveal a new script pubkey if no more unused script pubkeys exist. + /// + /// If the descriptor has no wildcard and already has a used script pubkey, or if a descriptor + /// has used all scripts up to the derivation bounds, the last derived script pubkey will be + /// returned. 
+ /// + /// # Panics + /// + /// Panics if `keychain` has never been added to the index + pub fn next_unused_spk(&mut self, keychain: &K) -> ((u32, &Script), DerivationAdditions) { + let need_new = self.unused_spks_of_keychain(keychain).next().is_none(); + // this rather strange branch is needed because of some lifetime issues + if need_new { + self.reveal_next_spk(keychain) + } else { + ( + self.unused_spks_of_keychain(keychain) + .next() + .expect("we already know next exists"), + DerivationAdditions::default(), + ) + } + } + + /// Marks the script pubkey at `index` as used even though it hasn't seen an output with it. + /// This only has an effect when the `index` had been added to `self` already and was unused. + /// + /// Returns whether the `index` was originally present as `unused`. + /// + /// This is useful when you want to reserve a script pubkey for something but don't want to add + /// the transaction output using it to the index yet. Other callers will consider `index` on + /// `keychain` used until you call [`unmark_used`]. + /// + /// [`unmark_used`]: Self::unmark_used + pub fn mark_used(&mut self, keychain: &K, index: u32) -> bool { + self.inner.mark_used(&(keychain.clone(), index)) + } + + /// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into + /// `unused`. + /// + /// Note that if `self` has scanned an output with this script pubkey then this will have no + /// effect. + /// + /// [`mark_used`]: Self::mark_used + pub fn unmark_used(&mut self, keychain: &K, index: u32) -> bool { + self.inner.unmark_used(&(keychain.clone(), index)) + } + + /// Iterates over all unused script pubkeys for a `keychain` that have been stored in the index. 
+ pub fn unused_spks_of_keychain( + &self, + keychain: &K, + ) -> impl DoubleEndedIterator { + let next_index = self.last_revealed.get(keychain).map_or(0, |&v| v + 1); + let range = (keychain.clone(), u32::MIN)..(keychain.clone(), next_index); + self.inner + .unused_spks(range) + .map(|((_, i), script)| (*i, script)) + } + + /// Iterates over all the [`OutPoint`] that have a `TxOut` with a script pubkey derived from + /// `keychain`. + pub fn txouts_of_keychain( + &self, + keychain: &K, + ) -> impl DoubleEndedIterator + '_ { + self.inner + .outputs_in_range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX)) + .map(|((_, i), op)| (*i, op)) + } + + /// Returns the highest derivation index of the `keychain` where [`KeychainTxOutIndex`] has + /// found a [`TxOut`] with it's script pubkey. + pub fn last_used_index(&self, keychain: &K) -> Option { + self.txouts_of_keychain(keychain).last().map(|(i, _)| i) + } + + /// Returns the highest derivation index of each keychain that [`KeychainTxOutIndex`] has found + /// a [`TxOut`] with it's script pubkey. + pub fn last_used_indices(&self) -> BTreeMap { + self.keychains + .iter() + .filter_map(|(keychain, _)| { + self.last_used_index(keychain) + .map(|index| (keychain.clone(), index)) + }) + .collect() + } + + /// Applies the derivation additions to the [`KeychainTxOutIndex`], extending the number of + /// derived scripts per keychain, as specified in the `additions`. 
+ pub fn apply_additions(&mut self, additions: DerivationAdditions) { + let _ = self.reveal_to_target_multi(&additions.0); + } +} + +fn range_descriptor_spks<'a, R>( + descriptor: Cow<'a, Descriptor>, + range: R, +) -> impl Iterator + Clone + Send + 'a +where + R: Iterator + Clone + Send + 'a, +{ + let secp = Secp256k1::verification_only(); + let has_wildcard = descriptor.has_wildcard(); + range + .into_iter() + // non-wildcard descriptors can only have one derivation index (0) + .take_while(move |&index| has_wildcard || index == 0) + // we can only iterate over non-hardened indices + .take_while(|&index| index <= BIP32_MAX_INDEX) + .map( + move |index| -> Result<_, miniscript::descriptor::ConversionError> { + Ok(( + index, + descriptor + .at_derivation_index(index) + .derived_descriptor(&secp)? + .script_pubkey(), + )) + }, + ) + .take_while(Result::is_ok) + .map(Result::unwrap) +} diff --git a/crates/chain/src/lib.rs b/crates/chain/src/lib.rs new file mode 100644 index 000000000..7bb4ed0df --- /dev/null +++ b/crates/chain/src/lib.rs @@ -0,0 +1,89 @@ +//! This crate is a collection of core structures for [Bitcoin Dev Kit] (alpha release). +//! +//! The goal of this crate is give wallets the mechanisms needed to: +//! +//! 1. Figure out what data they need to fetch. +//! 2. Process that data in a way that never leads to inconsistent states. +//! 3. Fully index that data and expose it so that it can be consumed without friction. +//! +//! Our design goals for these mechanisms are: +//! +//! 1. Data source agnostic -- nothing in `bdk_chain` cares about where you get data from or whether +//! you do it synchronously or asynchronously. If you know a fact about the blockchain you can just +//! tell `bdk_chain`'s APIs about it and that information will be integrated if it can be done +//! consistently. +//! 2. Error free APIs. +//! 3. Data persistence agnostic -- `bdk_chain` does not care where you cache on-chain data, what you +//! cache or how you fetch it. +//! +//! 
[Bitcoin Dev Kit]: https://bitcoindevkit.org/ +#![no_std] +pub use bitcoin; +pub mod chain_graph; +mod spk_txout_index; +pub use spk_txout_index::*; +mod chain_data; +pub use chain_data::*; +pub mod keychain; +pub mod sparse_chain; +mod tx_data_traits; +pub mod tx_graph; +pub use tx_data_traits::*; + +#[doc(hidden)] +pub mod example_utils; + +#[cfg(feature = "miniscript")] +pub use miniscript; +#[cfg(feature = "miniscript")] +mod descriptor_ext; +#[cfg(feature = "miniscript")] +pub use descriptor_ext::DescriptorExt; + +#[allow(unused_imports)] +#[macro_use] +extern crate alloc; + +#[cfg(feature = "serde")] +pub extern crate serde_crate as serde; + +#[cfg(feature = "bincode")] +extern crate bincode; + +#[cfg(feature = "std")] +#[macro_use] +extern crate std; + +#[cfg(all(not(feature = "std"), feature = "hashbrown"))] +extern crate hashbrown; + +// When no-std use `alloc`'s Hash collections. This is activated by default +#[cfg(all(not(feature = "std"), not(feature = "hashbrown")))] +#[doc(hidden)] +pub mod collections { + #![allow(dead_code)] + pub type HashSet = alloc::collections::BTreeSet; + pub type HashMap = alloc::collections::BTreeMap; + pub use alloc::collections::{btree_map as hash_map, *}; +} + +// When we have std use `std`'s all collections +#[cfg(all(feature = "std", not(feature = "hashbrown")))] +#[doc(hidden)] +pub mod collections { + pub use std::collections::{hash_map, *}; +} + +// With special feature `hashbrown` use `hashbrown`'s hash collections, and else from `alloc`. 
+#[cfg(feature = "hashbrown")] +#[doc(hidden)] +pub mod collections { + #![allow(dead_code)] + pub type HashSet = hashbrown::HashSet; + pub type HashMap = hashbrown::HashMap; + pub use alloc::collections::*; + pub use hashbrown::hash_map; +} + +/// How many confirmations are needed for a coinbase output to be spent +pub const COINBASE_MATURITY: u32 = 100; diff --git a/crates/chain/src/sparse_chain.rs b/crates/chain/src/sparse_chain.rs new file mode 100644 index 000000000..32e9cce53 --- /dev/null +++ b/crates/chain/src/sparse_chain.rs @@ -0,0 +1,1102 @@ +//! Module for structures that maintain sparse (purposely incomplete) snapshots of blockchain data. +//! +//! [`SparseChain`] stores [`Txid`]s ordered by an index that implements [`ChainPosition`] (this +//! represents the transaction's position in the blockchain, by default [`TxHeight`] is used). +//! [`SparseChain`] also contains "checkpoints" which relate block height to block hash. Changes to +//! a [`SparseChain`] are reported by returning [`ChangeSet`]s. +//! +//! # Updating [`SparseChain`] +//! +//! A sparsechain can be thought of as a consistent snapshot of history. A [`SparseChain`] can be +//! updated by applying an update [`SparseChain`] on top, but only if they "connect" via their +//! checkpoints and don't result in unexpected movements of transactions. +//! +//! ``` +//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; +//! # use bitcoin::BlockHash; +//! # let hash_a = new_hash::("a"); +//! # let hash_b = new_hash::("b"); +//! # let hash_c = new_hash::("c"); +//! # let hash_d = new_hash::("d"); +//! // create empty sparsechain +//! let mut chain = SparseChain::::default(); +//! +//! /* Updating an empty sparsechain will always succeed */ +//! +//! let update = SparseChain::from_checkpoints(vec![ +//! BlockId { +//! height: 1, +//! hash: hash_a, +//! }, +//! BlockId { +//! height: 2, +//! hash: hash_b, +//! }, +//! ]); +//! let _ = chain +//! .apply_update(update) +//! 
.expect("updating an empty sparsechain will always succeed"); +//! +//! /* To update a non-empty sparsechain, the update must connect */ +//! +//! let update = SparseChain::from_checkpoints(vec![ +//! BlockId { +//! height: 2, +//! hash: hash_b, +//! }, +//! BlockId { +//! height: 3, +//! hash: hash_c, +//! }, +//! ]); +//! let _ = chain +//! .apply_update(update) +//! .expect("we have connected at block height 2, so this must succeed"); +//! ``` +//! +//! ## Invalid updates +//! +//! As shown above, sparsechains can be "connected" by comparing their checkpoints. However, there +//! are situations where two sparsechains cannot connect in a way that guarantees consistency. +//! +//! ``` +//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; +//! # use bitcoin::BlockHash; +//! # let hash_a = new_hash::("a"); +//! # let hash_b = new_hash::("b"); +//! # let hash_c = new_hash::("c"); +//! # let hash_d = new_hash::("d"); +//! // our sparsechain has 2 checkpoints +//! let chain = SparseChain::::from_checkpoints(vec![ +//! BlockId { +//! height: 1, +//! hash: hash_a, +//! }, +//! BlockId { +//! height: 2, +//! hash: hash_b, +//! }, +//! ]); +//! +//! /* Example of an ambiguous update that does not fully connect */ +//! +//! let ambiguous_update = SparseChain::from_checkpoints(vec![ +//! // the update sort of "connects" at checkpoint 1, but... +//! BlockId { +//! height: 1, +//! hash: hash_a, +//! }, +//! // we cannot determine whether checkpoint 3 connects with checkpoint 2 +//! BlockId { +//! height: 3, +//! hash: hash_c, +//! }, +//! ]); +//! let _ = chain +//! .determine_changeset(&ambiguous_update) +//! .expect_err("cannot apply ambiguous update"); +//! +//! /* Example of an update that completely misses the point */ +//! +//! let disconnected_update = SparseChain::from_checkpoints(vec![ +//! // the last checkpoint in chain is 2, so 3 and 4 do not connect +//! BlockId { +//! height: 3, +//! hash: hash_c, +//! }, +//! BlockId { +//! 
height: 4, +//! hash: hash_d, +//! }, +//! ]); +//! let _ = chain +//! .determine_changeset(&disconnected_update) +//! .expect_err("cannot apply a totally-disconnected update"); +//! ``` +//! +//! ## Handling reorgs +//! +//! Updates can be formed to evict data from the original sparsechain. This is useful for handling +//! blockchain reorgs. +//! +//! ``` +//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; +//! # use bitcoin::BlockHash; +//! # let hash_a = new_hash::("a"); +//! # let hash_b = new_hash::("b"); +//! # let hash_c = new_hash::("c"); +//! # let hash_d = new_hash::("d"); +//! // our chain has a single checkpoint at height 11 +//! let mut chain = SparseChain::::from_checkpoints(vec![BlockId { +//! height: 11, +//! hash: hash_a, +//! }]); +//! +//! // we detect a reorg at height 11, and we introduce a new checkpoint at height 12 +//! let update = SparseChain::from_checkpoints(vec![ +//! BlockId { +//! height: 11, +//! hash: hash_b, +//! }, +//! BlockId { +//! height: 12, +//! hash: hash_c, +//! }, +//! ]); +//! let _ = chain +//! .apply_update(update) +//! .expect("we can evict/replace checkpoint 11 since it is the only checkpoint"); +//! +//! // now our `chain` has 2 checkpoints (11:hash_b & 12:hash_c) +//! // we detect another reorg, this time at height 12... +//! let update = SparseChain::from_checkpoints(vec![ +//! // we connect at checkpoint 11 as this is our "point of agreement" +//! BlockId { +//! height: 11, +//! hash: hash_b, +//! }, +//! BlockId { +//! height: 12, +//! hash: hash_d, +//! }, +//! ]); +//! let _ = chain +//! .apply_update(update) +//! .expect("we have provided a valid point of agreement, so our reorg update will succeed"); +//! ``` +//! +//! ## Movement of transactions during update +//! +//! If the original sparsechain and update sparsechain contain the same transaction at different +//! [`ChainPosition`]s, the transaction is considered as "moved". There are various movements of a +//! 
transaction that are invalid and update will fail. +//! +//! Valid movements: +//! +//! * When the transaction moved from unconfirmed (in original) to confirmed (in update). In other +//! words, confirming transactions are allowed! +//! * If there has been a reorg at height x, an originally confirmed transaction at height x or +//! above, may move to another height (that is at x or above, including becoming unconfirmed). +//! +//! Invalid movements: +//! +//! * A confirmed transaction cannot move without a reorg. +//! * Even with a reorg, an originally confirmed transaction cannot be moved below the height of the +//! reorg. +//! +//! # Custom [`ChainPosition`] +//! +//! [`SparseChain`] maintains a list of txids ordered by [`ChainPosition`]. By default, [`TxHeight`] +//! is used, however additional data can be incorporated into the implementation. +//! +//! For example, we can have "perfect ordering" of transactions if our positional index is a +//! combination of block height and transaction position in block. +//! +//! ``` +//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*}; +//! # use bitcoin::{BlockHash, Txid}; +//! # let hash_a = new_hash::("a"); +//! # let txid_1 = new_hash::("1"); +//! # let txid_2 = new_hash::("2"); +//! # let txid_3 = new_hash::("3"); +//! #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +//! pub enum TxPosition { +//! Confirmed { +//! height: u32, // height of block +//! position: u32, // position of transaction in the block +//! }, +//! Unconfirmed, +//! } +//! +//! impl Default for TxPosition { +//! fn default() -> Self { +//! Self::Unconfirmed +//! } +//! } +//! +//! impl ChainPosition for TxPosition { +//! fn height(&self) -> TxHeight { +//! match self { +//! Self::Confirmed { height, .. } => TxHeight::Confirmed(*height), +//! Self::Unconfirmed => TxHeight::Unconfirmed, +//! } +//! } +//! +//! fn max_ord_of_height(height: TxHeight) -> Self { +//! match height { +//! 
TxHeight::Confirmed(height) => Self::Confirmed { +//! height, +//! position: u32::MAX, +//! }, +//! TxHeight::Unconfirmed => Self::Unconfirmed, +//! } +//! } +//! +//! fn min_ord_of_height(height: TxHeight) -> Self { +//! match height { +//! TxHeight::Confirmed(height) => Self::Confirmed { +//! height, +//! position: u32::MIN, +//! }, +//! TxHeight::Unconfirmed => Self::Unconfirmed, +//! } +//! } +//! } +//! +//! let mut chain = SparseChain::::default(); +//! let _ = chain +//! .insert_checkpoint(BlockId { +//! height: 10, +//! hash: hash_a, +//! }) +//! .unwrap(); +//! let _ = chain +//! .insert_tx( +//! txid_1, +//! TxPosition::Confirmed { +//! height: 9, +//! position: 4321, +//! }, +//! ) +//! .unwrap(); +//! let _ = chain +//! .insert_tx( +//! txid_2, +//! TxPosition::Confirmed { +//! height: 9, +//! position: 1234, +//! }, +//! ) +//! .unwrap(); +//! let _ = chain +//! .insert_tx( +//! txid_3, +//! TxPosition::Confirmed { +//! height: 10, +//! position: 321, +//! }, +//! ) +//! .unwrap(); +//! +//! // transactions are ordered correctly +//! assert_eq!( +//! chain.txids().collect::>(), +//! vec![ +//! &( +//! TxPosition::Confirmed { +//! height: 9, +//! position: 1234 +//! }, +//! txid_2 +//! ), +//! &( +//! TxPosition::Confirmed { +//! height: 9, +//! position: 4321 +//! }, +//! txid_1 +//! ), +//! &( +//! TxPosition::Confirmed { +//! height: 10, +//! position: 321 +//! }, +//! txid_3 +//! ), +//! ], +//! ); +//! ``` +use core::{ + fmt::Debug, + ops::{Bound, RangeBounds}, +}; + +use crate::{collections::*, tx_graph::TxGraph, BlockId, FullTxOut, TxHeight}; +use bitcoin::{hashes::Hash, BlockHash, OutPoint, Txid}; + +/// This is a non-monotone structure that tracks relevant [`Txid`]s that are ordered by chain +/// position `P`. +/// +/// We use [`BlockHash`]s alongside their chain height as "checkpoints" to enforce consistency. 
+/// +/// To "merge" two [`SparseChain`]s, the [`ChangeSet`] can be calculated by calling +/// [`determine_changeset`] and applying the [`ChangeSet`] via [`apply_changeset`]. For convenience, +/// [`apply_update`] does the above two steps in one call. +/// +/// Refer to [module-level documentation] for more. +/// +/// [`determine_changeset`]: Self::determine_changeset +/// [`apply_changeset`]: Self::apply_changeset +/// [`apply_update`]: Self::apply_update +/// [module-level documentation]: crate::sparse_chain +#[derive(Clone, Debug, PartialEq)] +pub struct SparseChain

{ + /// Block height to checkpoint data. + checkpoints: BTreeMap, + /// Txids ordered by the pos `P`. + ordered_txids: BTreeSet<(P, Txid)>, + /// Confirmation heights of txids. + txid_to_pos: HashMap, + /// Limit number of checkpoints. + checkpoint_limit: Option, +} + +impl

AsRef> for SparseChain

{ + fn as_ref(&self) -> &SparseChain

{ + self + } +} + +impl

Default for SparseChain

{ + fn default() -> Self { + Self { + checkpoints: Default::default(), + ordered_txids: Default::default(), + txid_to_pos: Default::default(), + checkpoint_limit: Default::default(), + } + } +} + +/// Represents a failure when trying to insert a [`Txid`] into [`SparseChain`]. +#[derive(Clone, Debug, PartialEq)] +pub enum InsertTxError

{
+ /// Occurs when the [`Txid`] is to be inserted at a height higher than the [`SparseChain`]'s tip.
+ TxTooHigh {
+ txid: Txid,
+ tx_height: u32,
+ tip_height: Option,
+ },
+ /// Occurs when the [`Txid`] is already in the [`SparseChain`] and the insertion would result in
+ /// an unexpected move in [`ChainPosition`].
+ TxMovedUnexpectedly {
+ txid: Txid,
+ original_pos: P,
+ update_pos: P,
+ },
+}
+
+impl core::fmt::Display for InsertTxError

{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + InsertTxError::TxTooHigh { + txid, + tx_height, + tip_height, + } => write!( + f, + "txid ({}) cannot be inserted at height ({}) greater than chain tip ({:?})", + txid, tx_height, tip_height + ), + InsertTxError::TxMovedUnexpectedly { + txid, + original_pos, + update_pos, + } => write!( + f, + "txid ({}) insertion resulted in an expected positional move from {:?} to {:?}", + txid, original_pos, update_pos + ), + } + } +} + +#[cfg(feature = "std")] +impl std::error::Error for InsertTxError

{} + +/// Represents a failure when trying to insert a checkpoint into [`SparseChain`]. +#[derive(Clone, Debug, PartialEq)] +pub enum InsertCheckpointError { + /// Occurs when checkpoint of the same height already exists with a different [`BlockHash`]. + HashNotMatching { + height: u32, + original_hash: BlockHash, + update_hash: BlockHash, + }, +} + +impl core::fmt::Display for InsertCheckpointError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{:?}", self) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for InsertCheckpointError {} + +/// Represents an update failure of [`SparseChain`]. +#[derive(Clone, Debug, PartialEq)] +pub enum UpdateError

{ + /// The update cannot be applied to the chain because the chain suffix it represents did not + /// connect to the existing chain. This error case contains the checkpoint height to include so + /// that the chains can connect. + NotConnected(u32), + /// The update contains inconsistent tx states (e.g. it changed the transaction's height). This + /// error is usually the inconsistency found. + TxInconsistent { + txid: Txid, + original_pos: P, + update_pos: P, + }, +} + +impl core::fmt::Display for UpdateError

{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::NotConnected(h) => + write!(f, "the checkpoints in the update could not be connected to the checkpoints in the chain, try include checkpoint of height {} to connect", + h), + Self::TxInconsistent { txid, original_pos, update_pos } => + write!(f, "tx ({}) had position ({:?}), but is ({:?}) in the update", + txid, original_pos, update_pos), + } + } +} + +#[cfg(feature = "std")] +impl std::error::Error for UpdateError

{} + +impl SparseChain

{ + /// Creates a new chain from a list of block hashes and heights. The caller must guarantee they + /// are in the same chain. + pub fn from_checkpoints(checkpoints: C) -> Self + where + C: IntoIterator, + { + Self { + checkpoints: checkpoints + .into_iter() + .map(|block_id| block_id.into()) + .collect(), + ..Default::default() + } + } + + /// Get the checkpoint for the last known tip. + pub fn latest_checkpoint(&self) -> Option { + self.checkpoints + .iter() + .last() + .map(|(&height, &hash)| BlockId { height, hash }) + } + + /// Get the checkpoint at the given height if it exists. + pub fn checkpoint_at(&self, height: u32) -> Option { + self.checkpoints + .get(&height) + .map(|&hash| BlockId { height, hash }) + } + + /// Return the [`ChainPosition`] of a `txid`. + /// + /// This returns [`None`] if the transation does not exist. + pub fn tx_position(&self, txid: Txid) -> Option<&P> { + self.txid_to_pos.get(&txid) + } + + /// Return a [`BTreeMap`] of all checkpoints (block hashes by height). + pub fn checkpoints(&self) -> &BTreeMap { + &self.checkpoints + } + + /// Return an iterator over checkpoints in a height range, in ascending height order. + pub fn range_checkpoints( + &self, + range: impl RangeBounds, + ) -> impl DoubleEndedIterator + '_ { + self.checkpoints + .range(range) + .map(|(&height, &hash)| BlockId { height, hash }) + } + + /// Preview changes of updating [`Self`] with another chain that connects to it. + /// + /// If the `update` wishes to introduce confirmed transactions, it must contain a checkpoint + /// that is exactly the same height as one of `self`'s checkpoints. + /// + /// To invalidate from a given checkpoint, `update` must contain a checkpoint of the same height + /// but different hash. Invalidated checkpoints result in invalidated transactions becoming + /// "unconfirmed". + /// + /// An error will be returned if an update will result in inconsistencies or if the update does + /// not properly connect with `self`. 
+ /// + /// Refer to [module-level documentation] for more. + /// + /// [module-level documentation]: crate::sparse_chain + pub fn determine_changeset(&self, update: &Self) -> Result, UpdateError

> { + let agreement_point = update + .checkpoints + .iter() + .rev() + .find(|&(height, hash)| self.checkpoints.get(height) == Some(hash)) + .map(|(&h, _)| h); + + let last_update_cp = update.checkpoints.iter().last().map(|(&h, _)| h); + + // the lower bound of the invalidation range + let invalid_lb = if last_update_cp.is_none() || last_update_cp == agreement_point { + // if agreement point is the last update checkpoint, or there is no update checkpoints, + // no invalidation is required + u32::MAX + } else { + agreement_point.map(|h| h + 1).unwrap_or(0) + }; + + // the first checkpoint of the sparsechain to invalidate (if any) + let invalid_from = self.checkpoints.range(invalid_lb..).next().map(|(&h, _)| h); + + // the first checkpoint to invalidate (if any) should be represented in the update + if let Some(first_invalid) = invalid_from { + if !update.checkpoints.contains_key(&first_invalid) { + return Err(UpdateError::NotConnected(first_invalid)); + } + } + + for (&txid, update_pos) in &update.txid_to_pos { + // ensure all currently confirmed txs are still at the same height (unless they are + // within invalidation range, or to be confirmed) + if let Some(original_pos) = &self.txid_to_pos.get(&txid) { + if original_pos.height() < TxHeight::Confirmed(invalid_lb) + && original_pos != &update_pos + { + return Err(UpdateError::TxInconsistent { + txid, + original_pos: P::clone(original_pos), + update_pos: update_pos.clone(), + }); + } + } + } + + // create initial change-set, based on checkpoints and txids that are to be "invalidated" + let mut changeset = invalid_from + .map(|from_height| self.invalidate_checkpoints_preview(from_height)) + .unwrap_or_default(); + + for (&height, &new_hash) in &update.checkpoints { + let original_hash = self.checkpoints.get(&height).cloned(); + + let update_hash = *changeset + .checkpoints + .entry(height) + .and_modify(|change| *change = Some(new_hash)) + .or_insert_with(|| Some(new_hash)); + + if original_hash == update_hash { + 
changeset.checkpoints.remove(&height); + } + } + + for (txid, new_pos) in &update.txid_to_pos { + let original_pos = self.txid_to_pos.get(txid).cloned(); + + let update_pos = changeset + .txids + .entry(*txid) + .and_modify(|change| *change = Some(new_pos.clone())) + .or_insert_with(|| Some(new_pos.clone())); + + if original_pos == *update_pos { + changeset.txids.remove(txid); + } + } + + Ok(changeset) + } + + /// Updates [`SparseChain`] with another chain that connects to it. + /// + /// This is equivilant to calling [`determine_changeset`] and [`apply_changeset`] in sequence. + /// + /// [`determine_changeset`]: Self::determine_changeset + /// [`apply_changeset`]: Self::apply_changeset + pub fn apply_update(&mut self, update: Self) -> Result, UpdateError

> { + let changeset = self.determine_changeset(&update)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + pub fn apply_changeset(&mut self, changeset: ChangeSet

) { + for (height, update_hash) in changeset.checkpoints { + let _original_hash = match update_hash { + Some(update_hash) => self.checkpoints.insert(height, update_hash), + None => self.checkpoints.remove(&height), + }; + } + + for (txid, update_pos) in changeset.txids { + let original_pos = self.txid_to_pos.remove(&txid); + + if let Some(pos) = original_pos { + self.ordered_txids.remove(&(pos, txid)); + } + + if let Some(pos) = update_pos { + self.txid_to_pos.insert(txid, pos.clone()); + self.ordered_txids.insert((pos.clone(), txid)); + } + } + + self.prune_checkpoints(); + } + + /// Derives a [`ChangeSet`] that assumes that there are no preceding changesets. + /// + /// The changeset returned will record additions of all [`Txid`]s and checkpoints included in + /// [`Self`]. + pub fn initial_changeset(&self) -> ChangeSet

{ + ChangeSet { + checkpoints: self + .checkpoints + .iter() + .map(|(height, hash)| (*height, Some(*hash))) + .collect(), + txids: self + .ordered_txids + .iter() + .map(|(pos, txid)| (*txid, Some(pos.clone()))) + .collect(), + } + } + + /// Determines the [`ChangeSet`] when checkpoints `from_height` (inclusive) and above are + /// invalidated. Displaced [`Txid`]s will be repositioned to [`TxHeight::Unconfirmed`]. + pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet

{ + ChangeSet::

{ + checkpoints: self + .checkpoints + .range(from_height..) + .map(|(height, _)| (*height, None)) + .collect(), + // invalidated transactions become unconfirmed + txids: self + .range_txids_by_height(TxHeight::Confirmed(from_height)..TxHeight::Unconfirmed) + .map(|(_, txid)| (*txid, Some(P::max_ord_of_height(TxHeight::Unconfirmed)))) + .collect(), + } + } + + /// Invalidate checkpoints `from_height` (inclusive) and above. + /// + /// This is equivalent to calling [`invalidate_checkpoints_preview`] and [`apply_changeset`] in + /// sequence. + /// + /// [`invalidate_checkpoints_preview`]: Self::invalidate_checkpoints_preview + /// [`apply_changeset`]: Self::apply_changeset + pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet

{ + let changeset = self.invalidate_checkpoints_preview(from_height); + self.apply_changeset(changeset.clone()); + changeset + } + + /// Determines the [`ChangeSet`] when all transactions of height [`TxHeight::Unconfirmed`] are + /// removed completely. + pub fn clear_mempool_preview(&self) -> ChangeSet

{ + let mempool_range = &( + P::min_ord_of_height(TxHeight::Unconfirmed), + Txid::all_zeros(), + )..; + + let txids = self + .ordered_txids + .range(mempool_range) + .map(|(_, txid)| (*txid, None)) + .collect(); + + ChangeSet::

{ + txids, + ..Default::default() + } + } + + /// Clears all transactions of height [`TxHeight::Unconfirmed`]. + /// + /// This is equivalent to calling [`clear_mempool_preview`] and [`apply_changeset`] in sequence. + /// + /// [`clear_mempool_preview`]: Self::clear_mempool_preview + /// [`apply_changeset`]: Self::apply_changeset + /// [`ChangeSet`]. + pub fn clear_mempool(&mut self) -> ChangeSet

{ + let changeset = self.clear_mempool_preview(); + self.apply_changeset(changeset.clone()); + changeset + } + + /// Determines the resultant [`ChangeSet`] if [`Txid`] was inserted at position `pos`. + /// + /// Changes to the [`Txid`]'s position is allowed (under the rules noted in + /// [module-level documentation]) and will be reflected in the [`ChangeSet`]. + /// + /// [module-level documentation]: crate::sparse_chain + pub fn insert_tx_preview(&self, txid: Txid, pos: P) -> Result, InsertTxError

> { + let mut update = Self::default(); + + if let Some(block_id) = self.latest_checkpoint() { + let _old_hash = update.checkpoints.insert(block_id.height, block_id.hash); + debug_assert!(_old_hash.is_none()); + } + + let tip_height = self.checkpoints.iter().last().map(|(h, _)| *h); + if let TxHeight::Confirmed(tx_height) = pos.height() { + if Some(tx_height) > tip_height { + return Err(InsertTxError::TxTooHigh { + txid, + tx_height, + tip_height, + }); + } + } + + let _old_pos = update.txid_to_pos.insert(txid, pos.clone()); + debug_assert!(_old_pos.is_none()); + + let _inserted = update.ordered_txids.insert((pos, txid)); + debug_assert!(_inserted, "must insert tx"); + + match self.determine_changeset(&update) { + Ok(changeset) => Ok(changeset), + Err(UpdateError::NotConnected(_)) => panic!("should always connect"), + Err(UpdateError::TxInconsistent { + txid: inconsistent_txid, + original_pos, + update_pos, + }) => Err(InsertTxError::TxMovedUnexpectedly { + txid: inconsistent_txid, + original_pos, + update_pos, + }), + } + } + + /// Inserts a given [`Txid`] at `pos`. + /// + /// This is equivilant to calling [`insert_tx_preview`] and [`apply_changeset`] in sequence. + /// + /// [`insert_tx_preview`]: Self::insert_tx_preview + /// [`apply_changeset`]: Self::apply_changeset + pub fn insert_tx(&mut self, txid: Txid, pos: P) -> Result, InsertTxError

> { + let changeset = self.insert_tx_preview(txid, pos)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Determines the resultant [`ChangeSet`] if [`BlockId`] was inserted. + /// + /// If the change would result in a change in block hash of a certain height, insertion would + /// fail. + pub fn insert_checkpoint_preview( + &self, + block_id: BlockId, + ) -> Result, InsertCheckpointError> { + let mut update = Self::default(); + + if let Some(block_id) = self.latest_checkpoint() { + let _old_hash = update.checkpoints.insert(block_id.height, block_id.hash); + debug_assert!(_old_hash.is_none()); + } + + if let Some(original_hash) = update.checkpoints.insert(block_id.height, block_id.hash) { + if original_hash != block_id.hash { + return Err(InsertCheckpointError::HashNotMatching { + height: block_id.height, + original_hash, + update_hash: block_id.hash, + }); + } + } + + match self.determine_changeset(&update) { + Ok(changeset) => Ok(changeset), + Err(UpdateError::NotConnected(_)) => panic!("error should have caught above"), + Err(UpdateError::TxInconsistent { .. }) => panic!("should never add txs"), + } + } + + /// Insert a checkpoint ([`BlockId`]). + /// + /// This is equivilant to calling [`insert_checkpoint_preview`] and [`apply_changeset`] in + /// sequence. + /// + /// [`insert_checkpoint_preview`]: Self::insert_checkpoint_preview + /// [`apply_changeset`]: Self::apply_changeset + pub fn insert_checkpoint( + &mut self, + block_id: BlockId, + ) -> Result, InsertCheckpointError> { + let changeset = self.insert_checkpoint_preview(block_id)?; + self.apply_changeset(changeset.clone()); + Ok(changeset) + } + + /// Iterate over all [`Txid`]s ordered by their [`ChainPosition`]. + pub fn txids(&self) -> impl DoubleEndedIterator + ExactSizeIterator + '_ { + self.ordered_txids.iter() + } + + /// Iterate over a sub-range of positioned [`Txid`]s. 
+ pub fn range_txids(&self, range: R) -> impl DoubleEndedIterator + '_ + where + R: RangeBounds<(P, Txid)>, + { + let map_bound = |b: Bound<&(P, Txid)>| match b { + Bound::Included((pos, txid)) => Bound::Included((pos.clone(), *txid)), + Bound::Excluded((pos, txid)) => Bound::Excluded((pos.clone(), *txid)), + Bound::Unbounded => Bound::Unbounded, + }; + + self.ordered_txids + .range((map_bound(range.start_bound()), map_bound(range.end_bound()))) + } + + /// Iterate over a sub-range of positioned [`Txid`]s, where the range is defined by + /// [`ChainPosition`] only. + pub fn range_txids_by_position( + &self, + range: R, + ) -> impl DoubleEndedIterator + '_ + where + R: RangeBounds

, + { + let map_bound = |b: Bound<&P>, inc: Txid, exc: Txid| match b { + Bound::Included(pos) => Bound::Included((pos.clone(), inc)), + Bound::Excluded(pos) => Bound::Excluded((pos.clone(), exc)), + Bound::Unbounded => Bound::Unbounded, + }; + + self.ordered_txids.range(( + map_bound(range.start_bound(), min_txid(), max_txid()), + map_bound(range.end_bound(), max_txid(), min_txid()), + )) + } + + /// Iterate over a sub-range of positioned [`Txid`]s, where the range is define by [`TxHeight`] + /// only. + pub fn range_txids_by_height( + &self, + range: R, + ) -> impl DoubleEndedIterator + '_ + where + R: RangeBounds, + { + let ord_it = |height, is_max| match is_max { + true => P::max_ord_of_height(height), + false => P::min_ord_of_height(height), + }; + + let map_bound = |b: Bound<&TxHeight>, inc: (bool, Txid), exc: (bool, Txid)| match b { + Bound::Included(&h) => Bound::Included((ord_it(h, inc.0), inc.1)), + Bound::Excluded(&h) => Bound::Excluded((ord_it(h, exc.0), exc.1)), + Bound::Unbounded => Bound::Unbounded, + }; + + self.ordered_txids.range(( + map_bound(range.start_bound(), (false, min_txid()), (true, max_txid())), + map_bound(range.end_bound(), (true, max_txid()), (false, min_txid())), + )) + } + + /// Attempt to retrieve a [`FullTxOut`] of the given `outpoint`. + /// + /// This will return `Some` only if the output's transaction is in both `self` and `graph`. + pub fn full_txout(&self, graph: &TxGraph, outpoint: OutPoint) -> Option> { + let chain_pos = self.tx_position(outpoint.txid)?; + + let tx = graph.get_tx(outpoint.txid)?; + let is_on_coinbase = tx.is_coin_base(); + let txout = tx.output.get(outpoint.vout as usize)?.clone(); + + let spent_by = self + .spent_by(graph, outpoint) + .map(|(pos, txid)| (pos.clone(), txid)); + + Some(FullTxOut { + outpoint, + txout, + chain_position: chain_pos.clone(), + spent_by, + is_on_coinbase, + }) + } + + /// Returns the value set as the checkpoint limit. + /// + /// Refer to [`set_checkpoint_limit`]. 
+ /// + /// [`set_checkpoint_limit`]: Self::set_checkpoint_limit + pub fn checkpoint_limit(&self) -> Option { + self.checkpoint_limit + } + + /// Set the checkpoint limit. + /// + /// The checkpoint limit restricts the number of checkpoints that can be stored in [`Self`]. + /// Oldest checkpoints are pruned first. + pub fn set_checkpoint_limit(&mut self, limit: Option) { + self.checkpoint_limit = limit; + self.prune_checkpoints(); + } + + /// Return [`Txid`]s that would be added to the sparse chain if this `changeset` was applied. + pub fn changeset_additions<'a>( + &'a self, + changeset: &'a ChangeSet

, + ) -> impl Iterator + 'a { + changeset + .txids + .iter() + .filter(move |(&txid, pos)| { + pos.is_some() /*it was not a deletion*/ && + self.tx_position(txid).is_none() /* we don't have the txid already */ + }) + .map(|(&txid, _)| txid) + } + + fn prune_checkpoints(&mut self) -> Option> { + let limit = self.checkpoint_limit?; + + // find last height to be pruned + let last_height = *self.checkpoints.keys().rev().nth(limit)?; + // first height to be kept + let keep_height = last_height + 1; + + let mut split = self.checkpoints.split_off(&keep_height); + core::mem::swap(&mut self.checkpoints, &mut split); + + Some(split) + } + + /// Finds the transaction in the chain that spends `outpoint`. + /// + /// [`TxGraph`] is used to provide the spend relationships. + /// + /// Note that the transaction including `outpoint` does not need to be in the `graph` or the + /// `chain` for this to return `Some`. + pub fn spent_by(&self, graph: &TxGraph, outpoint: OutPoint) -> Option<(&P, Txid)> { + graph + .outspends(outpoint) + .iter() + .find_map(|&txid| Some((self.tx_position(txid)?, txid))) + } + + /// Returns whether the sparse chain contains any checkpoints or transactions. + pub fn is_empty(&self) -> bool { + self.checkpoints.is_empty() && self.txid_to_pos.is_empty() + } +} + +/// The return value of [`determine_changeset`]. +/// +/// [`determine_changeset`]: SparseChain::determine_changeset. +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +#[must_use] +pub struct ChangeSet

{ + pub checkpoints: BTreeMap>, + pub txids: BTreeMap>, +} + +impl Default for ChangeSet { + fn default() -> Self { + Self { + checkpoints: Default::default(), + txids: Default::default(), + } + } +} + +impl

ChangeSet

{ + /// Appends the changes in `other` into self such that applying `self` afterwards has the same + /// effect as sequentially applying the original `self` and `other`. + pub fn append(&mut self, mut other: Self) + where + P: ChainPosition, + { + self.checkpoints.append(&mut other.checkpoints); + self.txids.append(&mut other.txids); + } + + /// Whether this changeset contains no changes. + pub fn is_empty(&self) -> bool { + self.checkpoints.is_empty() && self.txids.is_empty() + } +} + +fn min_txid() -> Txid { + Txid::from_inner([0x00; 32]) +} + +fn max_txid() -> Txid { + Txid::from_inner([0xff; 32]) +} + +/// Represents an position in which transactions are ordered in [`SparseChain`]. +/// +/// [`ChainPosition`] implementations must be [`Ord`] by [`TxHeight`] first. +pub trait ChainPosition: + core::fmt::Debug + Clone + Eq + PartialOrd + Ord + core::hash::Hash + Send + Sync + 'static +{ + /// Get the transaction height of the positon. + fn height(&self) -> TxHeight; + + /// Get the positon's upper bound of a given height. + fn max_ord_of_height(height: TxHeight) -> Self; + + /// Get the position's lower bound of a given height. + fn min_ord_of_height(height: TxHeight) -> Self; + + /// Get the unconfirmed position. 
+ fn unconfirmed() -> Self { + Self::max_ord_of_height(TxHeight::Unconfirmed) + } +} + +#[cfg(test)] +pub mod verify_chain_position { + use crate::{sparse_chain::ChainPosition, ConfirmationTime, TxHeight}; + use alloc::vec::Vec; + + pub fn verify_chain_position(head_count: u32, tail_count: u32) { + let values = (0..head_count) + .chain(u32::MAX - tail_count..u32::MAX) + .flat_map(|i| { + [ + P::min_ord_of_height(TxHeight::Confirmed(i)), + P::max_ord_of_height(TxHeight::Confirmed(i)), + ] + }) + .chain([ + P::min_ord_of_height(TxHeight::Unconfirmed), + P::max_ord_of_height(TxHeight::Unconfirmed), + ]) + .collect::>(); + + for i in 0..values.len() { + for j in 0..values.len() { + if i == j { + assert_eq!(values[i], values[j]); + } + if i < j { + assert!(values[i] <= values[j]); + } + if i > j { + assert!(values[i] >= values[j]); + } + } + } + } + + #[test] + fn verify_tx_height() { + verify_chain_position::(1000, 1000); + } + + #[test] + fn verify_confirmation_time() { + verify_chain_position::(1000, 1000); + } +} diff --git a/crates/chain/src/spk_txout_index.rs b/crates/chain/src/spk_txout_index.rs new file mode 100644 index 000000000..f7dffb5fe --- /dev/null +++ b/crates/chain/src/spk_txout_index.rs @@ -0,0 +1,309 @@ +use core::ops::RangeBounds; + +use crate::{ + collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap}, + ForEachTxOut, +}; +use bitcoin::{self, OutPoint, Script, Transaction, TxOut, Txid}; + +/// An index storing [`TxOut`]s that have a script pubkey that matches those in a list. +/// +/// The basic idea is that you insert script pubkeys you care about into the index with +/// [`insert_spk`] and then when you call [`scan`] the index will look at any txouts you pass in and +/// store and index any txouts matching one of its script pubkeys. +/// +/// Each script pubkey is associated with a application defined index script index `I` which must be +/// [`Ord`]. 
Usually this is used to associate the derivation index of the script pubkey or even a +/// combination of `(keychain, derivation_index)`. +/// +/// Note there is no harm in scanning transactions that disappear from the blockchain or were never +/// in there in the first place. `SpkTxOutIndex` is intentionally *monotone* -- you cannot delete or +/// modify txouts that have been indexed. To find out which txouts from the index are actually in the +/// chain or unspent etc you must use other sources of information like a [`SparseChain`]. +/// +/// [`TxOut`]: bitcoin::TxOut +/// [`insert_spk`]: Self::insert_spk +/// [`Ord`]: core::cmp::Ord +/// [`scan`]: Self::scan +/// [`SparseChain`]: crate::sparse_chain::SparseChain +#[derive(Clone, Debug)] +pub struct SpkTxOutIndex { + /// script pubkeys ordered by index + spks: BTreeMap, + /// A reverse lookup from spk to spk index + spk_indices: HashMap, + /// The set of unused indexes. + unused: BTreeSet, + /// Lookup index and txout by outpoint. + txouts: BTreeMap, + /// Lookup from spk index to outpoints that had that spk + spk_txouts: BTreeSet<(I, OutPoint)>, +} + +impl Default for SpkTxOutIndex { + fn default() -> Self { + Self { + txouts: Default::default(), + spks: Default::default(), + spk_indices: Default::default(), + spk_txouts: Default::default(), + unused: Default::default(), + } + } +} + +/// This macro is used instead of a member function of `SpkTxOutIndex` which would result in a +/// compiler error[E0521]: "borrowed data escapes out of closure" when we attempt to take a +/// reference out of the `FprEachTxOut` closure during scanning. +macro_rules! 
scan_txout { + ($self:ident, $op:expr, $txout:expr) => {{ + let spk_i = $self.spk_indices.get(&$txout.script_pubkey); + if let Some(spk_i) = spk_i { + $self.txouts.insert($op, (spk_i.clone(), $txout.clone())); + $self.spk_txouts.insert((spk_i.clone(), $op)); + $self.unused.remove(&spk_i); + } + spk_i + }}; +} + +impl SpkTxOutIndex { + /// Scans an object containing many txouts. + /// + /// Typically this is used in two situations: + /// + /// 1. After loading transaction data from disk you may scan over all the txouts to restore all + /// your txouts. + /// 2. When getting new data from the chain you usually scan it before incorporating it into your chain state. + /// + /// See [`ForEachTxout`] for the types that support this. + /// + /// [`ForEachTxout`]: crate::ForEachTxOut + pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet { + let mut scanned_indices = BTreeSet::new(); + + txouts.for_each_txout(|(op, txout)| { + if let Some(spk_i) = scan_txout!(self, op, txout) { + scanned_indices.insert(spk_i.clone()); + } + }); + + scanned_indices + } + + /// Scan a single `TxOut` for a matching script pubkey, and returns the index that matched the + /// script pubkey (if any). + pub fn scan_txout(&mut self, op: OutPoint, txout: &TxOut) -> Option<&I> { + scan_txout!(self, op, txout) + } + + /// Iterate over all known txouts that spend to tracked script pubkeys. + pub fn txouts( + &self, + ) -> impl DoubleEndedIterator + ExactSizeIterator { + self.txouts + .iter() + .map(|(op, (index, txout))| (index, *op, txout)) + } + + /// Finds all txouts on a transaction that has previously been scanned and indexed. + pub fn txouts_in_tx( + &self, + txid: Txid, + ) -> impl DoubleEndedIterator { + self.txouts + .range(OutPoint::new(txid, u32::MIN)..=OutPoint::new(txid, u32::MAX)) + .map(|(op, (index, txout))| (index, *op, txout)) + } + + /// Iterates over all outputs with script pubkeys in an index range. 
+ pub fn outputs_in_range( + &self, + range: impl RangeBounds, + ) -> impl DoubleEndedIterator { + use bitcoin::hashes::Hash; + use core::ops::Bound::*; + let min_op = OutPoint { + txid: Txid::from_inner([0x00; 32]), + vout: u32::MIN, + }; + let max_op = OutPoint { + txid: Txid::from_inner([0xff; 32]), + vout: u32::MAX, + }; + + let start = match range.start_bound() { + Included(index) => Included((index.clone(), min_op)), + Excluded(index) => Excluded((index.clone(), max_op)), + Unbounded => Unbounded, + }; + + let end = match range.end_bound() { + Included(index) => Included((index.clone(), max_op)), + Excluded(index) => Excluded((index.clone(), min_op)), + Unbounded => Unbounded, + }; + + self.spk_txouts.range((start, end)).map(|(i, op)| (i, *op)) + } + + /// Returns the txout and script pubkey index of the `TxOut` at `OutPoint`. + /// + /// Returns `None` if the `TxOut` hasn't been scanned or if nothing matching was found there. + pub fn txout(&self, outpoint: OutPoint) -> Option<(&I, &TxOut)> { + self.txouts + .get(&outpoint) + .map(|(spk_i, txout)| (spk_i, txout)) + } + + /// Returns the script that has been inserted at the `index`. + /// + /// If that index hasn't been inserted yet it will return `None`. + pub fn spk_at_index(&self, index: &I) -> Option<&Script> { + self.spks.get(index) + } + + /// The script pubkeys being tracked by the index. + pub fn all_spks(&self) -> &BTreeMap { + &self.spks + } + + /// Adds a script pubkey to scan for. Returns `false` and does nothing if spk already exists in the map + /// + /// the index will look for outputs spending to whenever it scans new data. + pub fn insert_spk(&mut self, index: I, spk: Script) -> bool { + match self.spk_indices.entry(spk.clone()) { + Entry::Vacant(value) => { + value.insert(index.clone()); + self.spks.insert(index.clone(), spk); + self.unused.insert(index); + true + } + Entry::Occupied(_) => false, + } + } + + /// Iterates over a unused script pubkeys in a index range. 
+ /// + /// Here "unused" means that after the script pubkey was stored in the index, the index has + /// never scanned a transaction output with it. + /// + /// # Example + /// + /// ```rust + /// # use bdk_chain::SpkTxOutIndex; + /// + /// // imagine our spks are indexed like (keychain, derivation_index). + /// let txout_index = SpkTxOutIndex::<(u32, u32)>::default(); + /// let all_unused_spks = txout_index.unused_spks(..); + /// let change_index = 1; + /// let unused_change_spks = + /// txout_index.unused_spks((change_index, u32::MIN)..(change_index, u32::MAX)); + /// ``` + pub fn unused_spks(&self, range: R) -> impl DoubleEndedIterator + where + R: RangeBounds, + { + self.unused + .range(range) + .map(move |index| (index, self.spk_at_index(index).expect("must exist"))) + } + + /// Returns whether the script pubkey at `index` has been used or not. + /// + /// Here "unused" means that after the script pubkey was stored in the index, the index has + /// never scanned a transaction output with it. + pub fn is_used(&self, index: &I) -> bool { + self.unused.get(index).is_none() + } + + /// Marks the script pubkey at `index` as used even though it hasn't seen an output with it. + /// This only has an effect when the `index` had been added to `self` already and was unused. + /// + /// Returns whether the `index` was originally present as `unused`. + /// + /// This is useful when you want to reserve a script pubkey for something but don't want to add + /// the transaction output using it to the index yet. Other callers will consider `index` used + /// until you call [`unmark_used`]. + /// + /// [`unmark_used`]: Self::unmark_used + pub fn mark_used(&mut self, index: &I) -> bool { + self.unused.remove(index) + } + + /// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into + /// `unused`. + /// + /// Note that if `self` has scanned an output with this script pubkey then this will have no + /// effect. 
+ /// + /// [`mark_used`]: Self::mark_used + pub fn unmark_used(&mut self, index: &I) -> bool { + // we cannot set index as unused when it does not exist + if !self.spks.contains_key(index) { + return false; + } + // we cannot set index as unused when txouts are indexed under it + if self.outputs_in_range(index..=index).next().is_some() { + return false; + } + self.unused.insert(index.clone()) + } + + /// Returns the index associated with the script pubkey. + pub fn index_of_spk(&self, script: &Script) -> Option<&I> { + self.spk_indices.get(script) + } + + /// Computes total input value going from script pubkeys in the index (sent) and total output + /// value going to script pubkeys in the index (received) in `tx`. For the `sent` to be computed + /// correctly the output being spent must have already been scanned by the index. Calculating + /// received just uses the transaction outputs directly so will be correct even if it has not + /// been scanned. + pub fn sent_and_received(&self, tx: &Transaction) -> (u64, u64) { + let mut sent = 0; + let mut received = 0; + + for txin in &tx.input { + if let Some((_, txout)) = self.txout(txin.previous_output) { + sent += txout.value; + } + } + for txout in &tx.output { + if self.index_of_spk(&txout.script_pubkey).is_some() { + received += txout.value; + } + } + + (sent, received) + } + + /// Computes the net value that this transaction gives to the script pubkeys in the index and + /// *takes* from the transaction outputs in the index. Shorthand for calling + /// [`sent_and_received`] and subtracting sent from received. + /// + /// [`sent_and_received`]: Self::sent_and_received + pub fn net_value(&self, tx: &Transaction) -> i64 { + let (sent, received) = self.sent_and_received(tx); + received as i64 - sent as i64 + } + + /// Whether any of the inputs of this transaction spend a txout tracked or whether any output + /// matches one of our script pubkeys. 
+ /// + /// It is easily possible to misuse this method and get false negatives by calling it before you + /// have scanned the `TxOut`s the transaction is spending. For example if you want to filter out + /// all the transactions in a block that are irrelevant you **must first scan all the + /// transactions in the block** and only then use this method. + pub fn is_relevant(&self, tx: &Transaction) -> bool { + let input_matches = tx + .input + .iter() + .any(|input| self.txouts.contains_key(&input.previous_output)); + let output_matches = tx + .output + .iter() + .any(|output| self.spk_indices.contains_key(&output.script_pubkey)); + input_matches || output_matches + } +} diff --git a/crates/chain/src/tx_data_traits.rs b/crates/chain/src/tx_data_traits.rs new file mode 100644 index 000000000..db95a5d46 --- /dev/null +++ b/crates/chain/src/tx_data_traits.rs @@ -0,0 +1,33 @@ +use bitcoin::{Block, OutPoint, Transaction, TxOut}; + +/// Trait to do something with every txout contained in a structure. +/// +/// We would prefer just work with things that can give us a `Iterator` +/// here but rust's type system makes it extremely hard to do this (without trait objects). +pub trait ForEachTxOut { + /// The provided closure `f` will called with each `outpoint/txout` pair. + fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))); +} + +impl ForEachTxOut for Block { + fn for_each_txout(&self, mut f: impl FnMut((OutPoint, &TxOut))) { + for tx in self.txdata.iter() { + tx.for_each_txout(&mut f) + } + } +} + +impl ForEachTxOut for Transaction { + fn for_each_txout(&self, mut f: impl FnMut((OutPoint, &TxOut))) { + let txid = self.txid(); + for (i, txout) in self.output.iter().enumerate() { + f(( + OutPoint { + txid, + vout: i as u32, + }, + txout, + )) + } + } +} diff --git a/crates/chain/src/tx_graph.rs b/crates/chain/src/tx_graph.rs new file mode 100644 index 000000000..70a45cf2e --- /dev/null +++ b/crates/chain/src/tx_graph.rs @@ -0,0 +1,581 @@ +//! 
Module for structures that store and traverse transactions. +//! +//! [`TxGraph`] is a monotone structure that inserts transactions and indexes spends. The +//! [`Additions`] structure reports changes of [`TxGraph`], but can also be applied on to a +//! [`TxGraph`] as well. Lastly, [`TxDescendants`] is an [`Iterator`] which traverses descendants of +//! a given transaction. +//! +//! Conflicting transactions are allowed to coexist within a [`TxGraph`]. This is useful for +//! identifying and traversing conflicts and descendants of a given transaction. +//! +//! # Previewing and applying changes +//! +//! Methods that either preview or apply changes to [`TxGraph`] will return [`Additions`]. +//! [`Additions`] can be applied back on to a [`TxGraph`], or be used to inform persistent storage +//! of the changes to [`TxGraph`]. +//! +//! ``` +//! # use bdk_chain::tx_graph::TxGraph; +//! # use bdk_chain::example_utils::*; +//! # use bitcoin::Transaction; +//! # let tx_a = tx_from_hex(RAW_TX_1); +//! # let tx_b = tx_from_hex(RAW_TX_2); +//! let mut graph = TxGraph::default(); +//! +//! // preview a transaction insertion (not actually inserted) +//! let additions = graph.insert_tx_preview(tx_a); +//! // apply the insertion +//! graph.apply_additions(additions); +//! +//! // you can also insert a transaction directly +//! let already_applied_additions = graph.insert_tx(tx_b); +//! ``` +//! +//! A [`TxGraph`] can also be updated with another [`TxGraph`]. +//! +//! ``` +//! # use bdk_chain::tx_graph::TxGraph; +//! # use bdk_chain::example_utils::*; +//! # use bitcoin::Transaction; +//! # let tx_a = tx_from_hex(RAW_TX_1); +//! # let tx_b = tx_from_hex(RAW_TX_2); +//! let mut graph = TxGraph::default(); +//! let update = TxGraph::new(vec![tx_a, tx_b]); +//! +//! // preview additions as result of the update +//! let additions = graph.determine_additions(&update); +//! // apply the additions +//! graph.apply_additions(additions); +//! +//! 
// we can also apply the update graph directly +//! // the additions will be empty as we have already applied the same update above +//! let additions = graph.apply_update(update); +//! assert!(additions.is_empty()); +//! ``` +use crate::{collections::*, ForEachTxOut}; +use alloc::vec::Vec; +use bitcoin::{OutPoint, Transaction, TxOut, Txid}; +use core::ops::RangeInclusive; + +/// A graph of transactions and spends. +/// +/// See the [module-level documentation] for more. +/// +/// [module-level documentation]: crate::tx_graph +#[derive(Clone, Debug, PartialEq, Default)] +pub struct TxGraph { + txs: HashMap, + spends: BTreeMap>, + + // This atrocity exists so that `TxGraph::outspends()` can return a reference. + // FIXME: This can be removed once `HashSet::new` is a const fn. + empty_outspends: HashSet, +} + +/// Node of a [`TxGraph`]. This can either be a whole transaction, or a partial transaction (where +/// we only have select outputs). +#[derive(Clone, Debug, PartialEq)] +enum TxNode { + Whole(Transaction), + Partial(BTreeMap), +} + +impl Default for TxNode { + fn default() -> Self { + Self::Partial(BTreeMap::new()) + } +} + +impl TxGraph { + /// Iterate over all tx outputs known by [`TxGraph`]. + pub fn all_txouts(&self) -> impl Iterator { + self.txs.iter().flat_map(|(txid, tx)| match tx { + TxNode::Whole(tx) => tx + .output + .iter() + .enumerate() + .map(|(vout, txout)| (OutPoint::new(*txid, vout as _), txout)) + .collect::>(), + TxNode::Partial(txouts) => txouts + .iter() + .map(|(vout, txout)| (OutPoint::new(*txid, *vout as _), txout)) + .collect::>(), + }) + } + + /// Iterate over all full transactions in the graph. + pub fn full_transactions(&self) -> impl Iterator { + self.txs.iter().filter_map(|(_, tx)| match tx { + TxNode::Whole(tx) => Some(tx), + TxNode::Partial(_) => None, + }) + } + + /// Get a transaction by txid. This only returns `Some` for full transactions. + /// + /// Refer to [`get_txout`] for getting a specific [`TxOut`]. 
+ /// + /// [`get_txout`]: Self::get_txout + pub fn get_tx(&self, txid: Txid) -> Option<&Transaction> { + match self.txs.get(&txid)? { + TxNode::Whole(tx) => Some(tx), + TxNode::Partial(_) => None, + } + } + + /// Obtains a single tx output (if any) at specified outpoint. + pub fn get_txout(&self, outpoint: OutPoint) -> Option<&TxOut> { + match self.txs.get(&outpoint.txid)? { + TxNode::Whole(tx) => tx.output.get(outpoint.vout as usize), + TxNode::Partial(txouts) => txouts.get(&outpoint.vout), + } + } + + /// Returns a [`BTreeMap`] of vout to output of the provided `txid`. + pub fn txouts(&self, txid: Txid) -> Option> { + Some(match self.txs.get(&txid)? { + TxNode::Whole(tx) => tx + .output + .iter() + .enumerate() + .map(|(vout, txout)| (vout as u32, txout)) + .collect::>(), + TxNode::Partial(txouts) => txouts + .iter() + .map(|(vout, txout)| (*vout, txout)) + .collect::>(), + }) + } + + /// Calculates the fee of a given transaction. Returns 0 if `tx` is a coinbase transaction. + /// Returns `Some(_)` if we have all the `TxOut`s being spent by `tx` in the graph (either as + /// the full transactions or individual txouts). If the returned value is negative then the + /// transaction is invalid according to the graph. + /// + /// Returns `None` if we're missing an input for the tx in the graph. + /// + /// Note `tx` does not have to be in the graph for this to work. + pub fn calculate_fee(&self, tx: &Transaction) -> Option { + if tx.is_coin_base() { + return Some(0); + } + let inputs_sum = tx + .input + .iter() + .map(|txin| { + self.get_txout(txin.previous_output) + .map(|txout| txout.value as i64) + }) + .sum::>()?; + + let outputs_sum = tx + .output + .iter() + .map(|txout| txout.value as i64) + .sum::(); + + Some(inputs_sum - outputs_sum) + } +} + +impl TxGraph { + /// Contruct a new [`TxGraph`] from a list of transaction. 
+ pub fn new(txs: impl IntoIterator) -> Self { + let mut new = Self::default(); + for tx in txs.into_iter() { + let _ = new.insert_tx(tx); + } + new + } + /// Inserts the given [`TxOut`] at [`OutPoint`]. + /// + /// Note this will ignore the action if we already have the full transaction that the txout is + /// alledged to be on (even if it doesn't match it!). + pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> Additions { + let additions = self.insert_txout_preview(outpoint, txout); + self.apply_additions(additions.clone()); + additions + } + + /// Inserts the given transaction into [`TxGraph`]. + /// + /// The [`Additions`] returned will be empty if `tx` already exists. + pub fn insert_tx(&mut self, tx: Transaction) -> Additions { + let additions = self.insert_tx_preview(tx); + self.apply_additions(additions.clone()); + additions + } + + /// Extends this graph with another so that `self` becomes the union of the two sets of + /// transactions. + /// + /// The returned [`Additions`] is the set difference of `update` and `self` (transactions that + /// exist in `update` but not in `self`). + pub fn apply_update(&mut self, update: TxGraph) -> Additions { + let additions = self.determine_additions(&update); + self.apply_additions(additions.clone()); + additions + } + + /// Applies [`Additions`] to [`TxGraph`]. 
+ pub fn apply_additions(&mut self, additions: Additions) { + for tx in additions.tx { + let txid = tx.txid(); + + tx.input + .iter() + .map(|txin| txin.previous_output) + // coinbase spends are not to be counted + .filter(|outpoint| !outpoint.is_null()) + // record spend as this tx has spent this outpoint + .for_each(|outpoint| { + self.spends.entry(outpoint).or_default().insert(txid); + }); + + if let Some(TxNode::Whole(old_tx)) = self.txs.insert(txid, TxNode::Whole(tx)) { + debug_assert_eq!( + old_tx.txid(), + txid, + "old tx of same txid should not be different" + ); + } + } + + for (outpoint, txout) in additions.txout { + let tx_entry = self + .txs + .entry(outpoint.txid) + .or_insert_with(TxNode::default); + + match tx_entry { + TxNode::Whole(_) => { /* do nothing since we already have full tx */ } + TxNode::Partial(txouts) => { + txouts.insert(outpoint.vout, txout); + } + } + } + } + + /// Previews the resultant [`Additions`] when [`Self`] is updated against the `update` graph. + /// + /// The [`Additions`] would be the set difference of `update` and `self` (transactions that + /// exist in `update` but not in `self`). + pub fn determine_additions(&self, update: &TxGraph) -> Additions { + let mut additions = Additions::default(); + + for (&txid, update_tx) in &update.txs { + if self.get_tx(txid).is_some() { + continue; + } + + match update_tx { + TxNode::Whole(tx) => { + if matches!(self.txs.get(&txid), None | Some(TxNode::Partial(_))) { + additions.tx.insert(tx.clone()); + } + } + TxNode::Partial(partial) => { + for (&vout, update_txout) in partial { + let outpoint = OutPoint::new(txid, vout); + + if self.get_txout(outpoint) != Some(update_txout) { + additions.txout.insert(outpoint, update_txout.clone()); + } + } + } + } + } + + additions + } + + /// Returns the resultant [`Additions`] if the given transaction is inserted. Does not actually + /// mutate [`Self`]. + /// + /// The [`Additions`] result will be empty if `tx` already existed in `self`. 
+ pub fn insert_tx_preview(&self, tx: Transaction) -> Additions { + let mut update = Self::default(); + update.txs.insert(tx.txid(), TxNode::Whole(tx)); + self.determine_additions(&update) + } + + /// Returns the resultant [`Additions`] if the given `txout` is inserted at `outpoint`. Does not + /// mutate `self`. + /// + /// The [`Additions`] result will be empty if the `outpoint` (or a full transaction containing + /// the `outpoint`) already existed in `self`. + pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> Additions { + let mut update = Self::default(); + update.txs.insert( + outpoint.txid, + TxNode::Partial([(outpoint.vout, txout)].into()), + ); + self.determine_additions(&update) + } +} + +impl TxGraph { + /// The transactions spending from this output. + /// + /// `TxGraph` allows conflicting transactions within the graph. Obviously the transactions in + /// the returned will never be in the same blockchain. + pub fn outspends(&self, outpoint: OutPoint) -> &HashSet { + self.spends.get(&outpoint).unwrap_or(&self.empty_outspends) + } + + /// Iterates over the transactions spending from `txid`. + /// + /// The iterator item is a union of `(vout, txid-set)` where: + /// + /// - `vout` is the provided `txid`'s outpoint that is being spent + /// - `txid-set` is the set of txids that is spending the `vout` + pub fn tx_outspends( + &self, + txid: Txid, + ) -> impl DoubleEndedIterator)> + '_ { + let start = OutPoint { txid, vout: 0 }; + let end = OutPoint { + txid, + vout: u32::MAX, + }; + self.spends + .range(start..=end) + .map(|(outpoint, spends)| (outpoint.vout, spends)) + } + + /// Iterate over all partial transactions (outputs only) in the graph. + pub fn partial_transactions(&self) -> impl Iterator)> { + self.txs.iter().filter_map(|(txid, tx)| match tx { + TxNode::Whole(_) => None, + TxNode::Partial(partial) => Some((*txid, partial)), + }) + } + + /// Creates an iterator that both filters and maps descendants from the starting `txid`. 
+ /// + /// The supplied closure takes in two inputs `(depth, descendant_txid)`: + /// + /// * `depth` is the distance between the starting `txid` and the `descendant_txid`. I.e. if the + /// descendant is spending an output of the starting `txid`, the `depth` will be 1. + /// * `descendant_txid` is the descendant's txid which we are considering to walk. + /// + /// The supplied closure returns an `Option`, allowing the caller to map each node it vists + /// and decide whether to visit descendants. + pub fn walk_descendants<'g, F, O>(&'g self, txid: Txid, walk_map: F) -> TxDescendants + where + F: FnMut(usize, Txid) -> Option + 'g, + { + TxDescendants::new_exclude_root(self, txid, walk_map) + } + + /// Creates an iterator that both filters and maps conflicting transactions (this includes + /// descendants of directly-conflicting transactions, which are also considered conflicts). + /// + /// Refer to [`Self::walk_descendants`] for `walk_map` usage. + pub fn walk_conflicts<'g, F, O>(&'g self, tx: &'g Transaction, walk_map: F) -> TxDescendants + where + F: FnMut(usize, Txid) -> Option + 'g, + { + let txids = self.direct_conflicts_of_tx(tx).map(|(_, txid)| txid); + TxDescendants::from_multiple_include_root(self, txids, walk_map) + } + + /// Given a transaction, return an iterator of txids which directly conflict with the given + /// transaction's inputs (spends). The conflicting txids are returned with the given + /// transaction's vin (in which it conflicts). + /// + /// Note that this only returns directly conflicting txids and does not include descendants of + /// those txids (which are technically also conflicting). 
+ pub fn direct_conflicts_of_tx<'g>( + &'g self, + tx: &'g Transaction, + ) -> impl Iterator + '_ { + let txid = tx.txid(); + tx.input + .iter() + .enumerate() + .filter_map(move |(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin))) + .flat_map(|(spends, vin)| core::iter::repeat(vin).zip(spends.iter().cloned())) + .filter(move |(_, conflicting_txid)| *conflicting_txid != txid) + } + + /// Whether the graph has any transactions or outputs in it. + pub fn is_empty(&self) -> bool { + self.txs.is_empty() + } +} + +/// A structure that represents changes to a [`TxGraph`]. +/// +/// It is named "additions" because [`TxGraph`] is monotone so transactions can only be added and +/// not removed. +/// +/// Refer to [module-level documentation] for more. +/// +/// [module-level documentation]: crate::tx_graph +#[derive(Debug, Clone, PartialEq, Default)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +#[must_use] +pub struct Additions { + pub tx: BTreeSet, + pub txout: BTreeMap, +} + +impl Additions { + /// Returns true if the [`Additions`] is empty (no transactions or txouts). + pub fn is_empty(&self) -> bool { + self.tx.is_empty() && self.txout.is_empty() + } + + /// Iterates over all outpoints contained within [`Additions`]. + pub fn txouts(&self) -> impl Iterator { + self.tx + .iter() + .flat_map(|tx| { + tx.output + .iter() + .enumerate() + .map(move |(vout, txout)| (OutPoint::new(tx.txid(), vout as _), txout)) + }) + .chain(self.txout.iter().map(|(op, txout)| (*op, txout))) + } + + /// Appends the changes in `other` into self such that applying `self` afterwards has the same + /// effect as sequentially applying the original `self` and `other`. 
+ pub fn append(&mut self, mut other: Additions) { + self.tx.append(&mut other.tx); + self.txout.append(&mut other.txout); + } +} + +impl AsRef for TxGraph { + fn as_ref(&self) -> &TxGraph { + self + } +} + +impl ForEachTxOut for Additions { + fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) { + self.txouts().for_each(f) + } +} + +impl ForEachTxOut for TxGraph { + fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) { + self.all_txouts().for_each(f) + } +} + +/// An iterator that traverses transaction descendants. +/// +/// This `struct` is created by the [`walk_descendants`] method of [`TxGraph`]. +/// +/// [`walk_descendants`]: TxGraph::walk_descendants +pub struct TxDescendants<'g, F> { + graph: &'g TxGraph, + visited: HashSet, + stack: Vec<(usize, Txid)>, + filter_map: F, +} + +impl<'g, F> TxDescendants<'g, F> { + /// Creates a `TxDescendants` that includes the starting `txid` when iterating. + #[allow(unused)] + pub(crate) fn new_include_root(graph: &'g TxGraph, txid: Txid, filter_map: F) -> Self { + Self { + graph, + visited: Default::default(), + stack: [(0, txid)].into(), + filter_map, + } + } + + /// Creates a `TxDescendants` that excludes the starting `txid` when iterating. + pub(crate) fn new_exclude_root(graph: &'g TxGraph, txid: Txid, filter_map: F) -> Self { + let mut descendants = Self { + graph, + visited: Default::default(), + stack: Default::default(), + filter_map, + }; + descendants.populate_stack(1, txid); + descendants + } + + /// Creates a `TxDescendants` from multiple starting transactions that includes the starting + /// `txid`s when iterating. + pub(crate) fn from_multiple_include_root(graph: &'g TxGraph, txids: I, filter_map: F) -> Self + where + I: IntoIterator, + { + Self { + graph, + visited: Default::default(), + stack: txids.into_iter().map(|txid| (0, txid)).collect(), + filter_map, + } + } + + /// Creates a `TxDescendants` from multiple starting transactions that excludes the starting + /// `txid`s when iterating. 
+ #[allow(unused)] + pub(crate) fn from_multiple_exclude_root(graph: &'g TxGraph, txids: I, filter_map: F) -> Self + where + I: IntoIterator, + { + let mut descendants = Self { + graph, + visited: Default::default(), + stack: Default::default(), + filter_map, + }; + for txid in txids { + descendants.populate_stack(1, txid); + } + descendants + } +} + +impl<'g, F> TxDescendants<'g, F> { + fn populate_stack(&mut self, depth: usize, txid: Txid) { + let spend_paths = self + .graph + .spends + .range(tx_outpoint_range(txid)) + .flat_map(|(_, spends)| spends) + .map(|&txid| (depth, txid)); + self.stack.extend(spend_paths); + } +} + +impl<'g, F, O> Iterator for TxDescendants<'g, F> +where + F: FnMut(usize, Txid) -> Option, +{ + type Item = O; + + fn next(&mut self) -> Option { + let (op_spends, txid, item) = loop { + // we have exhausted all paths when stack is empty + let (op_spends, txid) = self.stack.pop()?; + // we do not want to visit the same transaction twice + if self.visited.insert(txid) { + // ignore paths when user filters them out + if let Some(item) = (self.filter_map)(op_spends, txid) { + break (op_spends, txid, item); + } + } + }; + + self.populate_stack(op_spends + 1, txid); + Some(item) + } +} + +fn tx_outpoint_range(txid: Txid) -> RangeInclusive { + OutPoint::new(txid, u32::MIN)..=OutPoint::new(txid, u32::MAX) +} diff --git a/crates/chain/tests/common/mod.rs b/crates/chain/tests/common/mod.rs new file mode 100644 index 000000000..e9b7a101f --- /dev/null +++ b/crates/chain/tests/common/mod.rs @@ -0,0 +1,60 @@ +#[allow(unused_macros)] +macro_rules! h { + ($index:literal) => {{ + bitcoin::hashes::Hash::hash($index.as_bytes()) + }}; +} + +#[allow(unused_macros)] +macro_rules! 
chain { + ($([$($tt:tt)*]),*) => { chain!( checkpoints: [$([$($tt)*]),*] ) }; + (checkpoints: $($tail:tt)*) => { chain!( index: TxHeight, checkpoints: $($tail)*) }; + (index: $ind:ty, checkpoints: [ $([$height:expr, $block_hash:expr]),* ] $(,txids: [$(($txid:expr, $tx_height:expr)),*])?) => {{ + #[allow(unused_mut)] + let mut chain = bdk_chain::sparse_chain::SparseChain::<$ind>::from_checkpoints([$(($height, $block_hash).into()),*]); + + $( + $( + let _ = chain.insert_tx($txid, $tx_height).expect("should succeed"); + )* + )? + + chain + }}; +} + +#[allow(unused_macros)] +macro_rules! changeset { + (checkpoints: $($tail:tt)*) => { changeset!(index: TxHeight, checkpoints: $($tail)*) }; + ( + index: $ind:ty, + checkpoints: [ $(( $height:expr, $cp_to:expr )),* ] + $(,txids: [ $(( $txid:expr, $tx_to:expr )),* ])? + ) => {{ + use bdk_chain::collections::BTreeMap; + + #[allow(unused_mut)] + bdk_chain::sparse_chain::ChangeSet::<$ind> { + checkpoints: { + let mut changes = BTreeMap::default(); + $(changes.insert($height, $cp_to);)* + changes + }, + txids: { + let mut changes = BTreeMap::default(); + $($(changes.insert($txid, $tx_to.map(|h: TxHeight| h.into()));)*)? 
+ changes + } + } + }}; +} + +#[allow(unused)] +pub fn new_tx(lt: u32) -> bitcoin::Transaction { + bitcoin::Transaction { + version: 0x00, + lock_time: bitcoin::PackedLockTime(lt), + input: vec![], + output: vec![], + } +} diff --git a/crates/chain/tests/test_chain_graph.rs b/crates/chain/tests/test_chain_graph.rs new file mode 100644 index 000000000..68f50b8f7 --- /dev/null +++ b/crates/chain/tests/test_chain_graph.rs @@ -0,0 +1,653 @@ +#[macro_use] +mod common; + +use bdk_chain::{ + chain_graph::*, + collections::HashSet, + sparse_chain, + tx_graph::{self, TxGraph}, + BlockId, TxHeight, +}; +use bitcoin::{OutPoint, PackedLockTime, Script, Sequence, Transaction, TxIn, TxOut, Witness}; + +#[test] +fn test_spent_by() { + let tx1 = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }; + + let op = OutPoint { + txid: tx1.txid(), + vout: 0, + }; + + let tx2 = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: op, + ..Default::default() + }], + output: vec![], + }; + let tx3 = Transaction { + version: 0x01, + lock_time: PackedLockTime(42), + input: vec![TxIn { + previous_output: op, + ..Default::default() + }], + output: vec![], + }; + + let mut cg1 = ChainGraph::default(); + let _ = cg1 + .insert_tx(tx1, TxHeight::Unconfirmed) + .expect("should insert"); + let mut cg2 = cg1.clone(); + let _ = cg1 + .insert_tx(tx2.clone(), TxHeight::Unconfirmed) + .expect("should insert"); + let _ = cg2 + .insert_tx(tx3.clone(), TxHeight::Unconfirmed) + .expect("should insert"); + + assert_eq!(cg1.spent_by(op), Some((&TxHeight::Unconfirmed, tx2.txid()))); + assert_eq!(cg2.spent_by(op), Some((&TxHeight::Unconfirmed, tx3.txid()))); +} + +#[test] +fn update_evicts_conflicting_tx() { + let cp_a = BlockId { + height: 0, + hash: h!("A"), + }; + let cp_b = BlockId { + height: 1, + hash: h!("B"), + }; + let cp_b2 = BlockId { + height: 1, + hash: h!("B'"), + }; + + let tx_a = 
Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }; + + let tx_b = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::new(tx_a.txid(), 0), + script_sig: Script::new(), + sequence: Sequence::default(), + witness: Witness::new(), + }], + output: vec![TxOut::default()], + }; + + let tx_b2 = Transaction { + version: 0x02, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::new(tx_a.txid(), 0), + script_sig: Script::new(), + sequence: Sequence::default(), + witness: Witness::new(), + }], + output: vec![TxOut::default(), TxOut::default()], + }; + { + let mut cg1 = { + let mut cg = ChainGraph::default(); + let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); + let _ = cg + .insert_tx(tx_a.clone(), TxHeight::Confirmed(0)) + .expect("should insert tx"); + let _ = cg + .insert_tx(tx_b.clone(), TxHeight::Unconfirmed) + .expect("should insert tx"); + cg + }; + let cg2 = { + let mut cg = ChainGraph::default(); + let _ = cg + .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed) + .expect("should insert tx"); + cg + }; + + let changeset = ChangeSet:: { + chain: sparse_chain::ChangeSet { + checkpoints: Default::default(), + txids: [ + (tx_b.txid(), None), + (tx_b2.txid(), Some(TxHeight::Unconfirmed)), + ] + .into(), + }, + graph: tx_graph::Additions { + tx: [tx_b2.clone()].into(), + txout: [].into(), + }, + }; + assert_eq!( + cg1.determine_changeset(&cg2), + Ok(changeset.clone()), + "tx should be evicted from mempool" + ); + + cg1.apply_changeset(changeset); + } + + { + let cg1 = { + let mut cg = ChainGraph::default(); + let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); + let _ = cg.insert_checkpoint(cp_b).expect("should insert cp"); + let _ = cg + .insert_tx(tx_a.clone(), TxHeight::Confirmed(0)) + .expect("should insert tx"); + let _ = cg + .insert_tx(tx_b.clone(), TxHeight::Confirmed(1)) + 
.expect("should insert tx"); + cg + }; + let cg2 = { + let mut cg = ChainGraph::default(); + let _ = cg + .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed) + .expect("should insert tx"); + cg + }; + assert_eq!( + cg1.determine_changeset(&cg2), + Err(UpdateError::UnresolvableConflict(UnresolvableConflict { + already_confirmed_tx: (TxHeight::Confirmed(1), tx_b.txid()), + update_tx: (TxHeight::Unconfirmed, tx_b2.txid()), + })), + "fail if tx is evicted from valid block" + ); + } + + { + // Given 2 blocks `{A, B}`, and an update that invalidates block B with + // `{A, B'}`, we expect txs that exist in `B` that conflicts with txs + // introduced in the update to be successfully evicted. + let mut cg1 = { + let mut cg = ChainGraph::default(); + let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); + let _ = cg.insert_checkpoint(cp_b).expect("should insert cp"); + let _ = cg + .insert_tx(tx_a, TxHeight::Confirmed(0)) + .expect("should insert tx"); + let _ = cg + .insert_tx(tx_b.clone(), TxHeight::Confirmed(1)) + .expect("should insert tx"); + cg + }; + let cg2 = { + let mut cg = ChainGraph::default(); + let _ = cg.insert_checkpoint(cp_a).expect("should insert cp"); + let _ = cg.insert_checkpoint(cp_b2).expect("should insert cp"); + let _ = cg + .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed) + .expect("should insert tx"); + cg + }; + + let changeset = ChangeSet:: { + chain: sparse_chain::ChangeSet { + checkpoints: [(1, Some(h!("B'")))].into(), + txids: [ + (tx_b.txid(), None), + (tx_b2.txid(), Some(TxHeight::Unconfirmed)), + ] + .into(), + }, + graph: tx_graph::Additions { + tx: [tx_b2].into(), + txout: [].into(), + }, + }; + assert_eq!( + cg1.determine_changeset(&cg2), + Ok(changeset.clone()), + "tx should be evicted from B", + ); + + cg1.apply_changeset(changeset); + } +} + +#[test] +fn chain_graph_new_missing() { + let tx_a = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }; + let tx_b = 
Transaction { + version: 0x02, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }; + + let update = chain!( + index: TxHeight, + checkpoints: [[0, h!("A")]], + txids: [ + (tx_a.txid(), TxHeight::Confirmed(0)), + (tx_b.txid(), TxHeight::Confirmed(0)) + ] + ); + let mut graph = TxGraph::default(); + + let mut expected_missing = HashSet::new(); + expected_missing.insert(tx_a.txid()); + expected_missing.insert(tx_b.txid()); + + assert_eq!( + ChainGraph::new(update.clone(), graph.clone()), + Err(NewError::Missing(expected_missing.clone())) + ); + + let _ = graph.insert_tx(tx_b.clone()); + expected_missing.remove(&tx_b.txid()); + + assert_eq!( + ChainGraph::new(update.clone(), graph.clone()), + Err(NewError::Missing(expected_missing.clone())) + ); + + let _ = graph.insert_txout( + OutPoint { + txid: tx_a.txid(), + vout: 0, + }, + tx_a.output[0].clone(), + ); + + assert_eq!( + ChainGraph::new(update.clone(), graph.clone()), + Err(NewError::Missing(expected_missing)), + "inserting an output instead of full tx doesn't satisfy constraint" + ); + + let _ = graph.insert_tx(tx_a.clone()); + + let new_graph = ChainGraph::new(update.clone(), graph.clone()).unwrap(); + let expected_graph = { + let mut cg = ChainGraph::::default(); + let _ = cg + .insert_checkpoint(update.latest_checkpoint().unwrap()) + .unwrap(); + let _ = cg.insert_tx(tx_a, TxHeight::Confirmed(0)).unwrap(); + let _ = cg.insert_tx(tx_b, TxHeight::Confirmed(0)).unwrap(); + cg + }; + + assert_eq!(new_graph, expected_graph); +} + +#[test] +fn chain_graph_new_conflicts() { + let tx_a = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }; + + let tx_b = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::new(tx_a.txid(), 0), + script_sig: Script::new(), + sequence: Sequence::default(), + witness: Witness::new(), + }], + output: vec![TxOut::default()], + }; + 
+ let tx_b2 = Transaction { + version: 0x02, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::new(tx_a.txid(), 0), + script_sig: Script::new(), + sequence: Sequence::default(), + witness: Witness::new(), + }], + output: vec![TxOut::default(), TxOut::default()], + }; + + let chain = chain!( + index: TxHeight, + checkpoints: [[5, h!("A")]], + txids: [ + (tx_a.txid(), TxHeight::Confirmed(1)), + (tx_b.txid(), TxHeight::Confirmed(2)), + (tx_b2.txid(), TxHeight::Confirmed(3)) + ] + ); + + let graph = TxGraph::new([tx_a, tx_b, tx_b2]); + + assert!(matches!( + ChainGraph::new(chain, graph), + Err(NewError::Conflict { .. }) + )); +} + +#[test] +fn test_get_tx_in_chain() { + let mut cg = ChainGraph::default(); + let tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }; + + let _ = cg.insert_tx(tx.clone(), TxHeight::Unconfirmed).unwrap(); + assert_eq!( + cg.get_tx_in_chain(tx.txid()), + Some((&TxHeight::Unconfirmed, &tx)) + ); +} + +#[test] +fn test_iterate_transactions() { + let mut cg = ChainGraph::default(); + let txs = (0..3) + .map(|i| Transaction { + version: i, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }) + .collect::>(); + let _ = cg + .insert_checkpoint(BlockId { + height: 1, + hash: h!("A"), + }) + .unwrap(); + let _ = cg + .insert_tx(txs[0].clone(), TxHeight::Confirmed(1)) + .unwrap(); + let _ = cg.insert_tx(txs[1].clone(), TxHeight::Unconfirmed).unwrap(); + let _ = cg + .insert_tx(txs[2].clone(), TxHeight::Confirmed(0)) + .unwrap(); + + assert_eq!( + cg.transactions_in_chain().collect::>(), + vec![ + (&TxHeight::Confirmed(0), &txs[2]), + (&TxHeight::Confirmed(1), &txs[0]), + (&TxHeight::Unconfirmed, &txs[1]), + ] + ); +} + +/// Start with: block1, block2a, tx1, tx2a +/// Update 1: block2a -> block2b , tx2a -> tx2b +/// Update 2: block2b -> block2c , tx2b -> tx2a +#[test] +fn test_apply_changes_reintroduce_tx() { + let 
block1 = BlockId { + height: 1, + hash: h!("block 1"), + }; + let block2a = BlockId { + height: 2, + hash: h!("block 2a"), + }; + let block2b = BlockId { + height: 2, + hash: h!("block 2b"), + }; + let block2c = BlockId { + height: 2, + hash: h!("block 2c"), + }; + + let tx1 = Transaction { + version: 0, + lock_time: PackedLockTime(1), + input: Vec::new(), + output: [TxOut { + value: 1, + script_pubkey: Script::new(), + }] + .into(), + }; + + let tx2a = Transaction { + version: 0, + lock_time: PackedLockTime('a'.into()), + input: [TxIn { + previous_output: OutPoint::new(tx1.txid(), 0), + ..Default::default() + }] + .into(), + output: [TxOut { + value: 0, + ..Default::default() + }] + .into(), + }; + + let tx2b = Transaction { + lock_time: PackedLockTime('b'.into()), + ..tx2a.clone() + }; + + // block1, block2a, tx1, tx2a + let mut cg = { + let mut cg = ChainGraph::default(); + let _ = cg.insert_checkpoint(block1).unwrap(); + let _ = cg.insert_checkpoint(block2a).unwrap(); + let _ = cg.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap(); + let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap(); + cg + }; + + // block2a -> block2b , tx2a -> tx2b + let update = { + let mut update = ChainGraph::default(); + let _ = update.insert_checkpoint(block1).unwrap(); + let _ = update.insert_checkpoint(block2b).unwrap(); + let _ = update + .insert_tx(tx2b.clone(), TxHeight::Confirmed(2)) + .unwrap(); + update + }; + assert_eq!( + cg.apply_update(update).expect("should update"), + ChangeSet { + chain: changeset! 
{ + checkpoints: [(2, Some(block2b.hash))], + txids: [(tx2a.txid(), None), (tx2b.txid(), Some(TxHeight::Confirmed(2)))] + }, + graph: tx_graph::Additions { + tx: [tx2b.clone()].into(), + ..Default::default() + }, + } + ); + + // block2b -> block2c , tx2b -> tx2a + let update = { + let mut update = ChainGraph::default(); + let _ = update.insert_checkpoint(block1).unwrap(); + let _ = update.insert_checkpoint(block2c).unwrap(); + let _ = update + .insert_tx(tx2a.clone(), TxHeight::Confirmed(2)) + .unwrap(); + update + }; + assert_eq!( + cg.apply_update(update).expect("should update"), + ChangeSet { + chain: changeset! { + checkpoints: [(2, Some(block2c.hash))], + txids: [(tx2b.txid(), None), (tx2a.txid(), Some(TxHeight::Confirmed(2)))] + }, + ..Default::default() + } + ); +} + +#[test] +fn test_evict_descendants() { + let block_1 = BlockId { + height: 1, + hash: h!("block 1"), + }; + + let block_2a = BlockId { + height: 2, + hash: h!("block 2 a"), + }; + + let block_2b = BlockId { + height: 2, + hash: h!("block 2 b"), + }; + + let tx_1 = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(h!("fake tx"), 0), + ..Default::default() + }], + output: vec![TxOut { + value: 10_000, + script_pubkey: Script::new(), + }], + ..common::new_tx(1) + }; + let tx_2 = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_1.txid(), 0), + ..Default::default() + }], + output: vec![ + TxOut { + value: 20_000, + script_pubkey: Script::new(), + }, + TxOut { + value: 30_000, + script_pubkey: Script::new(), + }, + ], + ..common::new_tx(2) + }; + let tx_3 = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_2.txid(), 0), + ..Default::default() + }], + output: vec![TxOut { + value: 40_000, + script_pubkey: Script::new(), + }], + ..common::new_tx(3) + }; + let tx_4 = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_2.txid(), 1), + ..Default::default() + }], + output: vec![TxOut { + value: 40_000, + script_pubkey: 
Script::new(), + }], + ..common::new_tx(4) + }; + let tx_5 = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_4.txid(), 0), + ..Default::default() + }], + output: vec![TxOut { + value: 40_000, + script_pubkey: Script::new(), + }], + ..common::new_tx(5) + }; + + let tx_conflict = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_1.txid(), 0), + ..Default::default() + }], + output: vec![TxOut { + value: 12345, + script_pubkey: Script::new(), + }], + ..common::new_tx(6) + }; + + // 1 is spent by 2, 2 is spent by 3 and 4, 4 is spent by 5 + let _txid_1 = tx_1.txid(); + let txid_2 = tx_2.txid(); + let txid_3 = tx_3.txid(); + let txid_4 = tx_4.txid(); + let txid_5 = tx_5.txid(); + + // this tx conflicts with 2 + let txid_conflict = tx_conflict.txid(); + + let cg = { + let mut cg = ChainGraph::::default(); + let _ = cg.insert_checkpoint(block_1); + let _ = cg.insert_checkpoint(block_2a); + let _ = cg.insert_tx(tx_1, TxHeight::Confirmed(1)); + let _ = cg.insert_tx(tx_2, TxHeight::Confirmed(2)); + let _ = cg.insert_tx(tx_3, TxHeight::Confirmed(2)); + let _ = cg.insert_tx(tx_4, TxHeight::Confirmed(2)); + let _ = cg.insert_tx(tx_5, TxHeight::Confirmed(2)); + cg + }; + + let update = { + let mut cg = ChainGraph::::default(); + let _ = cg.insert_checkpoint(block_1); + let _ = cg.insert_checkpoint(block_2b); + let _ = cg.insert_tx(tx_conflict.clone(), TxHeight::Confirmed(2)); + cg + }; + + assert_eq!( + cg.determine_changeset(&update), + Ok(ChangeSet { + chain: changeset! 
{ + checkpoints: [(2, Some(block_2b.hash))], + txids: [(txid_2, None), (txid_3, None), (txid_4, None), (txid_5, None), (txid_conflict, Some(TxHeight::Confirmed(2)))] + }, + graph: tx_graph::Additions { + tx: [tx_conflict.clone()].into(), + ..Default::default() + } + }) + ); + + let err = cg + .insert_tx_preview(tx_conflict, TxHeight::Unconfirmed) + .expect_err("must fail due to conflicts"); + assert!(matches!(err, InsertTxError::UnresolvableConflict(_))); +} diff --git a/crates/chain/tests/test_keychain_tracker.rs b/crates/chain/tests/test_keychain_tracker.rs new file mode 100644 index 000000000..3bf0a1d50 --- /dev/null +++ b/crates/chain/tests/test_keychain_tracker.rs @@ -0,0 +1,239 @@ +#![cfg(feature = "miniscript")] +#[macro_use] +mod common; +use bdk_chain::{ + keychain::{Balance, KeychainTracker}, + miniscript::{ + bitcoin::{secp256k1::Secp256k1, OutPoint, PackedLockTime, Transaction, TxOut}, + Descriptor, + }, + BlockId, ConfirmationTime, TxHeight, +}; +use bitcoin::TxIn; + +#[test] +fn test_insert_tx() { + let mut tracker = KeychainTracker::default(); + let secp = Secp256k1::new(); + let (descriptor, _) = Descriptor::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap(); + tracker.add_keychain((), descriptor.clone()); + let txout = TxOut { + value: 100_000, + script_pubkey: descriptor.at_derivation_index(5).script_pubkey(), + }; + + let tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![txout], + }; + + let _ = tracker.txout_index.reveal_to_target(&(), 5); + + let changeset = tracker + .insert_tx_preview(tx.clone(), ConfirmationTime::Unconfirmed) + .unwrap(); + tracker.apply_changeset(changeset); + assert_eq!( + tracker + .chain_graph() + .transactions_in_chain() + .collect::>(), + vec![(&ConfirmationTime::Unconfirmed, &tx)] + ); + + assert_eq!( + tracker + .txout_index + 
.txouts_of_keychain(&()) + .collect::>(), + vec![( + 5, + OutPoint { + txid: tx.txid(), + vout: 0 + } + )] + ); +} + +#[test] +fn test_balance() { + use core::str::FromStr; + #[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd)] + enum Keychain { + One, + Two, + } + let mut tracker = KeychainTracker::::default(); + let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap(); + let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap(); + tracker.add_keychain(Keychain::One, one); + tracker.add_keychain(Keychain::Two, two); + + let tx1 = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 13_000, + script_pubkey: tracker + .txout_index + .reveal_next_spk(&Keychain::One) + .0 + .1 + .clone(), + }], + }; + + let tx2 = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 7_000, + script_pubkey: tracker + .txout_index + .reveal_next_spk(&Keychain::Two) + .0 + .1 + .clone(), + }], + }; + + let tx_coinbase = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn::default()], + output: vec![TxOut { + value: 11_000, + script_pubkey: tracker + .txout_index + .reveal_next_spk(&Keychain::Two) + .0 + .1 + .clone(), + }], + }; + + assert!(tx_coinbase.is_coin_base()); + + let _ = tracker + .insert_checkpoint(BlockId { + height: 5, + hash: h!("1"), + }) + .unwrap(); + + let should_trust = |keychain: &Keychain| match *keychain { + Keychain::One => false, + Keychain::Two => true, + }; + + assert_eq!(tracker.balance(should_trust), Balance::default()); + + let _ = tracker + .insert_tx(tx1.clone(), TxHeight::Unconfirmed) + .unwrap(); + + assert_eq!( + tracker.balance(should_trust), 
+ Balance { + untrusted_pending: 13_000, + ..Default::default() + } + ); + + let _ = tracker + .insert_tx(tx2.clone(), TxHeight::Unconfirmed) + .unwrap(); + + assert_eq!( + tracker.balance(should_trust), + Balance { + trusted_pending: 7_000, + untrusted_pending: 13_000, + ..Default::default() + } + ); + + let _ = tracker + .insert_tx(tx_coinbase, TxHeight::Confirmed(0)) + .unwrap(); + + assert_eq!( + tracker.balance(should_trust), + Balance { + trusted_pending: 7_000, + untrusted_pending: 13_000, + immature: 11_000, + ..Default::default() + } + ); + + let _ = tracker.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap(); + + assert_eq!( + tracker.balance(should_trust), + Balance { + trusted_pending: 7_000, + untrusted_pending: 0, + immature: 11_000, + confirmed: 13_000, + } + ); + + let _ = tracker.insert_tx(tx2, TxHeight::Confirmed(2)).unwrap(); + + assert_eq!( + tracker.balance(should_trust), + Balance { + trusted_pending: 0, + untrusted_pending: 0, + immature: 11_000, + confirmed: 20_000, + } + ); + + let _ = tracker + .insert_checkpoint(BlockId { + height: 98, + hash: h!("98"), + }) + .unwrap(); + + assert_eq!( + tracker.balance(should_trust), + Balance { + trusted_pending: 0, + untrusted_pending: 0, + immature: 11_000, + confirmed: 20_000, + } + ); + + let _ = tracker + .insert_checkpoint(BlockId { + height: 99, + hash: h!("99"), + }) + .unwrap(); + + assert_eq!( + tracker.balance(should_trust), + Balance { + trusted_pending: 0, + untrusted_pending: 0, + immature: 0, + confirmed: 31_000, + } + ); + + assert_eq!(tracker.balance_at(0), 0); + assert_eq!(tracker.balance_at(1), 13_000); + assert_eq!(tracker.balance_at(2), 20_000); + assert_eq!(tracker.balance_at(98), 20_000); + assert_eq!(tracker.balance_at(99), 31_000); + assert_eq!(tracker.balance_at(100), 31_000); +} diff --git a/crates/chain/tests/test_keychain_txout_index.rs b/crates/chain/tests/test_keychain_txout_index.rs new file mode 100644 index 000000000..cfbcd123b --- /dev/null +++ 
b/crates/chain/tests/test_keychain_txout_index.rs @@ -0,0 +1,328 @@ +#![cfg(feature = "miniscript")] + +#[macro_use] +mod common; +use bdk_chain::{ + collections::BTreeMap, + keychain::{DerivationAdditions, KeychainTxOutIndex}, +}; + +use bitcoin::{secp256k1::Secp256k1, Script, Transaction, TxOut}; +use miniscript::{Descriptor, DescriptorPublicKey}; + +#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)] +enum TestKeychain { + External, + Internal, +} + +fn init_txout_index() -> ( + bdk_chain::keychain::KeychainTxOutIndex, + Descriptor, + Descriptor, +) { + let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::::default(); + + let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only(); + let (external_descriptor,_) = Descriptor::::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap(); + let (internal_descriptor,_) = Descriptor::::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap(); + + txout_index.add_keychain(TestKeychain::External, external_descriptor.clone()); + txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone()); + + (txout_index, external_descriptor, internal_descriptor) +} + +fn spk_at_index(descriptor: &Descriptor, index: u32) -> Script { + descriptor + .derived_descriptor(&Secp256k1::verification_only(), index) + .expect("must derive") + .script_pubkey() +} + +#[test] +fn test_set_all_derivation_indices() { + let (mut txout_index, _, _) = init_txout_index(); + let derive_to: BTreeMap<_, _> = + [(TestKeychain::External, 12), (TestKeychain::Internal, 24)].into(); + assert_eq!( + txout_index.reveal_to_target_multi(&derive_to).1.as_inner(), + &derive_to + ); + assert_eq!(txout_index.last_revealed_indices(), &derive_to); + assert_eq!( + 
txout_index.reveal_to_target_multi(&derive_to).1, + DerivationAdditions::default(), + "no changes if we set to the same thing" + ); +} + +#[test] +fn test_lookahead() { + let (mut txout_index, external_desc, internal_desc) = init_txout_index(); + + // ensure it does not break anything if lookahead is set multiple times + (0..=10).for_each(|lookahead| txout_index.set_lookahead(&TestKeychain::External, lookahead)); + (0..=20) + .filter(|v| v % 2 == 0) + .for_each(|lookahead| txout_index.set_lookahead(&TestKeychain::Internal, lookahead)); + + assert_eq!(txout_index.inner().all_spks().len(), 30); + + // given: + // - external lookahead set to 10 + // - internal lookahead set to 20 + // when: + // - set external derivation index to value higher than last, but within the lookahead value + // expect: + // - scripts cached in spk_txout_index should increase correctly + // - stored scripts of external keychain should be of expected counts + for index in (0..20).skip_while(|i| i % 2 == 1) { + let (revealed_spks, revealed_additions) = + txout_index.reveal_to_target(&TestKeychain::External, index); + assert_eq!( + revealed_spks.collect::>(), + vec![(index, spk_at_index(&external_desc, index))], + ); + assert_eq!( + revealed_additions.as_inner(), + &[(TestKeychain::External, index)].into() + ); + + assert_eq!( + txout_index.inner().all_spks().len(), + 10 /* external lookahead */ + + 20 /* internal lookahead */ + + index as usize + 1 /* `derived` count */ + ); + assert_eq!( + txout_index + .revealed_spks_of_keychain(&TestKeychain::External) + .count(), + index as usize + 1, + ); + assert_eq!( + txout_index + .revealed_spks_of_keychain(&TestKeychain::Internal) + .count(), + 0, + ); + assert_eq!( + txout_index + .unused_spks_of_keychain(&TestKeychain::External) + .count(), + index as usize + 1, + ); + assert_eq!( + txout_index + .unused_spks_of_keychain(&TestKeychain::Internal) + .count(), + 0, + ); + } + + // given: + // - internal lookahead is 20 + // - internal derivation index 
is `None` + // when: + // - derivation index is set ahead of current derivation index + lookahead + // expect: + // - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped + let (revealed_spks, revealed_additions) = + txout_index.reveal_to_target(&TestKeychain::Internal, 24); + assert_eq!( + revealed_spks.collect::>(), + (0..=24) + .map(|index| (index, spk_at_index(&internal_desc, index))) + .collect::>(), + ); + assert_eq!( + revealed_additions.as_inner(), + &[(TestKeychain::Internal, 24)].into() + ); + assert_eq!( + txout_index.inner().all_spks().len(), + 10 /* external lookahead */ + + 20 /* internal lookahead */ + + 20 /* external stored index count */ + + 25 /* internal stored index count */ + ); + assert_eq!( + txout_index + .revealed_spks_of_keychain(&TestKeychain::Internal) + .count(), + 25, + ); + + // ensure derivation indices are expected for each keychain + let last_external_index = txout_index + .last_revealed_index(&TestKeychain::External) + .expect("already derived"); + let last_internal_index = txout_index + .last_revealed_index(&TestKeychain::Internal) + .expect("already derived"); + assert_eq!(last_external_index, 19); + assert_eq!(last_internal_index, 24); + + // when: + // - scanning txouts with spks within stored indexes + // expect: + // - no changes to stored index counts + let external_iter = 0..=last_external_index; + let internal_iter = last_internal_index - last_external_index..=last_internal_index; + for (external_index, internal_index) in external_iter.zip(internal_iter) { + let tx = Transaction { + output: vec![ + TxOut { + script_pubkey: external_desc + .at_derivation_index(external_index) + .script_pubkey(), + value: 10_000, + }, + TxOut { + script_pubkey: internal_desc + .at_derivation_index(internal_index) + .script_pubkey(), + value: 10_000, + }, + ], + ..common::new_tx(external_index) + }; + assert_eq!(txout_index.scan(&tx), DerivationAdditions::default()); + assert_eq!( + 
txout_index.last_revealed_index(&TestKeychain::External), + Some(last_external_index) + ); + assert_eq!( + txout_index.last_revealed_index(&TestKeychain::Internal), + Some(last_internal_index) + ); + assert_eq!( + txout_index + .revealed_spks_of_keychain(&TestKeychain::External) + .count(), + last_external_index as usize + 1, + ); + assert_eq!( + txout_index + .revealed_spks_of_keychain(&TestKeychain::Internal) + .count(), + last_internal_index as usize + 1, + ); + } + + // when: + // - scanning txouts with spks above last stored index + // expect: + // - cached scripts count should increase as expected + // - last stored index should increase as expected + // TODO! +} + +#[test] +fn test_wildcard_derivations() { + let (mut txout_index, external_desc, _) = init_txout_index(); + let external_spk_0 = external_desc.at_derivation_index(0).script_pubkey(); + let external_spk_16 = external_desc.at_derivation_index(16).script_pubkey(); + let external_spk_26 = external_desc.at_derivation_index(26).script_pubkey(); + let external_spk_27 = external_desc.at_derivation_index(27).script_pubkey(); + + // - nothing is derived + // - unused list is also empty + // + // - next_derivation_index() == (0, true) + // - derive_new() == ((0, ), DerivationAdditions) + // - next_unused() == ((0, ), DerivationAdditions:is_empty()) + assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true)); + let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External); + assert_eq!(spk, (0_u32, &external_spk_0)); + assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into()); + let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External); + assert_eq!(spk, (0_u32, &external_spk_0)); + assert_eq!(changeset.as_inner(), &[].into()); + + // - derived till 25 + // - used all spks till 15. 
+ // - used list : [0..=15, 17, 20, 23] + // - unused list: [16, 18, 19, 21, 22, 24, 25] + + // - next_derivation_index() = (26, true) + // - derive_new() = ((26, ), DerivationAdditions) + // - next_unused() == ((16, ), DerivationAdditions::is_empty()) + let _ = txout_index.reveal_to_target(&TestKeychain::External, 25); + + (0..=15) + .into_iter() + .chain(vec![17, 20, 23].into_iter()) + .for_each(|index| assert!(txout_index.mark_used(&TestKeychain::External, index))); + + assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true)); + + let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External); + assert_eq!(spk, (26, &external_spk_26)); + + assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 26)].into()); + + let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External); + assert_eq!(spk, (16, &external_spk_16)); + assert_eq!(changeset.as_inner(), &[].into()); + + // - Use all the derived till 26. + // - next_unused() = ((27, ), DerivationAdditions) + (0..=26).into_iter().for_each(|index| { + txout_index.mark_used(&TestKeychain::External, index); + }); + + let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External); + assert_eq!(spk, (27, &external_spk_27)); + assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 27)].into()); +} + +#[test] +fn test_non_wildcard_derivations() { + let mut txout_index = KeychainTxOutIndex::::default(); + + let secp = bitcoin::secp256k1::Secp256k1::signing_only(); + let (no_wildcard_descriptor, _) = Descriptor::::parse_descriptor(&secp, "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)").unwrap(); + let external_spk = no_wildcard_descriptor + .at_derivation_index(0) + .script_pubkey(); + + txout_index.add_keychain(TestKeychain::External, no_wildcard_descriptor); + + // given: + // - `txout_index` with no stored scripts + // expect: + // - next derivation index should be 
new + // - when we derive a new script, script @ index 0 + // - when we get the next unused script, script @ index 0 + assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true)); + let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External); + assert_eq!(spk, (0, &external_spk)); + assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into()); + + let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External); + assert_eq!(spk, (0, &external_spk)); + assert_eq!(changeset.as_inner(), &[].into()); + + // given: + // - the non-wildcard descriptor already has a stored and used script + // expect: + // - next derivation index should not be new + // - derive new and next unused should return the old script + // - store_up_to should not panic and return empty additions + assert_eq!(txout_index.next_index(&TestKeychain::External), (0, false)); + txout_index.mark_used(&TestKeychain::External, 0); + + let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External); + assert_eq!(spk, (0, &external_spk)); + assert_eq!(changeset.as_inner(), &[].into()); + + let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External); + assert_eq!(spk, (0, &external_spk)); + assert_eq!(changeset.as_inner(), &[].into()); + let (revealed_spks, revealed_additions) = + txout_index.reveal_to_target(&TestKeychain::External, 200); + assert_eq!(revealed_spks.count(), 0); + assert!(revealed_additions.is_empty()); +} diff --git a/crates/chain/tests/test_sparse_chain.rs b/crates/chain/tests/test_sparse_chain.rs new file mode 100644 index 000000000..ba8b23b88 --- /dev/null +++ b/crates/chain/tests/test_sparse_chain.rs @@ -0,0 +1,773 @@ +#[macro_use] +mod common; + +use bdk_chain::{collections::BTreeSet, sparse_chain::*, BlockId, TxHeight}; +use bitcoin::{hashes::Hash, Txid}; +use core::ops::Bound; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)] +pub struct TestIndex(TxHeight, u32); + +impl ChainPosition for 
TestIndex { + fn height(&self) -> TxHeight { + self.0 + } + + fn max_ord_of_height(height: TxHeight) -> Self { + Self(height, u32::MAX) + } + + fn min_ord_of_height(height: TxHeight) -> Self { + Self(height, u32::MIN) + } +} + +impl TestIndex { + pub fn new(height: H, ext: u32) -> Self + where + H: Into, + { + Self(height.into(), ext) + } +} + +#[test] +fn add_first_checkpoint() { + let chain = SparseChain::default(); + assert_eq!( + chain.determine_changeset(&chain!([0, h!("A")])), + Ok(changeset! { + checkpoints: [(0, Some(h!("A")))], + txids: [] + },), + "add first tip" + ); +} + +#[test] +fn add_second_tip() { + let chain = chain!([0, h!("A")]); + assert_eq!( + chain.determine_changeset(&chain!([0, h!("A")], [1, h!("B")])), + Ok(changeset! { + checkpoints: [(1, Some(h!("B")))], + txids: [] + },), + "extend tip by one" + ); +} + +#[test] +fn two_disjoint_chains_cannot_merge() { + let chain1 = chain!([0, h!("A")]); + let chain2 = chain!([1, h!("B")]); + assert_eq!( + chain1.determine_changeset(&chain2), + Err(UpdateError::NotConnected(0)) + ); +} + +#[test] +fn duplicate_chains_should_merge() { + let chain1 = chain!([0, h!("A")]); + let chain2 = chain!([0, h!("A")]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(ChangeSet::default()) + ); +} + +#[test] +fn duplicate_chains_with_txs_should_merge() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(ChangeSet::default()) + ); +} + +#[test] +fn duplicate_chains_with_different_txs_should_merge() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx1"), TxHeight::Confirmed(0))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [], + txids: [(h!("tx1"), Some(TxHeight::Confirmed(0)))] + }) + ); +} + +#[test] +fn invalidate_first_and_only_checkpoint_without_tx_changes() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [(0, Some(h!("A'")))], + txids: [] + },) + ); +} + +#[test] +fn invalidate_first_and_only_checkpoint_with_tx_move_forward() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + let chain2 = chain!(checkpoints: [[0,h!("A'")],[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [(0, Some(h!("A'"))), (1, Some(h!("B")))], + txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))] + },) + ); +} + +#[test] +fn invalidate_first_and_only_checkpoint_with_tx_move_backward() { + let chain1 = chain!(checkpoints: [[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); + let chain2 = chain!(checkpoints: [[0,h!("A")],[1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [(0, Some(h!("A"))), (1, Some(h!("B'")))], + txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] + },) + ); +} + +#[test] +fn invalidate_a_checkpoint_and_try_and_move_tx_when_it_wasnt_within_invalidation() { + let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Err(UpdateError::TxInconsistent { + txid: h!("tx0"), + original_pos: TxHeight::Confirmed(0), + update_pos: TxHeight::Confirmed(1), + }) + ); +} + +/// This test doesn't make much sense. We're invalidating a block at height 1 and moving it to +/// height 0. It should be impossible for it to be at height 1 at any point if it was at height 0 +/// all along. +#[test] +fn move_invalidated_tx_into_earlier_checkpoint() { + let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); + let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [(1, Some(h!("B'")))], + txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] + },) + ); +} + +#[test] +fn invalidate_first_and_only_checkpoint_with_tx_move_to_mempool() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [(0, Some(h!("A'")))], + txids: [(h!("tx0"), Some(TxHeight::Unconfirmed))] + },) + ); +} + +#[test] +fn confirm_tx_without_extending_chain() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); + let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [], + txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] + },) + ); +} + +#[test] +fn confirm_tx_backwards_while_extending_chain() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); + let chain2 = chain!(checkpoints: [[0,h!("A")],[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]); + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [(1, Some(h!("B")))], + txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))] + },) + ); +} + +#[test] +fn confirm_tx_in_new_block() { + let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); + let chain2 = chain! { + checkpoints: [[0,h!("A")], [1,h!("B")]], + txids: [(h!("tx0"), TxHeight::Confirmed(1))] + }; + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [(1, Some(h!("B")))], + txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))] + },) + ); +} + +#[test] +fn merging_mempool_of_empty_chains_doesnt_fail() { + let chain1 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); + let chain2 = chain!(checkpoints: [], txids: [(h!("tx1"), TxHeight::Unconfirmed)]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [], + txids: [(h!("tx1"), Some(TxHeight::Unconfirmed))] + },) + ); +} + +#[test] +fn cannot_insert_confirmed_tx_without_checkpoints() { + let chain = SparseChain::default(); + assert_eq!( + chain.insert_tx_preview(h!("A"), TxHeight::Confirmed(0)), + Err(InsertTxError::TxTooHigh { + txid: h!("A"), + tx_height: 0, + tip_height: None + }) + ); +} + +#[test] +fn empty_chain_can_add_unconfirmed_transactions() { + let chain1 = chain!(checkpoints: [[0, h!("A")]], txids: []); + let chain2 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [], + txids: [ (h!("tx0"), Some(TxHeight::Unconfirmed)) ] + },) + ); +} + +#[test] +fn can_update_with_shorter_chain() { + let chain1 = chain!(checkpoints: [[1, h!("B")],[2, h!("C")]], txids: []); + let chain2 = chain!(checkpoints: [[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [], + txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))] + },) + ) +} + +#[test] +fn can_introduce_older_checkpoints() { + let chain1 = chain!(checkpoints: [[2, h!("C")], [3, h!("D")]], txids: []); + let chain2 = chain!(checkpoints: [[1, h!("B")], [2, h!("C")]], txids: []); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [(1, Some(h!("B")))], + txids: [] + },) + ); +} + +#[test] +fn fix_blockhash_before_agreement_point() { + let chain1 = chain!([0, h!("im-wrong")], [1, h!("we-agree")]); + let chain2 = chain!([0, h!("fix")], [1, h!("we-agree")]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [(0, Some(h!("fix")))], + txids: [] + },) + ) +} + +// TODO: Use macro +#[test] +fn cannot_change_ext_index_of_confirmed_tx() { + let chain1 = chain!( + index: TestIndex, + checkpoints: [[1, h!("A")]], + txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 10))] + ); + let chain2 = chain!( + index: TestIndex, + checkpoints: [[1, h!("A")]], + txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 20))] + ); + + assert_eq!( + chain1.determine_changeset(&chain2), + Err(UpdateError::TxInconsistent { + txid: h!("tx0"), + original_pos: TestIndex(TxHeight::Confirmed(1), 10), + update_pos: TestIndex(TxHeight::Confirmed(1), 20), + }), + ) +} + +#[test] +fn can_change_index_of_unconfirmed_tx() { + let chain1 = chain!( + index: TestIndex, + checkpoints: [[1, h!("A")]], + txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 10))] + ); + let chain2 = chain!( + index: TestIndex, + checkpoints: [[1, h!("A")]], + txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 20))] + ); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(ChangeSet { + checkpoints: [].into(), + txids: [(h!("tx1"), Some(TestIndex(TxHeight::Unconfirmed, 20)),)].into() + },), + ) +} + +/// B and C are in both chain and update +/// ``` +/// | 0 | 1 | 2 | 3 | 4 +/// chain | B C +/// update | A B C D +/// ``` +/// This should succeed with the point of agreement being C and A should be added in addition. +#[test] +fn two_points_of_agreement() { + let chain1 = chain!([1, h!("B")], [2, h!("C")]); + let chain2 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [3, h!("D")]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [(0, Some(h!("A"))), (3, Some(h!("D")))] + },), + ); +} + +/// Update and chain does not connect: +/// ``` +/// | 0 | 1 | 2 | 3 | 4 +/// chain | B C +/// update | A B D +/// ``` +/// This should fail as we cannot figure out whether C & D are on the same chain +#[test] +fn update_and_chain_does_not_connect() { + let chain1 = chain!([1, h!("B")], [2, h!("C")]); + let chain2 = chain!([0, h!("A")], [1, h!("B")], [3, h!("D")]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Err(UpdateError::NotConnected(2)), + ); +} + +/// Transient invalidation: +/// ``` +/// | 0 | 1 | 2 | 3 | 4 | 5 +/// chain | A B C E +/// update | A B' C' D +/// ``` +/// This should succeed and invalidate B,C and E with point of agreement being A. +/// It should also invalidate transactions at height 1. +#[test] +fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation() { + let chain1 = chain! { + checkpoints: [[0, h!("A")], [2, h!("B")], [3, h!("C")], [5, h!("E")]], + txids: [ + (h!("a"), TxHeight::Confirmed(0)), + (h!("b1"), TxHeight::Confirmed(1)), + (h!("b2"), TxHeight::Confirmed(2)), + (h!("d"), TxHeight::Confirmed(3)), + (h!("e"), TxHeight::Confirmed(5)) + ] + }; + let chain2 = chain! { + checkpoints: [[0, h!("A")], [2, h!("B'")], [3, h!("C'")], [4, h!("D")]], + txids: [(h!("b1"), TxHeight::Confirmed(4)), (h!("b2"), TxHeight::Confirmed(3))] + }; + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [ + (2, Some(h!("B'"))), + (3, Some(h!("C'"))), + (4, Some(h!("D"))), + (5, None) + ], + txids: [ + (h!("b1"), Some(TxHeight::Confirmed(4))), + (h!("b2"), Some(TxHeight::Confirmed(3))), + (h!("d"), Some(TxHeight::Unconfirmed)), + (h!("e"), Some(TxHeight::Unconfirmed)) + ] + },) + ); +} + +/// Transient invalidation: +/// ``` +/// | 0 | 1 | 2 | 3 | 4 +/// chain | B C E +/// update | B' C' D +/// ``` +/// +/// This should succeed and invalidate B, C and E with no point of agreement +#[test] +fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation_no_point_of_agreement() { + let chain1 = chain!([1, h!("B")], [2, h!("C")], [4, h!("E")]); + let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! { + checkpoints: [ + (1, Some(h!("B'"))), + (2, Some(h!("C'"))), + (3, Some(h!("D"))), + (4, None) + ] + },) + ) +} + +/// Transient invalidation: +/// ``` +/// | 0 | 1 | 2 | 3 | 4 +/// chain | A B C E +/// update | B' C' D +/// ``` +/// +/// This should fail since although it tells us that B and C are invalid it doesn't tell us whether +/// A was invalid. 
+#[test] +fn invalidation_but_no_connection() { + let chain1 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [4, h!("E")]); + let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Err(UpdateError::NotConnected(0)) + ) +} + +#[test] +fn checkpoint_limit_is_respected() { + let mut chain1 = SparseChain::default(); + let _ = chain1 + .apply_update(chain!( + [1, h!("A")], + [2, h!("B")], + [3, h!("C")], + [4, h!("D")], + [5, h!("E")] + )) + .unwrap(); + + assert_eq!(chain1.checkpoints().len(), 5); + chain1.set_checkpoint_limit(Some(4)); + assert_eq!(chain1.checkpoints().len(), 4); + + let _ = chain1 + .insert_checkpoint(BlockId { + height: 6, + hash: h!("F"), + }) + .unwrap(); + assert_eq!(chain1.checkpoints().len(), 4); + + let changeset = chain1.determine_changeset(&chain!([6, h!("F")], [7, h!("G")])); + assert_eq!(changeset, Ok(changeset!(checkpoints: [(7, Some(h!("G")))]))); + + chain1.apply_changeset(changeset.unwrap()); + + assert_eq!(chain1.checkpoints().len(), 4); +} + +#[test] +fn range_txids_by_height() { + let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")], [2, h!("block 2")]]); + + let txids: [(TestIndex, Txid); 4] = [ + ( + TestIndex(TxHeight::Confirmed(1), u32::MIN), + Txid::from_inner([0x00; 32]), + ), + ( + TestIndex(TxHeight::Confirmed(1), u32::MAX), + Txid::from_inner([0xfe; 32]), + ), + ( + TestIndex(TxHeight::Confirmed(2), u32::MIN), + Txid::from_inner([0x01; 32]), + ), + ( + TestIndex(TxHeight::Confirmed(2), u32::MAX), + Txid::from_inner([0xff; 32]), + ), + ]; + + // populate chain with txids + for (index, txid) in txids { + let _ = chain.insert_tx(txid, index).expect("should succeed"); + } + + // inclusive start + assert_eq!( + chain + .range_txids_by_height(TxHeight::Confirmed(1)..) 
+ .collect::>(), + txids.iter().collect::>(), + ); + + // exclusive start + assert_eq!( + chain + .range_txids_by_height((Bound::Excluded(TxHeight::Confirmed(1)), Bound::Unbounded,)) + .collect::>(), + txids[2..].iter().collect::>(), + ); + + // inclusive end + assert_eq!( + chain + .range_txids_by_height((Bound::Unbounded, Bound::Included(TxHeight::Confirmed(2)))) + .collect::>(), + txids[..4].iter().collect::>(), + ); + + // exclusive end + assert_eq!( + chain + .range_txids_by_height(..TxHeight::Confirmed(2)) + .collect::>(), + txids[..2].iter().collect::>(), + ); +} + +#[test] +fn range_txids_by_index() { + let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")],[2, h!("block 2")]]); + + let txids: [(TestIndex, Txid); 4] = [ + (TestIndex(TxHeight::Confirmed(1), u32::MIN), h!("tx 1 min")), + (TestIndex(TxHeight::Confirmed(1), u32::MAX), h!("tx 1 max")), + (TestIndex(TxHeight::Confirmed(2), u32::MIN), h!("tx 2 min")), + (TestIndex(TxHeight::Confirmed(2), u32::MAX), h!("tx 2 max")), + ]; + + // populate chain with txids + for (index, txid) in txids { + let _ = chain.insert_tx(txid, index).expect("should succeed"); + } + + // inclusive start + assert_eq!( + chain + .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MIN)..) + .collect::>(), + txids.iter().collect::>(), + ); + assert_eq!( + chain + .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MAX)..) 
+ .collect::>(), + txids[1..].iter().collect::>(), + ); + + // exclusive start + assert_eq!( + chain + .range_txids_by_position(( + Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MIN)), + Bound::Unbounded + )) + .collect::>(), + txids[1..].iter().collect::>(), + ); + assert_eq!( + chain + .range_txids_by_position(( + Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MAX)), + Bound::Unbounded + )) + .collect::>(), + txids[2..].iter().collect::>(), + ); + + // inclusive end + assert_eq!( + chain + .range_txids_by_position(( + Bound::Unbounded, + Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MIN)) + )) + .collect::>(), + txids[..3].iter().collect::>(), + ); + assert_eq!( + chain + .range_txids_by_position(( + Bound::Unbounded, + Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MAX)) + )) + .collect::>(), + txids[..4].iter().collect::>(), + ); + + // exclusive end + assert_eq!( + chain + .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MIN)) + .collect::>(), + txids[..2].iter().collect::>(), + ); + assert_eq!( + chain + .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MAX)) + .collect::>(), + txids[..3].iter().collect::>(), + ); +} + +#[test] +fn range_txids() { + let mut chain = SparseChain::default(); + + let txids = (0..100) + .map(|v| Txid::hash(v.to_string().as_bytes())) + .collect::>(); + + // populate chain + for txid in &txids { + let _ = chain + .insert_tx(*txid, TxHeight::Unconfirmed) + .expect("should succeed"); + } + + for txid in &txids { + assert_eq!( + chain + .range_txids((TxHeight::Unconfirmed, *txid)..) 
+ .map(|(_, txid)| txid) + .collect::>(), + txids.range(*txid..).collect::>(), + "range with inclusive start should succeed" + ); + + assert_eq!( + chain + .range_txids(( + Bound::Excluded((TxHeight::Unconfirmed, *txid)), + Bound::Unbounded, + )) + .map(|(_, txid)| txid) + .collect::>(), + txids + .range((Bound::Excluded(*txid), Bound::Unbounded,)) + .collect::>(), + "range with exclusive start should succeed" + ); + + assert_eq!( + chain + .range_txids(..(TxHeight::Unconfirmed, *txid)) + .map(|(_, txid)| txid) + .collect::>(), + txids.range(..*txid).collect::>(), + "range with exclusive end should succeed" + ); + + assert_eq!( + chain + .range_txids(( + Bound::Included((TxHeight::Unconfirmed, *txid)), + Bound::Unbounded, + )) + .map(|(_, txid)| txid) + .collect::>(), + txids + .range((Bound::Included(*txid), Bound::Unbounded,)) + .collect::>(), + "range with inclusive end should succeed" + ); + } +} + +#[test] +fn invalidated_txs_move_to_unconfirmed() { + let chain1 = chain! { + checkpoints: [[0, h!("A")], [1, h!("B")], [2, h!("C")]], + txids: [ + (h!("a"), TxHeight::Confirmed(0)), + (h!("b"), TxHeight::Confirmed(1)), + (h!("c"), TxHeight::Confirmed(2)), + (h!("d"), TxHeight::Unconfirmed) + ] + }; + + let chain2 = chain!([0, h!("A")], [1, h!("B'")]); + + assert_eq!( + chain1.determine_changeset(&chain2), + Ok(changeset! 
{ + checkpoints: [ + (1, Some(h!("B'"))), + (2, None) + ], + txids: [ + (h!("b"), Some(TxHeight::Unconfirmed)), + (h!("c"), Some(TxHeight::Unconfirmed)) + ] + },) + ); +} + +#[test] +fn change_tx_position_from_unconfirmed_to_confirmed() { + let mut chain = SparseChain::::default(); + let txid = h!("txid"); + + let _ = chain.insert_tx(txid, TxHeight::Unconfirmed).unwrap(); + + assert_eq!(chain.tx_position(txid), Some(&TxHeight::Unconfirmed)); + let _ = chain + .insert_checkpoint(BlockId { + height: 0, + hash: h!("0"), + }) + .unwrap(); + let _ = chain.insert_tx(txid, TxHeight::Confirmed(0)).unwrap(); + + assert_eq!(chain.tx_position(txid), Some(&TxHeight::Confirmed(0))); +} diff --git a/crates/chain/tests/test_spk_txout_index.rs b/crates/chain/tests/test_spk_txout_index.rs new file mode 100644 index 000000000..ada5a1973 --- /dev/null +++ b/crates/chain/tests/test_spk_txout_index.rs @@ -0,0 +1,100 @@ +use bdk_chain::SpkTxOutIndex; +use bitcoin::{hashes::hex::FromHex, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut}; + +#[test] +fn spk_txout_sent_and_received() { + let spk1 = Script::from_hex("001404f1e52ce2bab3423c6a8c63b7cd730d8f12542c").unwrap(); + let spk2 = Script::from_hex("00142b57404ae14f08c3a0c903feb2af7830605eb00f").unwrap(); + + let mut index = SpkTxOutIndex::default(); + index.insert_spk(0, spk1.clone()); + index.insert_spk(1, spk2.clone()); + + let tx1 = Transaction { + version: 0x02, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 42_000, + script_pubkey: spk1.clone(), + }], + }; + + assert_eq!(index.sent_and_received(&tx1), (0, 42_000)); + assert_eq!(index.net_value(&tx1), 42_000); + index.scan(&tx1); + assert_eq!( + index.sent_and_received(&tx1), + (0, 42_000), + "shouldn't change after scanning" + ); + + let tx2 = Transaction { + version: 0x1, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint { + txid: tx1.txid(), + vout: 0, + }, + ..Default::default() + }], + output: vec![ + 
TxOut { + value: 20_000, + script_pubkey: spk2, + }, + TxOut { + script_pubkey: spk1, + value: 30_000, + }, + ], + }; + + assert_eq!(index.sent_and_received(&tx2), (42_000, 50_000)); + assert_eq!(index.net_value(&tx2), 8_000); +} + +#[test] +fn mark_used() { + let spk1 = Script::from_hex("001404f1e52ce2bab3423c6a8c63b7cd730d8f12542c").unwrap(); + let spk2 = Script::from_hex("00142b57404ae14f08c3a0c903feb2af7830605eb00f").unwrap(); + + let mut spk_index = SpkTxOutIndex::default(); + spk_index.insert_spk(1, spk1.clone()); + spk_index.insert_spk(2, spk2); + + assert!(!spk_index.is_used(&1)); + spk_index.mark_used(&1); + assert!(spk_index.is_used(&1)); + spk_index.unmark_used(&1); + assert!(!spk_index.is_used(&1)); + spk_index.mark_used(&1); + assert!(spk_index.is_used(&1)); + + let tx1 = Transaction { + version: 0x02, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 42_000, + script_pubkey: spk1, + }], + }; + + spk_index.scan(&tx1); + spk_index.unmark_used(&1); + assert!( + spk_index.is_used(&1), + "even though we unmark_used it doesn't matter because there was a tx scanned that used it" + ); +} + +#[test] +fn unmark_used_does_not_result_in_invalid_representation() { + let mut spk_index = SpkTxOutIndex::default(); + assert!(!spk_index.unmark_used(&0)); + assert!(!spk_index.unmark_used(&1)); + assert!(!spk_index.unmark_used(&2)); + assert!(spk_index.unused_spks(..).collect::>().is_empty()); +} diff --git a/crates/chain/tests/test_tx_graph.rs b/crates/chain/tests/test_tx_graph.rs new file mode 100644 index 000000000..04974bf30 --- /dev/null +++ b/crates/chain/tests/test_tx_graph.rs @@ -0,0 +1,512 @@ +#[macro_use] +mod common; +use bdk_chain::{ + collections::*, + tx_graph::{Additions, TxGraph}, +}; +use bitcoin::{hashes::Hash, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut, Txid}; +use core::iter; + +#[test] +fn insert_txouts() { + let original_ops = [ + ( + OutPoint::new(h!("tx1"), 1), + TxOut { + value: 10_000, + 
script_pubkey: Script::new(), + }, + ), + ( + OutPoint::new(h!("tx1"), 2), + TxOut { + value: 20_000, + script_pubkey: Script::new(), + }, + ), + ]; + + let update_ops = [( + OutPoint::new(h!("tx2"), 0), + TxOut { + value: 20_000, + script_pubkey: Script::new(), + }, + )]; + + let mut graph = { + let mut graph = TxGraph::default(); + for (outpoint, txout) in &original_ops { + assert_eq!( + graph.insert_txout(*outpoint, txout.clone()), + Additions { + txout: [(*outpoint, txout.clone())].into(), + ..Default::default() + } + ); + } + graph + }; + + let update = { + let mut graph = TxGraph::default(); + for (outpoint, txout) in &update_ops { + assert_eq!( + graph.insert_txout(*outpoint, txout.clone()), + Additions { + txout: [(*outpoint, txout.clone())].into(), + ..Default::default() + } + ); + } + graph + }; + + let additions = graph.determine_additions(&update); + + assert_eq!( + additions, + Additions { + tx: [].into(), + txout: update_ops.into(), + } + ); + + graph.apply_additions(additions); + assert_eq!(graph.all_txouts().count(), 3); + assert_eq!(graph.full_transactions().count(), 0); + assert_eq!(graph.partial_transactions().count(), 2); +} + +#[test] +fn insert_tx_graph_doesnt_count_coinbase_as_spent() { + let tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::null(), + ..Default::default() + }], + output: vec![], + }; + + let mut graph = TxGraph::default(); + let _ = graph.insert_tx(tx); + assert!(graph.outspends(OutPoint::null()).is_empty()); + assert!(graph.tx_outspends(Txid::all_zeros()).next().is_none()); +} + +#[test] +fn insert_tx_graph_keeps_track_of_spend() { + let tx1 = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut::default()], + }; + + let op = OutPoint { + txid: tx1.txid(), + vout: 0, + }; + + let tx2 = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: op, + 
..Default::default() + }], + output: vec![], + }; + + let mut graph1 = TxGraph::default(); + let mut graph2 = TxGraph::default(); + + // insert in different order + let _ = graph1.insert_tx(tx1.clone()); + let _ = graph1.insert_tx(tx2.clone()); + + let _ = graph2.insert_tx(tx2.clone()); + let _ = graph2.insert_tx(tx1); + + assert_eq!( + graph1.outspends(op), + &iter::once(tx2.txid()).collect::>() + ); + assert_eq!(graph2.outspends(op), graph1.outspends(op)); +} + +#[test] +fn insert_tx_can_retrieve_full_tx_from_graph() { + let tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::null(), + ..Default::default() + }], + output: vec![TxOut::default()], + }; + + let mut graph = TxGraph::default(); + let _ = graph.insert_tx(tx.clone()); + assert_eq!(graph.get_tx(tx.txid()), Some(&tx)); +} + +#[test] +fn insert_tx_displaces_txouts() { + let mut tx_graph = TxGraph::default(); + let tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 42_000, + script_pubkey: Script::default(), + }], + }; + + let _ = tx_graph.insert_txout( + OutPoint { + txid: tx.txid(), + vout: 0, + }, + TxOut { + value: 1_337_000, + script_pubkey: Script::default(), + }, + ); + + let _ = tx_graph.insert_txout( + OutPoint { + txid: tx.txid(), + vout: 0, + }, + TxOut { + value: 1_000_000_000, + script_pubkey: Script::default(), + }, + ); + + let _additions = tx_graph.insert_tx(tx.clone()); + + assert_eq!( + tx_graph + .get_txout(OutPoint { + txid: tx.txid(), + vout: 0 + }) + .unwrap() + .value, + 42_000 + ); + assert_eq!( + tx_graph.get_txout(OutPoint { + txid: tx.txid(), + vout: 1 + }), + None + ); +} + +#[test] +fn insert_txout_does_not_displace_tx() { + let mut tx_graph = TxGraph::default(); + let tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 42_000, + script_pubkey: Script::default(), + }], + }; + + let 
_additions = tx_graph.insert_tx(tx.clone()); + + let _ = tx_graph.insert_txout( + OutPoint { + txid: tx.txid(), + vout: 0, + }, + TxOut { + value: 1_337_000, + script_pubkey: Script::default(), + }, + ); + + let _ = tx_graph.insert_txout( + OutPoint { + txid: tx.txid(), + vout: 0, + }, + TxOut { + value: 1_000_000_000, + script_pubkey: Script::default(), + }, + ); + + assert_eq!( + tx_graph + .get_txout(OutPoint { + txid: tx.txid(), + vout: 0 + }) + .unwrap() + .value, + 42_000 + ); + assert_eq!( + tx_graph.get_txout(OutPoint { + txid: tx.txid(), + vout: 1 + }), + None + ); +} + +#[test] +fn test_calculate_fee() { + let mut graph = TxGraph::default(); + let intx1 = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 100, + ..Default::default() + }], + }; + let intx2 = Transaction { + version: 0x02, + lock_time: PackedLockTime(0), + input: vec![], + output: vec![TxOut { + value: 200, + ..Default::default() + }], + }; + + let intxout1 = ( + OutPoint { + txid: h!("dangling output"), + vout: 0, + }, + TxOut { + value: 300, + ..Default::default() + }, + ); + + let _ = graph.insert_tx(intx1.clone()); + let _ = graph.insert_tx(intx2.clone()); + let _ = graph.insert_txout(intxout1.0, intxout1.1); + + let mut tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![ + TxIn { + previous_output: OutPoint { + txid: intx1.txid(), + vout: 0, + }, + ..Default::default() + }, + TxIn { + previous_output: OutPoint { + txid: intx2.txid(), + vout: 0, + }, + ..Default::default() + }, + TxIn { + previous_output: intxout1.0, + ..Default::default() + }, + ], + output: vec![TxOut { + value: 500, + ..Default::default() + }], + }; + + assert_eq!(graph.calculate_fee(&tx), Some(100)); + + tx.input.remove(2); + + // fee would be negative + assert_eq!(graph.calculate_fee(&tx), Some(-200)); + + // If we have an unknown outpoint, fee should return None. 
+ tx.input.push(TxIn { + previous_output: OutPoint { + txid: h!("unknown_txid"), + vout: 0, + }, + ..Default::default() + }); + assert_eq!(graph.calculate_fee(&tx), None); +} + +#[test] +fn test_calculate_fee_on_coinbase() { + let tx = Transaction { + version: 0x01, + lock_time: PackedLockTime(0), + input: vec![TxIn { + previous_output: OutPoint::null(), + ..Default::default() + }], + output: vec![TxOut::default()], + }; + + let graph = TxGraph::default(); + + assert_eq!(graph.calculate_fee(&tx), Some(0)); +} + +#[test] +fn test_conflicting_descendants() { + let previous_output = OutPoint::new(h!("op"), 2); + + // tx_a spends previous_output + let tx_a = Transaction { + input: vec![TxIn { + previous_output, + ..TxIn::default() + }], + output: vec![TxOut::default()], + ..common::new_tx(0) + }; + + // tx_a2 spends previous_output and conflicts with tx_a + let tx_a2 = Transaction { + input: vec![TxIn { + previous_output, + ..TxIn::default() + }], + output: vec![TxOut::default(), TxOut::default()], + ..common::new_tx(1) + }; + + // tx_b spends tx_a + let tx_b = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_a.txid(), 0), + ..TxIn::default() + }], + output: vec![TxOut::default()], + ..common::new_tx(2) + }; + + let txid_a = tx_a.txid(); + let txid_b = tx_b.txid(); + + let mut graph = TxGraph::default(); + let _ = graph.insert_tx(tx_a); + let _ = graph.insert_tx(tx_b); + + assert_eq!( + graph + .walk_conflicts(&tx_a2, |depth, txid| Some((depth, txid))) + .collect::>(), + vec![(0_usize, txid_a), (1_usize, txid_b),], + ); +} + +#[test] +fn test_descendants_no_repeat() { + let tx_a = Transaction { + output: vec![TxOut::default(), TxOut::default(), TxOut::default()], + ..common::new_tx(0) + }; + + let txs_b = (0..3) + .map(|vout| Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_a.txid(), vout), + ..TxIn::default() + }], + output: vec![TxOut::default()], + ..common::new_tx(1) + }) + .collect::>(); + + let txs_c = (0..2) + 
.map(|vout| Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(txs_b[vout as usize].txid(), vout), + ..TxIn::default() + }], + output: vec![TxOut::default()], + ..common::new_tx(2) + }) + .collect::>(); + + let tx_d = Transaction { + input: vec![ + TxIn { + previous_output: OutPoint::new(txs_c[0].txid(), 0), + ..TxIn::default() + }, + TxIn { + previous_output: OutPoint::new(txs_c[1].txid(), 0), + ..TxIn::default() + }, + ], + output: vec![TxOut::default()], + ..common::new_tx(3) + }; + + let tx_e = Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(tx_d.txid(), 0), + ..TxIn::default() + }], + output: vec![TxOut::default()], + ..common::new_tx(4) + }; + + let txs_not_connected = (10..20) + .map(|v| Transaction { + input: vec![TxIn { + previous_output: OutPoint::new(h!("tx_does_not_exist"), v), + ..TxIn::default() + }], + output: vec![TxOut::default()], + ..common::new_tx(v) + }) + .collect::>(); + + let mut graph = TxGraph::default(); + let mut expected_txids = BTreeSet::new(); + + // these are NOT descendants of `tx_a` + for tx in txs_not_connected { + let _ = graph.insert_tx(tx.clone()); + } + + // these are the expected descendants of `tx_a` + for tx in txs_b + .iter() + .chain(&txs_c) + .chain(core::iter::once(&tx_d)) + .chain(core::iter::once(&tx_e)) + { + let _ = graph.insert_tx(tx.clone()); + assert!(expected_txids.insert(tx.txid())); + } + + let descendants = graph + .walk_descendants(tx_a.txid(), |_, txid| Some(txid)) + .collect::>(); + + assert_eq!(descendants.len(), expected_txids.len()); + + for txid in descendants { + assert!(expected_txids.remove(&txid)); + } + assert!(expected_txids.is_empty()); +} diff --git a/crates/electrum/Cargo.toml b/crates/electrum/Cargo.toml new file mode 100644 index 000000000..a7a7ec2b4 --- /dev/null +++ b/crates/electrum/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "bdk_electrum" +version = "0.1.0" +edition = "2021" +homepage = "https://bitcoindevkit.org" +repository = 
"https://github.com/bitcoindevkit/bdk" +documentation = "https://docs.rs/bdk_electrum" +description = "Fetch data from electrum in the form BDK accepts" +license = "MIT OR Apache-2.0" +readme = "README.md" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../chain", version = "0.3.1", features = ["serde", "miniscript"] } +electrum-client = { version = "0.12" } diff --git a/crates/electrum/README.md b/crates/electrum/README.md new file mode 100644 index 000000000..d3ada695e --- /dev/null +++ b/crates/electrum/README.md @@ -0,0 +1,3 @@ +# BDK Electrum + +BDK Electrum client library for updating the keychain tracker. diff --git a/crates/electrum/src/lib.rs b/crates/electrum/src/lib.rs new file mode 100644 index 000000000..288c05c62 --- /dev/null +++ b/crates/electrum/src/lib.rs @@ -0,0 +1,588 @@ +//! This crate is used for updating structures of the [`bdk_chain`] crate with data from electrum. +//! +//! The star of the show is the [`ElectrumExt::scan`] method, which scans for relevant blockchain +//! data (via electrum) and outputs an [`ElectrumUpdate`]. +//! +//! An [`ElectrumUpdate`] only includes `txid`s and no full transactions. The caller is responsible +//! for obtaining full transactions before applying. This can be done with +//! these steps: +//! +//! 1. Determine which full transactions are missing. The method [`missing_full_txs`] of +//! [`ElectrumUpdate`] can be used. +//! +//! 2. Obtaining the full transactions. To do this via electrum, the method +//! [`batch_transaction_get`] can be used. +//! +//! Refer to [`bdk_electrum_example`] for a complete example. +//! +//! [`ElectrumClient::scan`]: ElectrumClient::scan +//! [`missing_full_txs`]: ElectrumUpdate::missing_full_txs +//! [`batch_transaction_get`]: ElectrumApi::batch_transaction_get +//! 
[`bdk_electrum_example`]: https://github.com/LLFourn/bdk_core_staging/tree/master/bdk_electrum_example + +use std::{ + collections::{BTreeMap, HashMap}, + fmt::Debug, +}; + +pub use bdk_chain; +use bdk_chain::{ + bitcoin::{hashes::hex::FromHex, BlockHash, OutPoint, Script, Transaction, Txid}, + chain_graph::{self, ChainGraph}, + keychain::KeychainScan, + sparse_chain::{self, ChainPosition, SparseChain}, + tx_graph::TxGraph, + BlockId, ConfirmationTime, TxHeight, +}; +pub use electrum_client; +use electrum_client::{Client, ElectrumApi, Error}; + +/// Trait to extend [`electrum_client::Client`] functionality. +/// +/// Refer to [crate-level documentation] for more. +/// +/// [crate-level documentation]: crate +pub trait ElectrumExt { + /// Fetch the latest block height. + fn get_tip(&self) -> Result<(u32, BlockHash), Error>; + + /// Scan the blockchain (via electrum) for the data specified. This returns a [`ElectrumUpdate`] + /// which can be transformed into a [`KeychainScan`] after we find all the missing full + /// transactions. + /// + /// - `local_chain`: the most recent block hashes present locally + /// - `keychain_spks`: keychains that we want to scan transactions for + /// - `txids`: transactions that we want updated [`ChainPosition`]s for + /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we + /// want to included in the update + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + batch_size: usize, + ) -> Result, Error>; + + /// Convenience method to call [`scan`] without requiring a keychain. 
+ /// + /// [`scan`]: ElectrumExt::scan + fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + batch_size: usize, + ) -> Result { + let spk_iter = misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)); + + self.scan( + local_chain, + [((), spk_iter)].into(), + txids, + outpoints, + usize::MAX, + batch_size, + ) + .map(|u| u.chain_update) + } +} + +impl ElectrumExt for Client { + fn get_tip(&self) -> Result<(u32, BlockHash), Error> { + // TODO: unsubscribe when added to the client, or is there a better call to use here? + self.block_headers_subscribe() + .map(|data| (data.height as u32, data.header.block_hash())) + } + + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + batch_size: usize, + ) -> Result, Error> { + let mut request_spks = keychain_spks + .into_iter() + .map(|(k, s)| { + let iter = s.into_iter(); + (k, iter) + }) + .collect::>(); + let mut scanned_spks = BTreeMap::<(K, u32), (Script, bool)>::new(); + + let txids = txids.into_iter().collect::>(); + let outpoints = outpoints.into_iter().collect::>(); + + let update = loop { + let mut update = prepare_update(self, local_chain)?; + + if !request_spks.is_empty() { + if !scanned_spks.is_empty() { + let mut scanned_spk_iter = scanned_spks + .iter() + .map(|(i, (spk, _))| (i.clone(), spk.clone())); + match populate_with_spks::( + self, + &mut update, + &mut scanned_spk_iter, + stop_gap, + batch_size, + ) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(mut spks) => scanned_spks.append(&mut spks), + }; + } + for (keychain, keychain_spks) in &mut request_spks { + match populate_with_spks::( + self, + &mut update, + keychain_spks, + stop_gap, + batch_size, + ) { + Err(InternalError::Reorg) => continue, + 
Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(spks) => scanned_spks.extend( + spks.into_iter() + .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), + ), + }; + } + } + + match populate_with_txids(self, &mut update, &mut txids.iter().cloned()) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(_) => {} + } + + match populate_with_outpoints(self, &mut update, &mut outpoints.iter().cloned()) { + Err(InternalError::Reorg) => continue, + Err(InternalError::ElectrumError(e)) => return Err(e), + Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ } + } + + // check for reorgs during scan process + let our_tip = update + .latest_checkpoint() + .expect("update must have atleast one checkpoint"); + let server_blockhash = self.block_header(our_tip.height as usize)?.block_hash(); + if our_tip.hash != server_blockhash { + continue; // reorg + } else { + break update; + } + }; + + let last_active_index = request_spks + .into_keys() + .filter_map(|k| { + scanned_spks + .range((k.clone(), u32::MIN)..=(k.clone(), u32::MAX)) + .rev() + .find(|(_, (_, active))| *active) + .map(|((_, i), _)| (k, *i)) + }) + .collect::>(); + + Ok(ElectrumUpdate { + chain_update: update, + last_active_indices: last_active_index, + }) + } +} + +/// The result of [`ElectrumExt::scan`]. +pub struct ElectrumUpdate { + /// The internal [`SparseChain`] update. + pub chain_update: SparseChain

, + /// The last keychain script pubkey indices which had transaction histories. + pub last_active_indices: BTreeMap, +} + +impl Default for ElectrumUpdate { + fn default() -> Self { + Self { + chain_update: Default::default(), + last_active_indices: Default::default(), + } + } +} + +impl AsRef> for ElectrumUpdate { + fn as_ref(&self) -> &SparseChain

{ + &self.chain_update + } +} + +impl ElectrumUpdate { + /// Return a list of missing full transactions that are required to [`inflate_update`]. + /// + /// [`inflate_update`]: bdk_chain::chain_graph::ChainGraph::inflate_update + pub fn missing_full_txs(&self, graph: G) -> Vec<&Txid> + where + G: AsRef, + { + self.chain_update + .txids() + .filter(|(_, txid)| graph.as_ref().get_tx(*txid).is_none()) + .map(|(_, txid)| txid) + .collect() + } + + /// Transform the [`ElectrumUpdate`] into a [`KeychainScan`] which can be applied to a + /// `tracker`. + /// + /// This will fail if there are missing full transactions not provided via `new_txs`. + pub fn into_keychain_scan( + self, + new_txs: Vec, + chain_graph: &CG, + ) -> Result, chain_graph::NewError

> + where + CG: AsRef>, + { + Ok(KeychainScan { + update: chain_graph + .as_ref() + .inflate_update(self.chain_update, new_txs)?, + last_active_indices: self.last_active_indices, + }) + } +} + +impl ElectrumUpdate { + /// Creates [`ElectrumUpdate`] from [`ElectrumUpdate`]. + pub fn into_confirmation_time_update( + self, + client: &electrum_client::Client, + ) -> Result, Error> { + let heights = self + .chain_update + .range_txids_by_height(..TxHeight::Unconfirmed) + .map(|(h, _)| match h { + TxHeight::Confirmed(h) => *h, + _ => unreachable!("already filtered out unconfirmed"), + }) + .collect::>(); + + let height_to_time = heights + .clone() + .into_iter() + .zip( + client + .batch_block_header(heights)? + .into_iter() + .map(|bh| bh.time as u64), + ) + .collect::>(); + + let mut new_update = SparseChain::::from_checkpoints( + self.chain_update.range_checkpoints(..), + ); + + for &(tx_height, txid) in self.chain_update.txids() { + let conf_time = match tx_height { + TxHeight::Confirmed(height) => ConfirmationTime::Confirmed { + height, + time: height_to_time[&height], + }, + TxHeight::Unconfirmed => ConfirmationTime::Unconfirmed, + }; + let _ = new_update.insert_tx(txid, conf_time).expect("must insert"); + } + + Ok(ElectrumUpdate { + chain_update: new_update, + last_active_indices: self.last_active_indices, + }) + } +} + +#[derive(Debug)] +enum InternalError { + ElectrumError(Error), + Reorg, +} + +impl From for InternalError { + fn from(value: electrum_client::Error) -> Self { + Self::ElectrumError(value) + } +} + +fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> { + // TODO: unsubscribe when added to the client, or is there a better call to use here? + client + .block_headers_subscribe() + .map(|data| (data.height as u32, data.header.block_hash())) +} + +/// Prepare an update sparsechain "template" based on the checkpoints of the `local_chain`. 
+fn prepare_update( + client: &Client, + local_chain: &BTreeMap, +) -> Result { + let mut update = SparseChain::default(); + + // Find local chain block that is still there so our update can connect to the local chain. + for (&existing_height, &existing_hash) in local_chain.iter().rev() { + // TODO: a batch request may be safer, as a reorg that happens when we are obtaining + // `block_header`s will result in inconsistencies + let current_hash = client.block_header(existing_height as usize)?.block_hash(); + let _ = update + .insert_checkpoint(BlockId { + height: existing_height, + hash: current_hash, + }) + .expect("This never errors because we are working with a fresh chain"); + + if current_hash == existing_hash { + break; + } + } + + // Insert the new tip so new transactions will be accepted into the sparse chain. + let tip = { + let (height, hash) = get_tip(client)?; + BlockId { height, hash } + }; + if let Err(failure) = update.insert_checkpoint(tip) { + match failure { + sparse_chain::InsertCheckpointError::HashNotMatching { .. } => { + // There has been a re-org before we even begin scanning addresses. + // Just recursively call (this should never happen). + return prepare_update(client, local_chain); + } + } + } + + Ok(update) +} + +/// This atrocity is required because electrum thinks height of 0 means "unconfirmed", but there is +/// such thing as a genesis block. +/// +/// We contain an expection for the genesis coinbase txid to always have a chain position of +/// [`TxHeight::Confirmed(0)`]. 
+fn determine_tx_height(raw_height: i32, tip_height: u32, txid: Txid) -> TxHeight { + if txid + == Txid::from_hex("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b") + .expect("must deserialize genesis coinbase txid") + { + return TxHeight::Confirmed(0); + } + match raw_height { + h if h <= 0 => { + debug_assert!( + h == 0 || h == -1, + "unexpected height ({}) from electrum server", + h + ); + TxHeight::Unconfirmed + } + h => { + let h = h as u32; + if h > tip_height { + TxHeight::Unconfirmed + } else { + TxHeight::Confirmed(h) + } + } + } +} + +/// Populates the update [`SparseChain`] with related transactions and associated [`ChainPosition`]s +/// of the provided `outpoints` (this is the tx which contains the outpoint and the one spending the +/// outpoint). +/// +/// Unfortunately this is awkward to implement as electrum does not provide such an API. Instead, we +/// will get the tx history of the outpoint's spk, and try to find the containing tx and the +/// spending tx. +fn populate_with_outpoints( + client: &Client, + update: &mut SparseChain, + outpoints: &mut impl Iterator, +) -> Result, InternalError> { + let tip = update + .latest_checkpoint() + .expect("update must atleast have one checkpoint"); + + let mut full_txs = HashMap::new(); + for outpoint in outpoints { + let txid = outpoint.txid; + let tx = client.transaction_get(&txid)?; + debug_assert_eq!(tx.txid(), txid); + let txout = match tx.output.get(outpoint.vout as usize) { + Some(txout) => txout, + None => continue, + }; + + // attempt to find the following transactions (alongside their chain positions), and + // add to our sparsechain `update`: + let mut has_residing = false; // tx in which the outpoint resides + let mut has_spending = false; // tx that spends the outpoint + for res in client.script_get_history(&txout.script_pubkey)? 
{ + if has_residing && has_spending { + break; + } + + if res.tx_hash == txid { + if has_residing { + continue; + } + has_residing = true; + full_txs.insert(res.tx_hash, tx.clone()); + } else { + if has_spending { + continue; + } + let res_tx = match full_txs.get(&res.tx_hash) { + Some(tx) => tx, + None => { + let res_tx = client.transaction_get(&res.tx_hash)?; + full_txs.insert(res.tx_hash, res_tx); + full_txs.get(&res.tx_hash).expect("just inserted") + } + }; + has_spending = res_tx + .input + .iter() + .any(|txin| txin.previous_output == outpoint); + if !has_spending { + continue; + } + }; + + let tx_height = determine_tx_height(res.height, tip.height, res.tx_hash); + + if let Err(failure) = update.insert_tx(res.tx_hash, tx_height) { + match failure { + sparse_chain::InsertTxError::TxTooHigh { .. } => { + unreachable!("we should never encounter this as we ensured height <= tip"); + } + sparse_chain::InsertTxError::TxMovedUnexpectedly { .. } => { + return Err(InternalError::Reorg); + } + } + } + } + } + Ok(full_txs) +} + +/// Populate an update [`SparseChain`] with transactions (and associated block positions) from +/// the given `txids`. +fn populate_with_txids( + client: &Client, + update: &mut SparseChain, + txids: &mut impl Iterator, +) -> Result<(), InternalError> { + let tip = update + .latest_checkpoint() + .expect("update must have atleast one checkpoint"); + for txid in txids { + let tx = match client.transaction_get(&txid) { + Ok(tx) => tx, + Err(electrum_client::Error::Protocol(_)) => continue, + Err(other_err) => return Err(other_err.into()), + }; + + let spk = tx + .output + .get(0) + .map(|txo| &txo.script_pubkey) + .expect("tx must have an output"); + + let tx_height = match client + .script_get_history(spk)? 
+ .into_iter() + .find(|r| r.tx_hash == txid) + { + Some(r) => determine_tx_height(r.height, tip.height, r.tx_hash), + None => continue, + }; + + if let Err(failure) = update.insert_tx(txid, tx_height) { + match failure { + sparse_chain::InsertTxError::TxTooHigh { .. } => { + unreachable!("we should never encounter this as we ensured height <= tip"); + } + sparse_chain::InsertTxError::TxMovedUnexpectedly { .. } => { + return Err(InternalError::Reorg); + } + } + } + } + Ok(()) +} + +/// Populate an update [`SparseChain`] with transactions (and associated block positions) from +/// the transaction history of the provided `spks`. +fn populate_with_spks( + client: &Client, + update: &mut SparseChain, + spks: &mut S, + stop_gap: usize, + batch_size: usize, +) -> Result, InternalError> +where + K: Ord + Clone, + I: Ord + Clone, + S: Iterator, +{ + let tip = update.latest_checkpoint().map_or(0, |cp| cp.height); + let mut unused_spk_count = 0_usize; + let mut scanned_spks = BTreeMap::new(); + + loop { + let spks = (0..batch_size) + .map_while(|_| spks.next()) + .collect::>(); + if spks.is_empty() { + return Ok(scanned_spks); + } + + let spk_histories = client.batch_script_get_history(spks.iter().map(|(_, s)| s))?; + + for ((spk_index, spk), spk_history) in spks.into_iter().zip(spk_histories) { + if spk_history.is_empty() { + scanned_spks.insert(spk_index, (spk, false)); + unused_spk_count += 1; + if unused_spk_count > stop_gap { + return Ok(scanned_spks); + } + continue; + } else { + scanned_spks.insert(spk_index, (spk, true)); + unused_spk_count = 0; + } + + for tx in spk_history { + let tx_height = determine_tx_height(tx.height, tip, tx.tx_hash); + + if let Err(failure) = update.insert_tx(tx.tx_hash, tx_height) { + match failure { + sparse_chain::InsertTxError::TxTooHigh { .. } => { + unreachable!( + "we should never encounter this as we ensured height <= tip" + ); + } + sparse_chain::InsertTxError::TxMovedUnexpectedly { .. 
} => { + return Err(InternalError::Reorg); + } + } + } + } + } + } +} diff --git a/crates/esplora/Cargo.toml b/crates/esplora/Cargo.toml new file mode 100644 index 000000000..bacb2aad3 --- /dev/null +++ b/crates/esplora/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "bdk_esplora" +version = "0.1.0" +edition = "2021" +homepage = "https://bitcoindevkit.org" +repository = "https://github.com/bitcoindevkit/bdk" +documentation = "https://docs.rs/bdk_esplora" +description = "Fetch data from esplora in the form that accepts" +license = "MIT OR Apache-2.0" +readme = "README.md" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../chain", version = "0.3.1", features = ["serde", "miniscript"] } +esplora-client = { version = "0.3", default-features = false } +async-trait = { version = "0.1.66", optional = true } +futures = { version = "0.3.26", optional = true } + +[features] +default = ["async-https", "blocking"] +async = ["async-trait", "futures", "esplora-client/async"] +async-https = ["async", "esplora-client/async-https"] +blocking = ["esplora-client/blocking"] diff --git a/crates/esplora/README.md b/crates/esplora/README.md new file mode 100644 index 000000000..9880115b1 --- /dev/null +++ b/crates/esplora/README.md @@ -0,0 +1,36 @@ +# BDK Esplora + +BDK Esplora extends [`esplora_client`](crate::esplora_client) to update [`bdk_chain`] structures +from an Esplora server. + +## Usage + +There are two versions of the extension trait (blocking and async). 
+ +For blocking-only: +```toml +bdk_esplora = { version = "0.1", features = ["blocking"] } +``` + +For async-only: +```toml +bdk_esplora = { version = "0.1", features = ["async"] } +``` + +For async-only (with https): +```toml +bdk_esplora = { version = "0.1", features = ["async-https"] } +``` + +To use the extension trait: + +```rust,no_run +// for blocking +use bdk_esplora::EsploraExt; +// for async +use bdk_esplora::EsploraAsyncExt; +``` + + + + diff --git a/crates/esplora/src/async_ext.rs b/crates/esplora/src/async_ext.rs new file mode 100644 index 000000000..fe5a82dca --- /dev/null +++ b/crates/esplora/src/async_ext.rs @@ -0,0 +1,309 @@ +use std::collections::BTreeMap; + +use async_trait::async_trait; +use bdk_chain::{ + bitcoin::{BlockHash, OutPoint, Script, Txid}, + chain_graph::ChainGraph, + keychain::KeychainScan, + sparse_chain, BlockId, ConfirmationTime, +}; +use esplora_client::{Error, OutputStatus}; +use futures::stream::{FuturesOrdered, TryStreamExt}; + +use crate::map_confirmation_time; + +#[cfg(feature = "async")] +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +pub trait EsploraAsyncExt { + /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`]. + /// + /// - `local_chain`: the most recent block hashes present locally + /// - `keychain_spks`: keychains that we want to scan transactions for + /// - `txids`: transactions that we want updated [`ChainPosition`]s for + /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we + /// want to included in the update + /// + /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated + /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in + /// parallel. 
+ /// + /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition + #[allow(clippy::result_large_err)] // FIXME + async fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap< + K, + impl IntoIterator + Send> + Send, + >, + txids: impl IntoIterator + Send> + Send, + outpoints: impl IntoIterator + Send> + Send, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error>; + + /// Convenience method to call [`scan`] without requiring a keychain. + /// + /// [`scan`]: EsploraAsyncExt::scan + #[allow(clippy::result_large_err)] // FIXME + async fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator + Send> + Send, + txids: impl IntoIterator + Send> + Send, + outpoints: impl IntoIterator + Send> + Send, + parallel_requests: usize, + ) -> Result, Error> { + let wallet_scan = self + .scan( + local_chain, + [( + (), + misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)), + )] + .into(), + txids, + outpoints, + usize::MAX, + parallel_requests, + ) + .await?; + + Ok(wallet_scan.update) + } +} + +#[cfg(feature = "async")] +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +impl EsploraAsyncExt for esplora_client::AsyncClient { + #[allow(clippy::result_large_err)] // FIXME + async fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap< + K, + impl IntoIterator + Send> + Send, + >, + txids: impl IntoIterator + Send> + Send, + outpoints: impl IntoIterator + Send> + Send, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error> { + let txids = txids.into_iter(); + let outpoints = outpoints.into_iter(); + let parallel_requests = parallel_requests.max(1); + let mut scan = KeychainScan::default(); + let update = &mut scan.update; + let last_active_indices = &mut scan.last_active_indices; + + for (&height, &original_hash) in local_chain.iter().rev() { + let update_block_id = BlockId { + height, + hash: 
self.get_block_hash(height).await?, + }; + let _ = update + .insert_checkpoint(update_block_id) + .expect("cannot repeat height here"); + if update_block_id.hash == original_hash { + break; + } + } + let tip_at_start = BlockId { + height: self.get_height().await?, + hash: self.get_tip_hash().await?, + }; + if let Err(failure) = update.insert_checkpoint(tip_at_start) { + match failure { + sparse_chain::InsertCheckpointError::HashNotMatching { .. } => { + // there has been a re-org before we started scanning. We haven't consumed any iterators so it's safe to recursively call. + return EsploraAsyncExt::scan( + self, + local_chain, + keychain_spks, + txids, + outpoints, + stop_gap, + parallel_requests, + ) + .await; + } + } + } + + for (keychain, spks) in keychain_spks { + let mut spks = spks.into_iter(); + let mut last_active_index = None; + let mut empty_scripts = 0; + type IndexWithTxs = (u32, Vec); + + loop { + let futures: FuturesOrdered<_> = (0..parallel_requests) + .filter_map(|_| { + let (index, script) = spks.next()?; + let client = self.clone(); + Some(async move { + let mut related_txs = client.scripthash_txs(&script, None).await?; + + let n_confirmed = + related_txs.iter().filter(|tx| tx.status.confirmed).count(); + // esplora pages on 25 confirmed transactions. If there's 25 or more we + // keep requesting to see if there's more. 
+ if n_confirmed >= 25 { + loop { + let new_related_txs = client + .scripthash_txs( + &script, + Some(related_txs.last().unwrap().txid), + ) + .await?; + let n = new_related_txs.len(); + related_txs.extend(new_related_txs); + // we've reached the end + if n < 25 { + break; + } + } + } + + Result::<_, esplora_client::Error>::Ok((index, related_txs)) + }) + }) + .collect(); + + let n_futures = futures.len(); + + let idx_with_tx: Vec = futures.try_collect().await?; + + for (index, related_txs) in idx_with_tx { + if related_txs.is_empty() { + empty_scripts += 1; + } else { + last_active_index = Some(index); + empty_scripts = 0; + } + for tx in related_txs { + let confirmation_time = + map_confirmation_time(&tx.status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) { + use bdk_chain::{ + chain_graph::InsertTxError, sparse_chain::InsertTxError::*, + }; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. }) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + } + + if n_futures == 0 || empty_scripts >= stop_gap { + break; + } + } + + if let Some(last_active_index) = last_active_index { + last_active_indices.insert(keychain, last_active_index); + } + } + + for txid in txids { + let (tx, tx_status) = + match (self.get_tx(&txid).await?, self.get_tx_status(&txid).await?) { + (Some(tx), Some(tx_status)) => (tx, tx_status), + _ => continue, + }; + + let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx, confirmation_time) { + use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; + match failure { + InsertTxError::Chain(TxTooHigh { .. 
}) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. }) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + + for op in outpoints { + let mut op_txs = Vec::with_capacity(2); + if let (Some(tx), Some(tx_status)) = ( + self.get_tx(&op.txid).await?, + self.get_tx_status(&op.txid).await?, + ) { + op_txs.push((tx, tx_status)); + if let Some(OutputStatus { + txid: Some(txid), + status: Some(spend_status), + .. + }) = self.get_output_status(&op.txid, op.vout as _).await? + { + if let Some(spend_tx) = self.get_tx(&txid).await? { + op_txs.push((spend_tx, spend_status)); + } + } + } + + for (tx, status) in op_txs { + let confirmation_time = map_confirmation_time(&status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx, confirmation_time) { + use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. }) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + } + + let reorg_occurred = { + if let Some(checkpoint) = update.chain().latest_checkpoint() { + self.get_block_hash(checkpoint.height).await? != checkpoint.hash + } else { + false + } + }; + + if reorg_occurred { + // A reorg occurred so lets find out where all the txids we found are in the chain now. 
+ // XXX: collect required because of weird type naming issues + let txids_found = update + .chain() + .txids() + .map(|(_, txid)| *txid) + .collect::>(); + scan.update = EsploraAsyncExt::scan_without_keychain( + self, + local_chain, + [], + txids_found, + [], + parallel_requests, + ) + .await?; + } + + Ok(scan) + } +} diff --git a/crates/esplora/src/blocking_ext.rs b/crates/esplora/src/blocking_ext.rs new file mode 100644 index 000000000..3f461c03b --- /dev/null +++ b/crates/esplora/src/blocking_ext.rs @@ -0,0 +1,290 @@ +use std::collections::BTreeMap; + +use bdk_chain::{ + bitcoin::{BlockHash, OutPoint, Script, Txid}, + chain_graph::ChainGraph, + keychain::KeychainScan, + sparse_chain, BlockId, ConfirmationTime, +}; +use esplora_client::{Error, OutputStatus}; + +use crate::map_confirmation_time; + +/// Trait to extend [`esplora_client::BlockingClient`] functionality. +/// +/// Refer to [crate-level documentation] for more. +/// +/// [crate-level documentation]: crate +pub trait EsploraExt { + /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`]. + /// + /// - `local_chain`: the most recent block hashes present locally + /// - `keychain_spks`: keychains that we want to scan transactions for + /// - `txids`: transactions that we want updated [`ChainPosition`]s for + /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we + /// want to included in the update + /// + /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated + /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in + /// parallel. 
+ /// + /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition + #[allow(clippy::result_large_err)] // FIXME + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error>; + + /// Convenience method to call [`scan`] without requiring a keychain. + /// + /// [`scan`]: EsploraExt::scan + #[allow(clippy::result_large_err)] // FIXME + fn scan_without_keychain( + &self, + local_chain: &BTreeMap, + misc_spks: impl IntoIterator, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + parallel_requests: usize, + ) -> Result, Error> { + let wallet_scan = self.scan( + local_chain, + [( + (), + misc_spks + .into_iter() + .enumerate() + .map(|(i, spk)| (i as u32, spk)), + )] + .into(), + txids, + outpoints, + usize::MAX, + parallel_requests, + )?; + + Ok(wallet_scan.update) + } +} + +impl EsploraExt for esplora_client::BlockingClient { + fn scan( + &self, + local_chain: &BTreeMap, + keychain_spks: BTreeMap>, + txids: impl IntoIterator, + outpoints: impl IntoIterator, + stop_gap: usize, + parallel_requests: usize, + ) -> Result, Error> { + let parallel_requests = parallel_requests.max(1); + let mut scan = KeychainScan::default(); + let update = &mut scan.update; + let last_active_indices = &mut scan.last_active_indices; + + for (&height, &original_hash) in local_chain.iter().rev() { + let update_block_id = BlockId { + height, + hash: self.get_block_hash(height)?, + }; + let _ = update + .insert_checkpoint(update_block_id) + .expect("cannot repeat height here"); + if update_block_id.hash == original_hash { + break; + } + } + let tip_at_start = BlockId { + height: self.get_height()?, + hash: self.get_tip_hash()?, + }; + if let Err(failure) = update.insert_checkpoint(tip_at_start) { + match failure { + sparse_chain::InsertCheckpointError::HashNotMatching { .. } => { + // there has been a re-org before we started scanning. 
We haven't consumed any iterators so it's safe to recursively call. + return EsploraExt::scan( + self, + local_chain, + keychain_spks, + txids, + outpoints, + stop_gap, + parallel_requests, + ); + } + } + } + + for (keychain, spks) in keychain_spks { + let mut spks = spks.into_iter(); + let mut last_active_index = None; + let mut empty_scripts = 0; + type IndexWithTxs = (u32, Vec); + + loop { + let handles = (0..parallel_requests) + .filter_map( + |_| -> Option>> { + let (index, script) = spks.next()?; + let client = self.clone(); + Some(std::thread::spawn(move || { + let mut related_txs = client.scripthash_txs(&script, None)?; + + let n_confirmed = + related_txs.iter().filter(|tx| tx.status.confirmed).count(); + // esplora pages on 25 confirmed transactions. If there's 25 or more we + // keep requesting to see if there's more. + if n_confirmed >= 25 { + loop { + let new_related_txs = client.scripthash_txs( + &script, + Some(related_txs.last().unwrap().txid), + )?; + let n = new_related_txs.len(); + related_txs.extend(new_related_txs); + // we've reached the end + if n < 25 { + break; + } + } + } + + Result::<_, esplora_client::Error>::Ok((index, related_txs)) + })) + }, + ) + .collect::>(); + + let n_handles = handles.len(); + + for handle in handles { + let (index, related_txs) = handle.join().unwrap()?; // TODO: don't unwrap + if related_txs.is_empty() { + empty_scripts += 1; + } else { + last_active_index = Some(index); + empty_scripts = 0; + } + for tx in related_txs { + let confirmation_time = + map_confirmation_time(&tx.status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) { + use bdk_chain::{ + chain_graph::InsertTxError, sparse_chain::InsertTxError::*, + }; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. 
}) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + } + + if n_handles == 0 || empty_scripts >= stop_gap { + break; + } + } + + if let Some(last_active_index) = last_active_index { + last_active_indices.insert(keychain, last_active_index); + } + } + + for txid in txids.into_iter() { + let (tx, tx_status) = match (self.get_tx(&txid)?, self.get_tx_status(&txid)?) { + (Some(tx), Some(tx_status)) => (tx, tx_status), + _ => continue, + }; + + let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx, confirmation_time) { + use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. }) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + + for op in outpoints.into_iter() { + let mut op_txs = Vec::with_capacity(2); + if let (Some(tx), Some(tx_status)) = + (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?) + { + op_txs.push((tx, tx_status)); + if let Some(OutputStatus { + txid: Some(txid), + status: Some(spend_status), + .. + }) = self.get_output_status(&op.txid, op.vout as _)? + { + if let Some(spend_tx) = self.get_tx(&txid)? { + op_txs.push((spend_tx, spend_status)); + } + } + } + + for (tx, status) in op_txs { + let confirmation_time = map_confirmation_time(&status, tip_at_start.height); + + if let Err(failure) = update.insert_tx(tx, confirmation_time) { + use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*}; + match failure { + InsertTxError::Chain(TxTooHigh { .. }) => { + unreachable!("chain position already checked earlier") + } + InsertTxError::Chain(TxMovedUnexpectedly { .. 
}) + | InsertTxError::UnresolvableConflict(_) => { + /* implies reorg during scan. We deal with that below */ + } + } + } + } + } + + let reorg_occurred = { + if let Some(checkpoint) = update.chain().latest_checkpoint() { + self.get_block_hash(checkpoint.height)? != checkpoint.hash + } else { + false + } + }; + + if reorg_occurred { + // A reorg occurred so lets find out where all the txids we found are in the chain now. + // XXX: collect required because of weird type naming issues + let txids_found = update + .chain() + .txids() + .map(|(_, txid)| *txid) + .collect::>(); + scan.update = EsploraExt::scan_without_keychain( + self, + local_chain, + [], + txids_found, + [], + parallel_requests, + )?; + } + + Ok(scan) + } +} diff --git a/crates/esplora/src/lib.rs b/crates/esplora/src/lib.rs new file mode 100644 index 000000000..8398fcb35 --- /dev/null +++ b/crates/esplora/src/lib.rs @@ -0,0 +1,27 @@ +#![doc = include_str!("../README.md")] +use bdk_chain::ConfirmationTime; +use esplora_client::TxStatus; + +pub use esplora_client; + +#[cfg(feature = "blocking")] +mod blocking_ext; +#[cfg(feature = "blocking")] +pub use blocking_ext::*; + +#[cfg(feature = "async")] +mod async_ext; +#[cfg(feature = "async")] +pub use async_ext::*; + +pub(crate) fn map_confirmation_time( + tx_status: &TxStatus, + height_at_start: u32, +) -> ConfirmationTime { + match (tx_status.block_time, tx_status.block_height) { + (Some(time), Some(height)) if height <= height_at_start => { + ConfirmationTime::Confirmed { height, time } + } + _ => ConfirmationTime::Unconfirmed, + } +} diff --git a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml new file mode 100644 index 000000000..55b04d4f9 --- /dev/null +++ b/crates/file_store/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "bdk_file_store" +version = "0.0.1" +edition = "2021" +license = "MIT OR Apache-2.0" +repository = "https://github.com/bitcoindevkit/bdk" +documentation = "https://docs.rs/bdk_file_store" +keywords = ["bitcoin", 
"persist", "persistence", "bdk", "file", "store"] +authors = ["Bitcoin Dev Kit Developers"] +readme = "README.md" + +[dependencies] +bdk_chain = { path = "../chain", version = "0.3.1", features = [ "serde", "miniscript" ] } +bincode = { version = "1" } +serde = { version = "1", features = ["derive"] } + +[dev-dependencies] +tempfile = "3" diff --git a/crates/file_store/README.md b/crates/file_store/README.md new file mode 100644 index 000000000..076142f89 --- /dev/null +++ b/crates/file_store/README.md @@ -0,0 +1,10 @@ +# BDK File Store + +This is a simple append-only flat file implementation of +[`Persist`](`bdk_chain::keychain::persist::Persist`). + +The main structure is [`KeychainStore`](`crate::KeychainStore`), which can be used with [`bdk`]'s +`Wallet` to persist wallet data into a flat file. + +[`bdk`]: https://docs.rs/bdk/latest +[`bdk_chain`]: https://docs.rs/bdk_chain/latest diff --git a/crates/file_store/src/file_store.rs b/crates/file_store/src/file_store.rs new file mode 100644 index 000000000..f9a8056b0 --- /dev/null +++ b/crates/file_store/src/file_store.rs @@ -0,0 +1,404 @@ +//! Module for persisting data on-disk. +//! +//! The star of the show is [`KeychainStore`] which maintains an append-only file of +//! [`KeychainChangeSet`]s which can be used to restore a [`KeychainTracker`]. +use bdk_chain::{ + keychain::{KeychainChangeSet, KeychainTracker}, + sparse_chain, +}; +use bincode::{DefaultOptions, Options}; +use core::marker::PhantomData; +use std::{ + fs::{File, OpenOptions}, + io::{self, Read, Seek, Write}, + path::Path, +}; + +/// BDK File Store magic bytes length. +const MAGIC_BYTES_LEN: usize = 12; + +/// BDK File Store magic bytes. +const MAGIC_BYTES: [u8; MAGIC_BYTES_LEN] = [98, 100, 107, 102, 115, 48, 48, 48, 48, 48, 48, 48]; + +/// Persists an append only list of `KeychainChangeSet` to a single file. +/// [`KeychainChangeSet`] record the changes made to a [`KeychainTracker`]. 
+#[derive(Debug)] +pub struct KeychainStore { + db_file: File, + changeset_type_params: core::marker::PhantomData<(K, P)>, +} + +fn bincode() -> impl bincode::Options { + DefaultOptions::new().with_varint_encoding() +} + +impl KeychainStore +where + K: Ord + Clone + core::fmt::Debug, + P: sparse_chain::ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + /// Creates a new store from a [`File`]. + /// + /// The file must have been opened with read, write permissions. + /// + /// [`File`]: std::fs::File + pub fn new(mut file: File) -> Result { + file.rewind()?; + + let mut magic_bytes = [0_u8; MAGIC_BYTES_LEN]; + file.read_exact(&mut magic_bytes)?; + + if magic_bytes != MAGIC_BYTES { + return Err(FileError::InvalidMagicBytes(magic_bytes)); + } + + Ok(Self { + db_file: file, + changeset_type_params: Default::default(), + }) + } + + /// Creates or loads a store from `db_path`. If no file exists there it will be created. + pub fn new_from_path>(db_path: D) -> Result { + let already_exists = db_path.as_ref().exists(); + + let mut db_file = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(db_path)?; + + if !already_exists { + db_file.write_all(&MAGIC_BYTES)?; + } + + Self::new(db_file) + } + + /// Iterates over the stored changeset from first to last changing the seek position at each + /// iteration. + /// + /// The iterator may fail to read an entry and therefore return an error. However the first time + /// it returns an error will be the last. After doing so the iterator will always yield `None`. + /// + /// **WARNING**: This method changes the write position in the underlying file. You should + /// always iterate over all entries until `None` is returned if you want your next write to go + /// at the end, otherwise you will write over existing entries. 
+ pub fn iter_changesets(&mut self) -> Result>, io::Error> { + self.db_file + .seek(io::SeekFrom::Start(MAGIC_BYTES_LEN as _))?; + + Ok(EntryIter::new(&mut self.db_file)) + } + + /// Loads all the changesets that have been stored as one giant changeset. + /// + /// This function returns a tuple of the aggregate changeset and a result which indicates + /// whether an error occurred while reading or deserializing one of the entries. If so the + /// changeset will consist of all of those it was able to read. + /// + /// You should usually check the error. In many applications it may make sense to do a full + /// wallet scan with a stop gap after getting an error since it is likely that one of the + /// changesets it was unable to read changed the derivation indices of the tracker. + /// + /// **WARNING**: This method changes the write position of the underlying file. The next + /// changeset will be written over the erroring entry (or the end of the file if none existed). + pub fn aggregate_changeset(&mut self) -> (KeychainChangeSet, Result<(), IterError>) { + let mut changeset = KeychainChangeSet::default(); + let result = (|| { + let iter_changeset = self.iter_changesets()?; + for next_changeset in iter_changeset { + changeset.append(next_changeset?); + } + Ok(()) + })(); + + (changeset, result) + } + + /// Reads and applies all the changesets stored sequentially to the tracker, stopping when it fails + /// to read the next one. + /// + /// **WARNING**: This method changes the write position of the underlying file. The next + /// changeset will be written over the erroring entry (or the end of the file if none existed). + pub fn load_into_keychain_tracker( + &mut self, + tracker: &mut KeychainTracker, + ) -> Result<(), IterError> { + for changeset in self.iter_changesets()? { + tracker.apply_changeset(changeset?) + } + Ok(()) + } + + /// Append a new changeset to the file and truncate file to the end of the appended changeset. 
+ /// + /// The truncation is to avoid the possibility of having a valid, but inconsistent changeset + /// directly after the appended changeset. + pub fn append_changeset( + &mut self, + changeset: &KeychainChangeSet, + ) -> Result<(), io::Error> { + if changeset.is_empty() { + return Ok(()); + } + + bincode() + .serialize_into(&mut self.db_file, changeset) + .map_err(|e| match *e { + bincode::ErrorKind::Io(inner) => inner, + unexpected_err => panic!("unexpected bincode error: {}", unexpected_err), + })?; + + // truncate file after this changeset addition + // if this is not done, data after this changeset may represent valid changesets, however + // applying those changesets on top of this one may result in inconsistent state + let pos = self.db_file.stream_position()?; + self.db_file.set_len(pos)?; + + // We want to make sure that derivation index changes are written to disk as soon as + // possible so you know about the write failure before you give out the address in the application. + if !changeset.derivation_indices.is_empty() { + self.db_file.sync_data()?; + } + + Ok(()) + } +} + +/// Error that occurs due to problems encountered with the file. +#[derive(Debug)] +pub enum FileError { + /// IO error, this may mean that the file is too short. + Io(io::Error), + /// Magic bytes do not match expected. + InvalidMagicBytes([u8; MAGIC_BYTES_LEN]), +} + +impl core::fmt::Display for FileError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Io(e) => write!(f, "io error trying to read file: {}", e), + Self::InvalidMagicBytes(b) => write!( + f, + "file has invalid magic bytes: expected={:?} got={:?}", + MAGIC_BYTES, b + ), + } + } +} + +impl From for FileError { + fn from(value: io::Error) -> Self { + Self::Io(value) + } +} + +impl std::error::Error for FileError {} + +/// Error type for [`EntryIter`]. +#[derive(Debug)] +pub enum IterError { + /// Failure to read from file. 
+ Io(io::Error), + /// Failure to decode data from file. + Bincode(bincode::ErrorKind), +} + +impl core::fmt::Display for IterError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + IterError::Io(e) => write!(f, "io error trying to read entry {}", e), + IterError::Bincode(e) => write!(f, "bincode error while reading entry {}", e), + } + } +} + +impl std::error::Error for IterError {} + +/// Iterator over entries in a file store. +/// +/// Reads and returns an entry each time [`next`] is called. If an error occurs while reading the +/// iterator will yield a `Result::Err(_)` instead and then `None` for the next call to `next`. +/// +/// [`next`]: Self::next +pub struct EntryIter<'a, V> { + db_file: &'a mut File, + types: PhantomData, + error_exit: bool, +} + +impl<'a, V> EntryIter<'a, V> { + pub fn new(db_file: &'a mut File) -> Self { + Self { + db_file, + types: PhantomData, + error_exit: false, + } + } +} + +impl<'a, V> Iterator for EntryIter<'a, V> +where + V: serde::de::DeserializeOwned, +{ + type Item = Result; + + fn next(&mut self) -> Option { + let result = (|| { + let pos = self.db_file.stream_position()?; + + match bincode().deserialize_from(&mut self.db_file) { + Ok(changeset) => Ok(Some(changeset)), + Err(e) => { + if let bincode::ErrorKind::Io(inner) = &*e { + if inner.kind() == io::ErrorKind::UnexpectedEof { + let eof = self.db_file.seek(io::SeekFrom::End(0))?; + if pos == eof { + return Ok(None); + } + } + } + + self.db_file.seek(io::SeekFrom::Start(pos))?; + Err(IterError::Bincode(*e)) + } + } + })(); + + let result = result.transpose(); + + if let Some(Err(_)) = &result { + self.error_exit = true; + } + + result + } +} + +impl From for IterError { + fn from(value: io::Error) -> Self { + IterError::Io(value) + } +} + +#[cfg(test)] +mod test { + use super::*; + use bdk_chain::{ + keychain::{DerivationAdditions, KeychainChangeSet}, + TxHeight, + }; + use std::{ + io::{Read, Write}, + vec::Vec, + }; + use 
tempfile::NamedTempFile; + #[derive( + Debug, + Clone, + Copy, + PartialOrd, + Ord, + PartialEq, + Eq, + Hash, + serde::Serialize, + serde::Deserialize, + )] + enum TestKeychain { + External, + Internal, + } + + impl core::fmt::Display for TestKeychain { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::External => write!(f, "external"), + Self::Internal => write!(f, "internal"), + } + } + } + + #[test] + fn magic_bytes() { + assert_eq!(&MAGIC_BYTES, "bdkfs0000000".as_bytes()); + } + + #[test] + fn new_fails_if_file_is_too_short() { + let mut file = NamedTempFile::new().unwrap(); + file.write_all(&MAGIC_BYTES[..MAGIC_BYTES_LEN - 1]) + .expect("should write"); + + match KeychainStore::::new(file.reopen().unwrap()) { + Err(FileError::Io(e)) => assert_eq!(e.kind(), std::io::ErrorKind::UnexpectedEof), + unexpected => panic!("unexpected result: {:?}", unexpected), + }; + } + + #[test] + fn new_fails_if_magic_bytes_are_invalid() { + let invalid_magic_bytes = "ldkfs0000000"; + + let mut file = NamedTempFile::new().unwrap(); + file.write_all(invalid_magic_bytes.as_bytes()) + .expect("should write"); + + match KeychainStore::::new(file.reopen().unwrap()) { + Err(FileError::InvalidMagicBytes(b)) => { + assert_eq!(b, invalid_magic_bytes.as_bytes()) + } + unexpected => panic!("unexpected result: {:?}", unexpected), + }; + } + + #[test] + fn append_changeset_truncates_invalid_bytes() { + // initial data to write to file (magic bytes + invalid data) + let mut data = [255_u8; 2000]; + data[..MAGIC_BYTES_LEN].copy_from_slice(&MAGIC_BYTES); + + let changeset = KeychainChangeSet { + derivation_indices: DerivationAdditions( + vec![(TestKeychain::External, 42)].into_iter().collect(), + ), + chain_graph: Default::default(), + }; + + let mut file = NamedTempFile::new().unwrap(); + file.write_all(&data).expect("should write"); + + let mut store = KeychainStore::::new(file.reopen().unwrap()) + .expect("should open"); + match 
store.iter_changesets().expect("seek should succeed").next() { + Some(Err(IterError::Bincode(_))) => {} + unexpected_res => panic!("unexpected result: {:?}", unexpected_res), + } + + store.append_changeset(&changeset).expect("should append"); + + drop(store); + + let got_bytes = { + let mut buf = Vec::new(); + file.reopen() + .unwrap() + .read_to_end(&mut buf) + .expect("should read"); + buf + }; + + let expected_bytes = { + let mut buf = MAGIC_BYTES.to_vec(); + DefaultOptions::new() + .with_varint_encoding() + .serialize_into(&mut buf, &changeset) + .expect("should encode"); + buf + }; + + assert_eq!(got_bytes, expected_bytes); + } +} diff --git a/crates/file_store/src/lib.rs b/crates/file_store/src/lib.rs new file mode 100644 index 000000000..e33474194 --- /dev/null +++ b/crates/file_store/src/lib.rs @@ -0,0 +1,32 @@ +#![doc = include_str!("../README.md")] +mod file_store; +use bdk_chain::{ + keychain::{KeychainChangeSet, KeychainTracker, PersistBackend}, + sparse_chain::ChainPosition, +}; +pub use file_store::*; + +impl PersistBackend for KeychainStore +where + K: Ord + Clone + core::fmt::Debug, + P: ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + type WriteError = std::io::Error; + + type LoadError = IterError; + + fn append_changeset( + &mut self, + changeset: &KeychainChangeSet, + ) -> Result<(), Self::WriteError> { + KeychainStore::append_changeset(self, changeset) + } + + fn load_into_keychain_tracker( + &mut self, + tracker: &mut KeychainTracker, + ) -> Result<(), Self::LoadError> { + KeychainStore::load_into_keychain_tracker(self, tracker) + } +} diff --git a/crates/file_store/tests/test_file_store.rs b/crates/file_store/tests/test_file_store.rs new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/crates/file_store/tests/test_file_store.rs @@ -0,0 +1 @@ + diff --git a/example-crates/keychain_tracker_electrum/.gitignore b/example-crates/keychain_tracker_electrum/.gitignore new file mode 100644 index 
000000000..ea8c4bf7f --- /dev/null +++ b/example-crates/keychain_tracker_electrum/.gitignore @@ -0,0 +1 @@ +/target diff --git a/example-crates/keychain_tracker_electrum/Cargo.toml b/example-crates/keychain_tracker_electrum/Cargo.toml new file mode 100644 index 000000000..e2cbf6aff --- /dev/null +++ b/example-crates/keychain_tracker_electrum/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "keychain_tracker_electrum_example" +version = "0.1.0" +edition = "2021" + +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3.1", features = ["serde"] } +bdk_electrum = { path = "../../crates/electrum" } +keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli"} diff --git a/example-crates/keychain_tracker_electrum/README.md b/example-crates/keychain_tracker_electrum/README.md new file mode 100644 index 000000000..b8bdea219 --- /dev/null +++ b/example-crates/keychain_tracker_electrum/README.md @@ -0,0 +1,6 @@ +# Keychain Tracker with electrum + +This example shows how you use the `KeychainTracker` from `bdk_chain` to create a simple command +line wallet. + + diff --git a/example-crates/keychain_tracker_electrum/src/main.rs b/example-crates/keychain_tracker_electrum/src/main.rs new file mode 100644 index 000000000..70c3441eb --- /dev/null +++ b/example-crates/keychain_tracker_electrum/src/main.rs @@ -0,0 +1,245 @@ +use bdk_chain::bitcoin::{Address, OutPoint, Txid}; +use bdk_electrum::bdk_chain::{self, bitcoin::Network, TxHeight}; +use bdk_electrum::{ + electrum_client::{self, ElectrumApi}, + ElectrumExt, ElectrumUpdate, +}; +use keychain_tracker_example_cli::{ + self as cli, + anyhow::{self, Context}, + clap::{self, Parser, Subcommand}, +}; +use std::{collections::BTreeMap, fmt::Debug, io, io::Write}; + +#[derive(Subcommand, Debug, Clone)] +enum ElectrumCommands { + /// Scans the addresses in the wallet using esplora API. + Scan { + /// When a gap this large has been found for a keychain it will stop. 
+ #[clap(long, default_value = "5")] + stop_gap: usize, + #[clap(flatten)] + scan_options: ScanOptions, + }, + /// Scans particular addresses using electrum API + Sync { + /// Scan all the unused addresses + #[clap(long)] + unused_spks: bool, + /// Scan every address that you have derived + #[clap(long)] + all_spks: bool, + /// Scan unspent outpoints for spends or changes to confirmation status of residing tx + #[clap(long)] + utxos: bool, + /// Scan unconfirmed transactions for updates + #[clap(long)] + unconfirmed: bool, + #[clap(flatten)] + scan_options: ScanOptions, + }, +} + +#[derive(Parser, Debug, Clone, PartialEq)] +pub struct ScanOptions { + /// Set batch size for each script_history call to electrum client + #[clap(long, default_value = "25")] + pub batch_size: usize, +} + +fn main() -> anyhow::Result<()> { + let (args, keymap, tracker, db) = cli::init::()?; + + let electrum_url = match args.network { + Network::Bitcoin => "ssl://electrum.blockstream.info:50002", + Network::Testnet => "ssl://electrum.blockstream.info:60002", + Network::Regtest => "tcp://localhost:60401", + Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001", + }; + let config = electrum_client::Config::builder() + .validate_domain(matches!(args.network, Network::Bitcoin)) + .build(); + + let client = electrum_client::Client::from_config(electrum_url, config)?; + + let electrum_cmd = match args.command.clone() { + cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd, + general_command => { + return cli::handle_commands( + general_command, + |transaction| { + let _txid = client.transaction_broadcast(transaction)?; + Ok(()) + }, + &tracker, + &db, + args.network, + &keymap, + ) + } + }; + + let response = match electrum_cmd { + ElectrumCommands::Scan { + stop_gap, + scan_options: scan_option, + } => { + let (spk_iterators, local_chain) = { + // Get a short lock on the tracker to get the spks iterators + // and local chain state + let tracker = &*tracker.lock().unwrap(); + 
let spk_iterators = tracker + .txout_index + .spks_of_all_keychains() + .into_iter() + .map(|(keychain, iter)| { + let mut first = true; + let spk_iter = iter.inspect(move |(i, _)| { + if first { + eprint!("\nscanning {}: ", keychain); + first = false; + } + + eprint!("{} ", i); + let _ = io::stdout().flush(); + }); + (keychain, spk_iter) + }) + .collect::>(); + let local_chain = tracker.chain().checkpoints().clone(); + (spk_iterators, local_chain) + }; + + // we scan the spks **without** a lock on the tracker + client.scan( + &local_chain, + spk_iterators, + core::iter::empty(), + core::iter::empty(), + stop_gap, + scan_option.batch_size, + )? + } + ElectrumCommands::Sync { + mut unused_spks, + mut utxos, + mut unconfirmed, + all_spks, + scan_options, + } => { + // Get a short lock on the tracker to get the spks we're interested in + let tracker = tracker.lock().unwrap(); + + if !(all_spks || unused_spks || utxos || unconfirmed) { + unused_spks = true; + unconfirmed = true; + utxos = true; + } else if all_spks { + unused_spks = false; + } + + let mut spks: Box> = + Box::new(core::iter::empty()); + if all_spks { + let all_spks = tracker + .txout_index + .all_spks() + .iter() + .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { + eprintln!("scanning {:?}", index); + script + }))); + } + if unused_spks { + let unused_spks = tracker + .txout_index + .unused_spks(..) 
+ .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { + eprintln!( + "Checking if address {} {:?} has been used", + Address::from_script(&script, args.network).unwrap(), + index + ); + + script + }))); + } + + let mut outpoints: Box> = Box::new(core::iter::empty()); + + if utxos { + let utxos = tracker + .full_utxos() + .map(|(_, utxo)| utxo) + .collect::>(); + outpoints = Box::new( + utxos + .into_iter() + .inspect(|utxo| { + eprintln!( + "Checking if outpoint {} (value: {}) has been spent", + utxo.outpoint, utxo.txout.value + ); + }) + .map(|utxo| utxo.outpoint), + ); + }; + + let mut txids: Box> = Box::new(core::iter::empty()); + + if unconfirmed { + let unconfirmed_txids = tracker + .chain() + .range_txids_by_height(TxHeight::Unconfirmed..) + .map(|(_, txid)| *txid) + .collect::>(); + + txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { + eprintln!("Checking if {} is confirmed yet", txid); + })); + } + + let local_chain = tracker.chain().checkpoints().clone(); + // drop lock on tracker + drop(tracker); + + // we scan the spks **without** a lock on the tracker + ElectrumUpdate { + chain_update: client + .scan_without_keychain( + &local_chain, + spks, + txids, + outpoints, + scan_options.batch_size, + ) + .context("scanning the blockchain")?, + ..Default::default() + } + } + }; + + let missing_txids = response.missing_full_txs(&*tracker.lock().unwrap()); + + // fetch the missing full transactions **without** a lock on the tracker + let new_txs = client + .batch_transaction_get(missing_txids) + .context("fetching full transactions")?; + + { + // Get a final short lock to apply the changes + let mut tracker = tracker.lock().unwrap(); + let changeset = { + let scan = response.into_keychain_scan(new_txs, &*tracker)?; + tracker.determine_changeset(&scan)? 
+ }; + db.lock().unwrap().append_changeset(&changeset)?; + tracker.apply_changeset(changeset); + }; + + Ok(()) +} diff --git a/example-crates/keychain_tracker_esplora/.gitignore b/example-crates/keychain_tracker_esplora/.gitignore new file mode 100644 index 000000000..8359723ac --- /dev/null +++ b/example-crates/keychain_tracker_esplora/.gitignore @@ -0,0 +1,3 @@ +/target +Cargo.lock +.bdk_example_db diff --git a/example-crates/keychain_tracker_esplora/Cargo.toml b/example-crates/keychain_tracker_esplora/Cargo.toml new file mode 100644 index 000000000..20a703180 --- /dev/null +++ b/example-crates/keychain_tracker_esplora/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "keychain_tracker_esplora_example" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3.1", features = ["serde", "miniscript"] } +bdk_esplora = { path = "../../crates/esplora" } +keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli" } diff --git a/example-crates/keychain_tracker_esplora/src/main.rs b/example-crates/keychain_tracker_esplora/src/main.rs new file mode 100644 index 000000000..88e97890b --- /dev/null +++ b/example-crates/keychain_tracker_esplora/src/main.rs @@ -0,0 +1,241 @@ +use bdk_chain::bitcoin::{Address, OutPoint, Txid}; +use bdk_chain::{bitcoin::Network, TxHeight}; +use bdk_esplora::esplora_client; +use bdk_esplora::EsploraExt; + +use std::io::{self, Write}; + +use keychain_tracker_example_cli::{ + self as cli, + anyhow::{self, Context}, + clap::{self, Parser, Subcommand}, +}; + +#[derive(Subcommand, Debug, Clone)] +enum EsploraCommands { + /// Scans the addresses in the wallet using esplora API. + Scan { + /// When a gap this large has been found for a keychain it will stop. 
+ #[clap(long, default_value = "5")] + stop_gap: usize, + + #[clap(flatten)] + scan_options: ScanOptions, + }, + /// Scans particular addresses using esplora API + Sync { + /// Scan all the unused addresses + #[clap(long)] + unused_spks: bool, + /// Scan every address that you have derived + #[clap(long)] + all_spks: bool, + /// Scan unspent outpoints for spends or changes to confirmation status of residing tx + #[clap(long)] + utxos: bool, + /// Scan unconfirmed transactions for updates + #[clap(long)] + unconfirmed: bool, + + #[clap(flatten)] + scan_options: ScanOptions, + }, +} + +#[derive(Parser, Debug, Clone, PartialEq)] +pub struct ScanOptions { + #[clap(long, default_value = "5")] + pub parallel_requests: usize, +} + +fn main() -> anyhow::Result<()> { + let (args, keymap, keychain_tracker, db) = cli::init::()?; + let esplora_url = match args.network { + Network::Bitcoin => "https://mempool.space/api", + Network::Testnet => "https://mempool.space/testnet/api", + Network::Regtest => "http://localhost:3002", + Network::Signet => "https://mempool.space/signet/api", + }; + + let client = esplora_client::Builder::new(esplora_url).build_blocking()?; + + let esplora_cmd = match args.command { + cli::Commands::ChainSpecific(esplora_cmd) => esplora_cmd, + general_command => { + return cli::handle_commands( + general_command, + |transaction| Ok(client.broadcast(transaction)?), + &keychain_tracker, + &db, + args.network, + &keymap, + ) + } + }; + + match esplora_cmd { + EsploraCommands::Scan { + stop_gap, + scan_options, + } => { + let (spk_iterators, local_chain) = { + // Get a short lock on the tracker to get the spks iterators + // and local chain state + let tracker = &*keychain_tracker.lock().unwrap(); + let spk_iterators = tracker + .txout_index + .spks_of_all_keychains() + .into_iter() + .map(|(keychain, iter)| { + let mut first = true; + ( + keychain, + iter.inspect(move |(i, _)| { + if first { + eprint!("\nscanning {}: ", keychain); + first = false; + } + + 
eprint!("{} ", i); + let _ = io::stdout().flush(); + }), + ) + }) + .collect(); + + let local_chain = tracker.chain().checkpoints().clone(); + (spk_iterators, local_chain) + }; + + // we scan the iterators **without** a lock on the tracker + let wallet_scan = client + .scan( + &local_chain, + spk_iterators, + core::iter::empty(), + core::iter::empty(), + stop_gap, + scan_options.parallel_requests, + ) + .context("scanning the blockchain")?; + eprintln!(); + + { + // we take a short lock to apply results to tracker and db + let tracker = &mut *keychain_tracker.lock().unwrap(); + let db = &mut *db.lock().unwrap(); + let changeset = tracker.apply_update(wallet_scan)?; + db.append_changeset(&changeset)?; + } + } + EsploraCommands::Sync { + mut unused_spks, + mut utxos, + mut unconfirmed, + all_spks, + scan_options, + } => { + // Get a short lock on the tracker to get the spks we're interested in + let tracker = keychain_tracker.lock().unwrap(); + + if !(all_spks || unused_spks || utxos || unconfirmed) { + unused_spks = true; + unconfirmed = true; + utxos = true; + } else if all_spks { + unused_spks = false; + } + + let mut spks: Box> = + Box::new(core::iter::empty()); + if all_spks { + let all_spks = tracker + .txout_index + .all_spks() + .iter() + .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| { + eprintln!("scanning {:?}", index); + script + }))); + } + if unused_spks { + let unused_spks = tracker + .txout_index + .unused_spks(..) 
+ .map(|(k, v)| (*k, v.clone())) + .collect::>(); + spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| { + eprintln!( + "Checking if address {} {:?} has been used", + Address::from_script(&script, args.network).unwrap(), + index + ); + + script + }))); + } + + let mut outpoints: Box> = Box::new(core::iter::empty()); + + if utxos { + let utxos = tracker + .full_utxos() + .map(|(_, utxo)| utxo) + .collect::>(); + outpoints = Box::new( + utxos + .into_iter() + .inspect(|utxo| { + eprintln!( + "Checking if outpoint {} (value: {}) has been spent", + utxo.outpoint, utxo.txout.value + ); + }) + .map(|utxo| utxo.outpoint), + ); + }; + + let mut txids: Box> = Box::new(core::iter::empty()); + + if unconfirmed { + let unconfirmed_txids = tracker + .chain() + .range_txids_by_height(TxHeight::Unconfirmed..) + .map(|(_, txid)| *txid) + .collect::>(); + + txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| { + eprintln!("Checking if {} is confirmed yet", txid); + })); + } + + let local_chain = tracker.chain().checkpoints().clone(); + + // drop lock on tracker + drop(tracker); + + // we scan the desired spks **without** a lock on the tracker + let scan = client + .scan_without_keychain( + &local_chain, + spks, + txids, + outpoints, + scan_options.parallel_requests, + ) + .context("scanning the blockchain")?; + + { + // we take a short lock to apply the results to the tracker and db + let tracker = &mut *keychain_tracker.lock().unwrap(); + let changeset = tracker.apply_update(scan.into())?; + let db = &mut *db.lock().unwrap(); + db.append_changeset(&changeset)?; + } + } + } + + Ok(()) +} diff --git a/example-crates/keychain_tracker_example_cli/.gitignore b/example-crates/keychain_tracker_example_cli/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/.gitignore @@ -0,0 +1 @@ +/target diff --git a/example-crates/keychain_tracker_example_cli/Cargo.toml 
b/example-crates/keychain_tracker_example_cli/Cargo.toml new file mode 100644 index 000000000..72009b0c5 --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "keychain_tracker_example_cli" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3.1", features = ["serde", "miniscript"]} +bdk_file_store = { path = "../../crates/file_store" } +bdk_tmp_plan = { path = "../../nursery/tmp_plan" } +bdk_coin_select = { path = "../../nursery/coin_select" } + +clap = { version = "3.2.23", features = ["derive", "env"] } +anyhow = "1" +serde = { version = "1", features = ["derive"] } +serde_json = { version = "^1.0" } diff --git a/example-crates/keychain_tracker_example_cli/README.md b/example-crates/keychain_tracker_example_cli/README.md new file mode 100644 index 000000000..1d9370d1a --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/README.md @@ -0,0 +1 @@ +Provides common command line processing logic between examples using the `KeychainTracker` diff --git a/example-crates/keychain_tracker_example_cli/src/lib.rs b/example-crates/keychain_tracker_example_cli/src/lib.rs new file mode 100644 index 000000000..b18016812 --- /dev/null +++ b/example-crates/keychain_tracker_example_cli/src/lib.rs @@ -0,0 +1,692 @@ +pub extern crate anyhow; +use anyhow::{anyhow, Context, Result}; +use bdk_chain::{ + bitcoin::{ + secp256k1::Secp256k1, + util::sighash::{Prevouts, SighashCache}, + Address, LockTime, Network, Sequence, Transaction, TxIn, TxOut, + }, + chain_graph::InsertTxError, + keychain::{DerivationAdditions, KeychainChangeSet, KeychainTracker}, + miniscript::{ + descriptor::{DescriptorSecretKey, KeyMap}, + Descriptor, DescriptorPublicKey, + }, + sparse_chain::{self, ChainPosition}, + DescriptorExt, FullTxOut, +}; +use bdk_coin_select::{coin_select_bnb, 
CoinSelector, CoinSelectorOpt, WeightedValue}; +use bdk_file_store::KeychainStore; +use clap::{Parser, Subcommand}; +use std::{ + cmp::Reverse, collections::HashMap, fmt::Debug, path::PathBuf, sync::Mutex, time::Duration, +}; + +pub use bdk_file_store; +pub use clap; + +#[derive(Parser)] +#[clap(author, version, about, long_about = None)] +#[clap(propagate_version = true)] +pub struct Args { + #[clap(env = "DESCRIPTOR")] + pub descriptor: String, + #[clap(env = "CHANGE_DESCRIPTOR")] + pub change_descriptor: Option, + + #[clap(env = "BITCOIN_NETWORK", long, default_value = "signet")] + pub network: Network, + + #[clap(env = "BDK_DB_PATH", long, default_value = ".bdk_example_db")] + pub db_path: PathBuf, + + #[clap(env = "BDK_CP_LIMIT", long, default_value = "20")] + pub cp_limit: usize, + + #[clap(subcommand)] + pub command: Commands, +} + +#[derive(Subcommand, Debug, Clone)] +pub enum Commands { + #[clap(flatten)] + ChainSpecific(C), + /// Address generation and inspection + Address { + #[clap(subcommand)] + addr_cmd: AddressCmd, + }, + /// Get the wallet balance + Balance, + /// TxOut related commands + #[clap(name = "txout")] + TxOut { + #[clap(subcommand)] + txout_cmd: TxOutCmd, + }, + /// Send coins to an address + Send { + value: u64, + address: Address, + #[clap(short, default_value = "largest-first")] + coin_select: CoinSelectionAlgo, + }, +} + +#[derive(Clone, Debug)] +pub enum CoinSelectionAlgo { + LargestFirst, + SmallestFirst, + OldestFirst, + NewestFirst, + BranchAndBound, +} + +impl Default for CoinSelectionAlgo { + fn default() -> Self { + Self::LargestFirst + } +} + +impl core::str::FromStr for CoinSelectionAlgo { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + use CoinSelectionAlgo::*; + Ok(match s { + "largest-first" => LargestFirst, + "smallest-first" => SmallestFirst, + "oldest-first" => OldestFirst, + "newest-first" => NewestFirst, + "bnb" => BranchAndBound, + unknown => return Err(anyhow!("unknown coin selection algorithm 
'{}'", unknown)), + }) + } +} + +impl core::fmt::Display for CoinSelectionAlgo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CoinSelectionAlgo::*; + write!( + f, + "{}", + match self { + LargestFirst => "largest-first", + SmallestFirst => "smallest-first", + OldestFirst => "oldest-first", + NewestFirst => "newest-first", + BranchAndBound => "bnb", + } + ) + } +} + +#[derive(Subcommand, Debug, Clone)] +pub enum AddressCmd { + /// Get the next unused address + Next, + /// Get a new address regardless if the existing ones haven't been used + New, + /// List all addresses + List { + #[clap(long)] + change: bool, + }, + Index, +} + +#[derive(Subcommand, Debug, Clone)] +pub enum TxOutCmd { + List { + /// Return only spent outputs + #[clap(short, long)] + spent: bool, + /// Return only unspent outputs + #[clap(short, long)] + unspent: bool, + /// Return only confirmed outputs + #[clap(long)] + confirmed: bool, + /// Return only unconfirmed outputs + #[clap(long)] + unconfirmed: bool, + }, +} + +#[derive( + Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize, +)] +pub enum Keychain { + External, + Internal, +} + +impl core::fmt::Display for Keychain { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Keychain::External => write!(f, "external"), + Keychain::Internal => write!(f, "internal"), + } + } +} + +/// A structure defining output of a AddressCmd execution. +#[derive(serde::Serialize, serde::Deserialize)] +pub struct AddrsOutput { + keychain: String, + index: u32, + addrs: Address, + used: bool, +} + +pub fn run_address_cmd

( + tracker: &Mutex>, + db: &Mutex>, + addr_cmd: AddressCmd, + network: Network, +) -> Result<()> +where + P: bdk_chain::sparse_chain::ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + let mut tracker = tracker.lock().unwrap(); + let txout_index = &mut tracker.txout_index; + + let addr_cmmd_output = match addr_cmd { + AddressCmd::Next => Some(txout_index.next_unused_spk(&Keychain::External)), + AddressCmd::New => Some(txout_index.reveal_next_spk(&Keychain::External)), + _ => None, + }; + + if let Some(((index, spk), additions)) = addr_cmmd_output { + let mut db = db.lock().unwrap(); + // update database since we're about to give out a new address + db.append_changeset(&additions.into())?; + + let spk = spk.clone(); + let address = + Address::from_script(&spk, network).expect("should always be able to derive address"); + eprintln!("This is the address at index {}", index); + println!("{}", address); + } + + match addr_cmd { + AddressCmd::Next | AddressCmd::New => { + /* covered */ + Ok(()) + } + AddressCmd::Index => { + for (keychain, derivation_index) in txout_index.last_revealed_indices() { + println!("{:?}: {}", keychain, derivation_index); + } + Ok(()) + } + AddressCmd::List { change } => { + let target_keychain = match change { + true => Keychain::Internal, + false => Keychain::External, + }; + for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) { + let address = Address::from_script(spk, network) + .expect("should always be able to derive address"); + println!( + "{:?} {} used:{}", + index, + address, + txout_index.is_used(&(target_keychain, index)) + ); + } + Ok(()) + } + } +} + +pub fn run_balance_cmd(tracker: &Mutex>) { + let tracker = tracker.lock().unwrap(); + let (confirmed, unconfirmed) = + tracker + .full_utxos() + .fold((0, 0), |(confirmed, unconfirmed), (_, utxo)| { + if utxo.chain_position.height().is_confirmed() { + (confirmed + utxo.txout.value, unconfirmed) + } else { + (confirmed, 
unconfirmed + utxo.txout.value) + } + }); + + println!("confirmed: {}", confirmed); + println!("unconfirmed: {}", unconfirmed); +} + +pub fn run_txo_cmd( + txout_cmd: TxOutCmd, + tracker: &Mutex>, + network: Network, +) { + match txout_cmd { + TxOutCmd::List { + unspent, + spent, + confirmed, + unconfirmed, + } => { + let tracker = tracker.lock().unwrap(); + #[allow(clippy::type_complexity)] // FIXME + let txouts: Box)>> = match (unspent, spent) + { + (true, false) => Box::new(tracker.full_utxos()), + (false, true) => Box::new( + tracker + .full_txouts() + .filter(|(_, txout)| txout.spent_by.is_some()), + ), + _ => Box::new(tracker.full_txouts()), + }; + + #[allow(clippy::type_complexity)] // FIXME + let txouts: Box)>> = + match (confirmed, unconfirmed) { + (true, false) => Box::new( + txouts.filter(|(_, txout)| txout.chain_position.height().is_confirmed()), + ), + (false, true) => Box::new( + txouts.filter(|(_, txout)| !txout.chain_position.height().is_confirmed()), + ), + _ => txouts, + }; + + for (spk_index, full_txout) in txouts { + let address = + Address::from_script(&full_txout.txout.script_pubkey, network).unwrap(); + + println!( + "{:?} {} {} {} spent:{:?}", + spk_index, + full_txout.txout.value, + full_txout.outpoint, + address, + full_txout.spent_by + ) + } + } + } +} + +#[allow(clippy::type_complexity)] // FIXME +pub fn create_tx( + value: u64, + address: Address, + coin_select: CoinSelectionAlgo, + keychain_tracker: &mut KeychainTracker, + keymap: &HashMap, +) -> Result<( + Transaction, + Option<(DerivationAdditions, (Keychain, u32))>, +)> { + let mut additions = DerivationAdditions::default(); + + let assets = bdk_tmp_plan::Assets { + keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(), + ..Default::default() + }; + + // TODO use planning module + let mut candidates = planned_utxos(keychain_tracker, &assets).collect::>(); + + // apply coin selection algorithm + match coin_select { + CoinSelectionAlgo::LargestFirst => { + 
candidates.sort_by_key(|(_, utxo)| Reverse(utxo.txout.value)) + } + CoinSelectionAlgo::SmallestFirst => candidates.sort_by_key(|(_, utxo)| utxo.txout.value), + CoinSelectionAlgo::OldestFirst => { + candidates.sort_by_key(|(_, utxo)| utxo.chain_position.clone()) + } + CoinSelectionAlgo::NewestFirst => { + candidates.sort_by_key(|(_, utxo)| Reverse(utxo.chain_position.clone())) + } + CoinSelectionAlgo::BranchAndBound => {} + } + + // turn the txos we chose into a weight and value + let wv_candidates = candidates + .iter() + .map(|(plan, utxo)| { + WeightedValue::new( + utxo.txout.value, + plan.expected_weight() as _, + plan.witness_version().is_some(), + ) + }) + .collect(); + + let mut outputs = vec![TxOut { + value, + script_pubkey: address.script_pubkey(), + }]; + + let internal_keychain = if keychain_tracker + .txout_index + .keychains() + .get(&Keychain::Internal) + .is_some() + { + Keychain::Internal + } else { + Keychain::External + }; + + let ((change_index, change_script), change_additions) = keychain_tracker + .txout_index + .next_unused_spk(&internal_keychain); + additions.append(change_additions); + + // Clone to drop the immutable reference. + let change_script = change_script.clone(); + + let change_plan = bdk_tmp_plan::plan_satisfaction( + &keychain_tracker + .txout_index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .at_derivation_index(change_index), + &assets, + ) + .expect("failed to obtain change plan"); + + let mut change_output = TxOut { + value: 0, + script_pubkey: change_script, + }; + + let cs_opts = CoinSelectorOpt { + target_feerate: 0.5, + min_drain_value: keychain_tracker + .txout_index + .keychains() + .get(&internal_keychain) + .expect("must exist") + .dust_value(), + ..CoinSelectorOpt::fund_outputs( + &outputs, + &change_output, + change_plan.expected_weight() as u32, + ) + }; + + // TODO: How can we make it easy to shuffle in order of inputs and outputs here? 
+ // apply coin selection by saying we need to fund these outputs + let mut coin_selector = CoinSelector::new(&wv_candidates, &cs_opts); + + // just select coins in the order provided until we have enough + // only use first result (least waste) + let selection = match coin_select { + CoinSelectionAlgo::BranchAndBound => { + coin_select_bnb(Duration::from_secs(10), coin_selector.clone()) + .map_or_else(|| coin_selector.select_until_finished(), |cs| cs.finish())? + } + _ => coin_selector.select_until_finished()?, + }; + let (_, selection_meta) = selection.best_strategy(); + + // get the selected utxos + let selected_txos = selection.apply_selection(&candidates).collect::>(); + + if let Some(drain_value) = selection_meta.drain_value { + change_output.value = drain_value; + // if the selection tells us to use change and the change value is sufficient we add it as an output + outputs.push(change_output) + } + + let mut transaction = Transaction { + version: 0x02, + lock_time: keychain_tracker + .chain() + .latest_checkpoint() + .and_then(|block_id| LockTime::from_height(block_id.height).ok()) + .unwrap_or(LockTime::ZERO) + .into(), + input: selected_txos + .iter() + .map(|(_, utxo)| TxIn { + previous_output: utxo.outpoint, + sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, + ..Default::default() + }) + .collect(), + output: outputs, + }; + + let prevouts = selected_txos + .iter() + .map(|(_, utxo)| utxo.txout.clone()) + .collect::>(); + let sighash_prevouts = Prevouts::All(&prevouts); + + // first set tx values for plan so that we don't change them while signing + for (i, (plan, _)) in selected_txos.iter().enumerate() { + if let Some(sequence) = plan.required_sequence() { + transaction.input[i].sequence = sequence + } + } + + // create a short lived transaction + let _sighash_tx = transaction.clone(); + let mut sighash_cache = SighashCache::new(&_sighash_tx); + + for (i, (plan, _)) in selected_txos.iter().enumerate() { + let requirements = plan.requirements(); + let mut 
auth_data = bdk_tmp_plan::SatisfactionMaterial::default(); + assert!( + !requirements.requires_hash_preimages(), + "can't have hash pre-images since we didn't provide any" + ); + assert!( + requirements.signatures.sign_with_keymap( + i, + keymap, + &sighash_prevouts, + None, + None, + &mut sighash_cache, + &mut auth_data, + &Secp256k1::default(), + )?, + "we should have signed with this input" + ); + + match plan.try_complete(&auth_data) { + bdk_tmp_plan::PlanState::Complete { + final_script_sig, + final_script_witness, + } => { + if let Some(witness) = final_script_witness { + transaction.input[i].witness = witness; + } + + if let Some(script_sig) = final_script_sig { + transaction.input[i].script_sig = script_sig; + } + } + bdk_tmp_plan::PlanState::Incomplete(_) => { + return Err(anyhow!( + "we weren't able to complete the plan with our keys" + )); + } + } + } + + let change_info = if selection_meta.drain_value.is_some() { + Some((additions, (internal_keychain, change_index))) + } else { + None + }; + + Ok((transaction, change_info)) +} + +pub fn handle_commands( + command: Commands, + broadcast: impl FnOnce(&Transaction) -> Result<()>, + // we Mutexes around these not because we need them for a simple CLI app but to demonsrate how + // all the stuff we're doing can be thread safe and also not keep locks up over an IO bound. 
+ tracker: &Mutex>, + store: &Mutex>, + network: Network, + keymap: &HashMap, +) -> Result<()> +where + P: ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + match command { + // TODO: Make these functions return stuffs + Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network), + Commands::Balance => { + run_balance_cmd(tracker); + Ok(()) + } + Commands::TxOut { txout_cmd } => { + run_txo_cmd(txout_cmd, tracker, network); + Ok(()) + } + Commands::Send { + value, + address, + coin_select, + } => { + let (transaction, change_index) = { + // take mutable ref to construct tx -- it is only open for a short time while building it. + let tracker = &mut *tracker.lock().unwrap(); + let (transaction, change_info) = + create_tx(value, address, coin_select, tracker, keymap)?; + + if let Some((change_derivation_changes, (change_keychain, index))) = change_info { + // We must first persist to disk the fact that we've got a new address from the + // change keychain so future scans will find the tx we're about to broadcast. + // If we're unable to persist this then we don't want to broadcast. + let store = &mut *store.lock().unwrap(); + store.append_changeset(&change_derivation_changes.into())?; + + // We don't want other callers/threads to use this address while we're using it + // but we also don't want to scan the tx we just created because it's not + // technically in the blockchain yet. + tracker.txout_index.mark_used(&change_keychain, index); + (transaction, Some((change_keychain, index))) + } else { + (transaction, None) + } + }; + + match (broadcast)(&transaction) { + Ok(_) => { + println!("Broadcasted Tx : {}", transaction.txid()); + let mut tracker = tracker.lock().unwrap(); + match tracker.insert_tx(transaction.clone(), P::unconfirmed()) { + Ok(changeset) => { + let store = &mut *store.lock().unwrap(); + // We know the tx is at least unconfirmed now. 
Note if persisting here + // fails it's not a big deal since we can always find it again form + // blockchain. + store.append_changeset(&changeset)?; + Ok(()) + } + Err(e) => match e { + InsertTxError::Chain(e) => match e { + // TODO: add insert_unconfirmed_tx to chain graph and sparse chain + sparse_chain::InsertTxError::TxTooHigh { .. } => unreachable!("we are inserting at unconfirmed position"), + sparse_chain::InsertTxError::TxMovedUnexpectedly { txid, original_pos, ..} => Err(anyhow!("the tx we created {} has already been confirmed at block {:?}", txid, original_pos)), + }, + InsertTxError::UnresolvableConflict(e) => Err(e).context("another tx that conflicts with the one we tried to create has been confirmed"), + } + } + } + Err(e) => { + let tracker = &mut *tracker.lock().unwrap(); + if let Some((keychain, index)) = change_index { + // We failed to broadcast so allow our change address to be used in the future + tracker.txout_index.unmark_used(&keychain, index); + } + Err(e) + } + } + } + Commands::ChainSpecific(_) => { + todo!("example code is meant to handle this!") + } + } +} + +#[allow(clippy::type_complexity)] // FIXME +pub fn init() -> anyhow::Result<( + Args, + KeyMap, + // These don't need to have mutexes around them but we want the cli example code to make it obvious how they + // are thread safe so this forces the example developer to show where they would lock and unlock things. 
+ Mutex>, + Mutex>, +)> +where + P: sparse_chain::ChainPosition, + KeychainChangeSet: serde::Serialize + serde::de::DeserializeOwned, +{ + let args = Args::::parse(); + let secp = Secp256k1::default(); + let (descriptor, mut keymap) = + Descriptor::::parse_descriptor(&secp, &args.descriptor)?; + + let mut tracker = KeychainTracker::default(); + tracker.set_checkpoint_limit(Some(args.cp_limit)); + + tracker + .txout_index + .add_keychain(Keychain::External, descriptor); + + let internal = args + .change_descriptor + .clone() + .map(|descriptor| Descriptor::::parse_descriptor(&secp, &descriptor)) + .transpose()?; + if let Some((internal_descriptor, internal_keymap)) = internal { + keymap.extend(internal_keymap); + tracker + .txout_index + .add_keychain(Keychain::Internal, internal_descriptor); + }; + + let mut db = KeychainStore::::new_from_path(args.db_path.as_path())?; + + if let Err(e) = db.load_into_keychain_tracker(&mut tracker) { + match tracker.chain().latest_checkpoint() { + Some(checkpoint) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. Error: {}", args.db_path.display(), checkpoint.height, e), + None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e), + + } + eprintln!("⚠ Consider running a rescan of chain data."); + } + + Ok((args, keymap, Mutex::new(tracker), Mutex::new(db))) +} + +pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>( + tracker: &'a KeychainTracker, + assets: &'a bdk_tmp_plan::Assets, +) -> impl Iterator, FullTxOut

)> + 'a { + tracker + .full_utxos() + .filter_map(move |((keychain, derivation_index), full_txout)| { + Some(( + bdk_tmp_plan::plan_satisfaction( + &tracker + .txout_index + .keychains() + .get(keychain) + .expect("must exist since we have a utxo for it") + .at_derivation_index(*derivation_index), + assets, + )?, + full_txout, + )) + }) +} diff --git a/example-crates/wallet_electrum/Cargo.toml b/example-crates/wallet_electrum/Cargo.toml new file mode 100644 index 000000000..da84e85f1 --- /dev/null +++ b/example-crates/wallet_electrum/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "wallet_electrum_example" +version = "0.1.0" +edition = "2021" + +[dependencies] +bdk = { path = "../../crates/bdk" } +bdk_electrum = { path = "../../crates/electrum" } +bdk_file_store = { path = "../../crates/file_store" } diff --git a/example-crates/wallet_electrum/src/main.rs b/example-crates/wallet_electrum/src/main.rs new file mode 100644 index 000000000..5145d593b --- /dev/null +++ b/example-crates/wallet_electrum/src/main.rs @@ -0,0 +1,104 @@ +use std::{io::Write, str::FromStr}; + +use bdk::{ + bitcoin::{Address, Network}, + SignOptions, Wallet, +}; +use bdk_electrum::{ + electrum_client::{self, ElectrumApi}, + ElectrumExt, +}; +use bdk_file_store::KeychainStore; + +const SEND_AMOUNT: u64 = 5000; +const STOP_GAP: usize = 50; +const BATCH_SIZE: usize = 5; + +fn main() -> Result<(), Box> { + println!("Hello, world!"); + + let db_path = std::env::temp_dir().join("bdk-electrum-example"); + let db = KeychainStore::new_from_path(db_path)?; + let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + + let mut wallet = Wallet::new( + external_descriptor, + Some(internal_descriptor), + db, + Network::Testnet, + )?; + + let 
address = wallet.get_address(bdk::wallet::AddressIndex::New); + println!("Generated Address: {}", address); + + let balance = wallet.get_balance(); + println!("Wallet balance before syncing: {} sats", balance.total()); + + print!("Syncing..."); + // Scanning the chain... + let electrum_url = "ssl://electrum.blockstream.info:60002"; + let client = electrum_client::Client::new(electrum_url)?; + let local_chain = wallet.checkpoints(); + let spks = wallet + .spks_of_all_keychains() + .into_iter() + .map(|(k, spks)| { + let mut first = true; + ( + k, + spks.inspect(move |(spk_i, _)| { + if first { + first = false; + print!("\nScanning keychain [{:?}]:", k); + } + print!(" {}", spk_i); + let _ = std::io::stdout().flush(); + }), + ) + }) + .collect(); + let electrum_update = client + .scan( + local_chain, + spks, + core::iter::empty(), + core::iter::empty(), + STOP_GAP, + BATCH_SIZE, + )? + .into_confirmation_time_update(&client)?; + println!(); + let new_txs = client.batch_transaction_get(electrum_update.missing_full_txs(&wallet))?; + let update = electrum_update.into_keychain_scan(new_txs, &wallet)?; + wallet.apply_update(update)?; + wallet.commit()?; + + let balance = wallet.get_balance(); + println!("Wallet balance after syncing: {} sats", balance.total()); + + if balance.total() < SEND_AMOUNT { + println!( + "Please send at least {} sats to the receiving address", + SEND_AMOUNT + ); + std::process::exit(0); + } + + let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + + let mut tx_builder = wallet.build_tx(); + tx_builder + .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + .enable_rbf(); + + let (mut psbt, _) = tx_builder.finish()?; + let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + assert!(finalized); + + let tx = psbt.extract_tx(); + client.transaction_broadcast(&tx)?; + println!("Tx broadcasted! 
Txid: {}", tx.txid()); + + Ok(()) +} diff --git a/example-crates/wallet_esplora/Cargo.toml b/example-crates/wallet_esplora/Cargo.toml new file mode 100644 index 000000000..8e19cb7bd --- /dev/null +++ b/example-crates/wallet_esplora/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "bdk-esplora-wallet-example" +version = "0.1.0" +edition = "2021" +publish = false + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk = { path = "../../crates/bdk" } +bdk_esplora = { path = "../../crates/esplora", features = ["blocking"] } +bdk_file_store = { path = "../../crates/file_store" } diff --git a/example-crates/wallet_esplora/src/main.rs b/example-crates/wallet_esplora/src/main.rs new file mode 100644 index 000000000..d8eda32a2 --- /dev/null +++ b/example-crates/wallet_esplora/src/main.rs @@ -0,0 +1,96 @@ +use bdk::{ + bitcoin::{Address, Network}, + wallet::AddressIndex, + SignOptions, Wallet, +}; +use bdk_esplora::esplora_client; +use bdk_esplora::EsploraExt; +use bdk_file_store::KeychainStore; +use std::{io::Write, str::FromStr}; + +const SEND_AMOUNT: u64 = 5000; +const STOP_GAP: usize = 50; +const PARALLEL_REQUESTS: usize = 5; + +fn main() -> Result<(), Box> { + let db_path = std::env::temp_dir().join("bdk-esplora-example"); + let db = KeychainStore::new_from_path(db_path)?; + let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + + let mut wallet = Wallet::new( + external_descriptor, + Some(internal_descriptor), + db, + Network::Testnet, + )?; + + let address = wallet.get_address(AddressIndex::New); + println!("Generated Address: {}", address); + + let balance = wallet.get_balance(); + println!("Wallet balance before syncing: {} sats", 
balance.total()); + + print!("Syncing..."); + // Scanning the chain... + let esplora_url = "https://mempool.space/testnet/api"; + let client = esplora_client::Builder::new(esplora_url).build_blocking()?; + let checkpoints = wallet.checkpoints(); + let spks = wallet + .spks_of_all_keychains() + .into_iter() + .map(|(k, spks)| { + let mut first = true; + ( + k, + spks.inspect(move |(spk_i, _)| { + if first { + first = false; + print!("\nScanning keychain [{:?}]:", k); + } + print!(" {}", spk_i); + let _ = std::io::stdout().flush(); + }), + ) + }) + .collect(); + let update = client.scan( + checkpoints, + spks, + core::iter::empty(), + core::iter::empty(), + STOP_GAP, + PARALLEL_REQUESTS, + )?; + println!(); + wallet.apply_update(update)?; + wallet.commit()?; + + let balance = wallet.get_balance(); + println!("Wallet balance after syncing: {} sats", balance.total()); + + if balance.total() < SEND_AMOUNT { + println!( + "Please send at least {} sats to the receiving address", + SEND_AMOUNT + ); + std::process::exit(0); + } + + let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + + let mut tx_builder = wallet.build_tx(); + tx_builder + .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + .enable_rbf(); + + let (mut psbt, _) = tx_builder.finish()?; + let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + assert!(finalized); + + let tx = psbt.extract_tx(); + client.broadcast(&tx)?; + println!("Tx broadcasted! 
Txid: {}", tx.txid()); + + Ok(()) +} diff --git a/example-crates/wallet_esplora_async/Cargo.toml b/example-crates/wallet_esplora_async/Cargo.toml new file mode 100644 index 000000000..af368fc8d --- /dev/null +++ b/example-crates/wallet_esplora_async/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "wallet_esplora_async" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk = { path = "../../crates/bdk" } +bdk_esplora = { path = "../../crates/esplora", features = ["async-https"] } +bdk_file_store = { path = "../../crates/file_store" } +tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] } diff --git a/example-crates/wallet_esplora_async/src/main.rs b/example-crates/wallet_esplora_async/src/main.rs new file mode 100644 index 000000000..b78b09dfa --- /dev/null +++ b/example-crates/wallet_esplora_async/src/main.rs @@ -0,0 +1,99 @@ +use std::{io::Write, str::FromStr}; + +use bdk::{ + bitcoin::{Address, Network}, + wallet::AddressIndex, + SignOptions, Wallet, +}; +use bdk_esplora::{esplora_client, EsploraAsyncExt}; +use bdk_file_store::KeychainStore; + +const SEND_AMOUNT: u64 = 5000; +const STOP_GAP: usize = 50; +const PARALLEL_REQUESTS: usize = 5; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let db_path = std::env::temp_dir().join("bdk-esplora-example"); + let db = KeychainStore::new_from_path(db_path)?; + let external_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/0/*)"; + let internal_descriptor = "wpkh(tprv8ZgxMBicQKsPdy6LMhUtFHAgpocR8GC6QmwMSFpZs7h6Eziw3SpThFfczTDh5rW2krkqffa11UpX3XkeTTB2FvzZKWXqPY54Y6Rq4AQ5R8L/84'/0'/0'/1/*)"; + + let mut wallet = Wallet::new( + external_descriptor, + Some(internal_descriptor), + db, + Network::Testnet, + )?; + + let address = wallet.get_address(AddressIndex::New); + println!("Generated Address: {}", 
address); + + let balance = wallet.get_balance(); + println!("Wallet balance before syncing: {} sats", balance.total()); + + print!("Syncing..."); + // Scanning the blockchain + let esplora_url = "https://mempool.space/testnet/api"; + let client = esplora_client::Builder::new(esplora_url).build_async()?; + let checkpoints = wallet.checkpoints(); + let spks = wallet + .spks_of_all_keychains() + .into_iter() + .map(|(k, spks)| { + let mut first = true; + ( + k, + spks.inspect(move |(spk_i, _)| { + if first { + first = false; + print!("\nScanning keychain [{:?}]:", k); + } + print!(" {}", spk_i); + let _ = std::io::stdout().flush(); + }), + ) + }) + .collect(); + let update = client + .scan( + checkpoints, + spks, + std::iter::empty(), + std::iter::empty(), + STOP_GAP, + PARALLEL_REQUESTS, + ) + .await?; + println!(); + wallet.apply_update(update)?; + wallet.commit()?; + + let balance = wallet.get_balance(); + println!("Wallet balance after syncing: {} sats", balance.total()); + + if balance.total() < SEND_AMOUNT { + println!( + "Please send at least {} sats to the receiving address", + SEND_AMOUNT + ); + std::process::exit(0); + } + + let faucet_address = Address::from_str("mkHS9ne12qx9pS9VojpwU5xtRd4T7X7ZUt")?; + + let mut tx_builder = wallet.build_tx(); + tx_builder + .add_recipient(faucet_address.script_pubkey(), SEND_AMOUNT) + .enable_rbf(); + + let (mut psbt, _) = tx_builder.finish()?; + let finalized = wallet.sign(&mut psbt, SignOptions::default())?; + assert!(finalized); + + let tx = psbt.extract_tx(); + client.broadcast(&tx).await?; + println!("Tx broadcasted! 
Txid: {}", tx.txid()); + + Ok(()) +} diff --git a/examples/compact_filters_balance.rs b/examples/compact_filters_balance.rs deleted file mode 100644 index ce875b4d5..000000000 --- a/examples/compact_filters_balance.rs +++ /dev/null @@ -1,41 +0,0 @@ -// Bitcoin Dev Kit -// Written in 2020 by Alekos Filini -// -// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers -// -// This file is licensed under the Apache License, Version 2.0 or the MIT license -// , at your option. -// You may not use this file except in accordance with one or both of these -// licenses. - -use bdk::blockchain::compact_filters::*; -use bdk::database::MemoryDatabase; -use bdk::*; -use bitcoin::*; -use blockchain::compact_filters::CompactFiltersBlockchain; -use blockchain::compact_filters::CompactFiltersError; -use log::info; -use std::sync::Arc; - -/// This will return wallet balance using compact filters -/// Requires a synced local bitcoin node 0.21 running on testnet with blockfilterindex=1 and peerblockfilters=1 -fn main() -> Result<(), CompactFiltersError> { - env_logger::init(); - info!("start"); - - let num_threads = 4; - let mempool = Arc::new(Mempool::default()); - let peers = (0..num_threads) - .map(|_| Peer::connect("localhost:18333", Arc::clone(&mempool), Network::Testnet)) - .collect::>()?; - let blockchain = CompactFiltersBlockchain::new(peers, "./wallet-filters", Some(500_000))?; - info!("done {:?}", blockchain); - let descriptor = "wpkh(tpubD6NzVbkrYhZ4X2yy78HWrr1M9NT8dKeWfzNiQqDdMqqa9UmmGztGGz6TaLFGsLfdft5iu32gxq1T4eMNxExNNWzVCpf9Y6JZi5TnqoC9wJq/*)"; - - let database = MemoryDatabase::default(); - let wallet = Arc::new(Wallet::new(descriptor, None, Network::Testnet, database).unwrap()); - wallet.sync(&blockchain, SyncOptions::default()).unwrap(); - info!("balance: {}", wallet.get_balance()?); - Ok(()) -} diff --git a/examples/electrum_backend.rs b/examples/electrum_backend.rs deleted file mode 100644 index 5259865f3..000000000 --- a/examples/electrum_backend.rs +++ /dev/null @@ 
-1,87 +0,0 @@ -use std::str::FromStr; - -use bdk::bitcoin::util::bip32::ExtendedPrivKey; -use bdk::bitcoin::Network; -use bdk::blockchain::{Blockchain, ElectrumBlockchain}; -use bdk::database::MemoryDatabase; -use bdk::template::Bip84; -use bdk::wallet::export::FullyNodedExport; -use bdk::{KeychainKind, SyncOptions, Wallet}; - -use bdk::electrum_client::Client; -use bdk::wallet::AddressIndex; -use bitcoin::util::bip32; - -pub mod utils; - -use crate::utils::tx::build_signed_tx; - -/// This will create a wallet from an xpriv and get the balance by connecting to an Electrum server. -/// If enough amount is available, this will send a transaction to an address. -/// Otherwise, this will display a wallet address to receive funds. -/// -/// This can be run with `cargo run --example electrum_backend` in the root folder. -fn main() { - let network = Network::Testnet; - - let xpriv = "tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy"; - - let electrum_url = "ssl://electrum.blockstream.info:60002"; - - run(&network, electrum_url, xpriv); -} - -fn create_wallet(network: &Network, xpriv: &ExtendedPrivKey) -> Wallet { - Wallet::new( - Bip84(*xpriv, KeychainKind::External), - Some(Bip84(*xpriv, KeychainKind::Internal)), - *network, - MemoryDatabase::default(), - ) - .unwrap() -} - -fn run(network: &Network, electrum_url: &str, xpriv: &str) { - let xpriv = bip32::ExtendedPrivKey::from_str(xpriv).unwrap(); - - // Apparently it works only with Electrs (not EletrumX) - let blockchain = ElectrumBlockchain::from(Client::new(electrum_url).unwrap()); - - let wallet = create_wallet(network, &xpriv); - - wallet.sync(&blockchain, SyncOptions::default()).unwrap(); - - let address = wallet.get_address(AddressIndex::New).unwrap().address; - - println!("address: {}", address); - - let balance = wallet.get_balance().unwrap(); - - println!("Available coins in BDK wallet : {} sats", balance); - - if balance.confirmed > 6500 { - // 
the wallet sends the amount to itself. - let recipient_address = wallet - .get_address(AddressIndex::New) - .unwrap() - .address - .to_string(); - - let amount = 5359; - - let tx = build_signed_tx(&wallet, &recipient_address, amount); - - blockchain.broadcast(&tx).unwrap(); - - println!("tx id: {}", tx.txid()); - } else { - println!("Insufficient Funds. Fund the wallet with the address above"); - } - - let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true) - .map_err(ToString::to_string) - .map_err(bdk::Error::Generic) - .unwrap(); - - println!("------\nWallet Backup: {}", export.to_string()); -} diff --git a/examples/esplora_backend_asynchronous.rs b/examples/esplora_backend_asynchronous.rs deleted file mode 100644 index 4aa149ba3..000000000 --- a/examples/esplora_backend_asynchronous.rs +++ /dev/null @@ -1,93 +0,0 @@ -use std::str::FromStr; - -use bdk::blockchain::Blockchain; -use bdk::{ - blockchain::esplora::EsploraBlockchain, - database::MemoryDatabase, - template::Bip84, - wallet::{export::FullyNodedExport, AddressIndex}, - KeychainKind, SyncOptions, Wallet, -}; -use bitcoin::{ - util::bip32::{self, ExtendedPrivKey}, - Network, -}; - -pub mod utils; - -use crate::utils::tx::build_signed_tx; - -/// This will create a wallet from an xpriv and get the balance by connecting to an Esplora server, -/// using non blocking asynchronous calls with `reqwest`. -/// If enough amount is available, this will send a transaction to an address. -/// Otherwise, this will display a wallet address to receive funds. -/// -/// This can be run with `cargo run --no-default-features --features="use-esplora-reqwest, reqwest-default-tls, async-interface" --example esplora_backend_asynchronous` -/// in the root folder. 
-#[tokio::main(flavor = "current_thread")] -async fn main() { - let network = Network::Signet; - - let xpriv = "tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy"; - - let esplora_url = "https://explorer.bc-2.jp/api"; - - run(&network, esplora_url, xpriv).await; -} - -fn create_wallet(network: &Network, xpriv: &ExtendedPrivKey) -> Wallet { - Wallet::new( - Bip84(*xpriv, KeychainKind::External), - Some(Bip84(*xpriv, KeychainKind::Internal)), - *network, - MemoryDatabase::default(), - ) - .unwrap() -} - -async fn run(network: &Network, esplora_url: &str, xpriv: &str) { - let xpriv = bip32::ExtendedPrivKey::from_str(xpriv).unwrap(); - - let blockchain = EsploraBlockchain::new(esplora_url, 20); - - let wallet = create_wallet(network, &xpriv); - - wallet - .sync(&blockchain, SyncOptions::default()) - .await - .unwrap(); - - let address = wallet.get_address(AddressIndex::New).unwrap().address; - - println!("address: {}", address); - - let balance = wallet.get_balance().unwrap(); - - println!("Available coins in BDK wallet : {} sats", balance); - - if balance.confirmed > 10500 { - // the wallet sends the amount to itself. - let recipient_address = wallet - .get_address(AddressIndex::New) - .unwrap() - .address - .to_string(); - - let amount = 9359; - - let tx = build_signed_tx(&wallet, &recipient_address, amount); - - let _ = blockchain.broadcast(&tx); - - println!("tx id: {}", tx.txid()); - } else { - println!("Insufficient Funds. 
Fund the wallet with the address above"); - } - - let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true) - .map_err(ToString::to_string) - .map_err(bdk::Error::Generic) - .unwrap(); - - println!("------\nWallet Backup: {}", export.to_string()); -} diff --git a/examples/esplora_backend_synchronous.rs b/examples/esplora_backend_synchronous.rs deleted file mode 100644 index 31907f836..000000000 --- a/examples/esplora_backend_synchronous.rs +++ /dev/null @@ -1,89 +0,0 @@ -use std::str::FromStr; - -use bdk::blockchain::Blockchain; -use bdk::{ - blockchain::esplora::EsploraBlockchain, - database::MemoryDatabase, - template::Bip84, - wallet::{export::FullyNodedExport, AddressIndex}, - KeychainKind, SyncOptions, Wallet, -}; -use bitcoin::{ - util::bip32::{self, ExtendedPrivKey}, - Network, -}; - -pub mod utils; - -use crate::utils::tx::build_signed_tx; - -/// This will create a wallet from an xpriv and get the balance by connecting to an Esplora server, -/// using blocking calls with `ureq`. -/// If enough amount is available, this will send a transaction to an address. -/// Otherwise, this will display a wallet address to receive funds. -/// -/// This can be run with `cargo run --features=use-esplora-ureq --example esplora_backend_synchronous` -/// in the root folder. 
-fn main() { - let network = Network::Signet; - - let xpriv = "tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy"; - - let esplora_url = "https://explorer.bc-2.jp/api"; - - run(&network, esplora_url, xpriv); -} - -fn create_wallet(network: &Network, xpriv: &ExtendedPrivKey) -> Wallet { - Wallet::new( - Bip84(*xpriv, KeychainKind::External), - Some(Bip84(*xpriv, KeychainKind::Internal)), - *network, - MemoryDatabase::default(), - ) - .unwrap() -} - -fn run(network: &Network, esplora_url: &str, xpriv: &str) { - let xpriv = bip32::ExtendedPrivKey::from_str(xpriv).unwrap(); - - let blockchain = EsploraBlockchain::new(esplora_url, 20); - - let wallet = create_wallet(network, &xpriv); - - wallet.sync(&blockchain, SyncOptions::default()).unwrap(); - - let address = wallet.get_address(AddressIndex::New).unwrap().address; - - println!("address: {}", address); - - let balance = wallet.get_balance().unwrap(); - - println!("Available coins in BDK wallet : {} sats", balance); - - if balance.confirmed > 10500 { - // the wallet sends the amount to itself. - let recipient_address = wallet - .get_address(AddressIndex::New) - .unwrap() - .address - .to_string(); - - let amount = 9359; - - let tx = build_signed_tx(&wallet, &recipient_address, amount); - - blockchain.broadcast(&tx).unwrap(); - - println!("tx id: {}", tx.txid()); - } else { - println!("Insufficient Funds. 
Fund the wallet with the address above"); - } - - let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true) - .map_err(ToString::to_string) - .map_err(bdk::Error::Generic) - .unwrap(); - - println!("------\nWallet Backup: {}", export.to_string()); -} diff --git a/examples/hardware_signer.rs b/examples/hardware_signer.rs deleted file mode 100644 index d1c25f1ab..000000000 --- a/examples/hardware_signer.rs +++ /dev/null @@ -1,105 +0,0 @@ -use bdk::bitcoin::{Address, Network}; -use bdk::blockchain::{Blockchain, ElectrumBlockchain}; -use bdk::database::MemoryDatabase; -use bdk::hwi::{types::HWIChain, HWIClient}; -use bdk::miniscript::{Descriptor, DescriptorPublicKey}; -use bdk::signer::SignerOrdering; -use bdk::wallet::{hardwaresigner::HWISigner, AddressIndex}; -use bdk::{FeeRate, KeychainKind, SignOptions, SyncOptions, Wallet}; -use electrum_client::Client; -use std::str::FromStr; -use std::sync::Arc; - -// This example shows how to sync a wallet, create a transaction, sign it -// and broadcast it using an external hardware wallet. -// The hardware wallet must be connected to the computer and unlocked before -// running the example. Also, the `hwi` python package should be installed -// and available in the environment. -// -// To avoid loss of funds, consider using an hardware wallet simulator: -// * Coldcard: https://github.com/Coldcard/firmware -// * Ledger: https://github.com/LedgerHQ/speculos -// * Trezor: https://docs.trezor.io/trezor-firmware/core/emulator/index.html -fn main() -> Result<(), Box> { - println!("Hold tight, I'm connecting to your hardware wallet..."); - - // Listing all the available hardware wallet devices... - let mut devices = HWIClient::enumerate()?; - if devices.is_empty() { - panic!("No devices found. 
Either plug in a hardware wallet, or start a simulator."); - } - let first_device = devices.remove(0)?; - // ...and creating a client out of the first one - let client = HWIClient::get_client(&first_device, true, HWIChain::Test)?; - println!("Look what I found, a {}!", first_device.model); - - // Getting the HW's public descriptors - let descriptors = client.get_descriptors::>(None)?; - println!( - "The hardware wallet's descriptor is: {}", - descriptors.receive[0] - ); - - // Creating a custom signer from the device - let custom_signer = HWISigner::from_device(&first_device, HWIChain::Test)?; - let mut wallet = Wallet::new( - descriptors.receive[0].clone(), - Some(descriptors.internal[0].clone()), - Network::Testnet, - MemoryDatabase::default(), - )?; - - // Adding the hardware signer to the BDK wallet - wallet.add_signer( - KeychainKind::External, - SignerOrdering(200), - Arc::new(custom_signer), - ); - - // create client for Blockstream's testnet electrum server - let blockchain = - ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); - - println!("Syncing the wallet..."); - wallet.sync(&blockchain, SyncOptions::default())?; - - // get deposit address - let deposit_address = wallet.get_address(AddressIndex::New)?; - - let balance = wallet.get_balance()?; - println!("Wallet balances in SATs: {}", balance); - - if balance.get_total() < 10000 { - println!( - "Send some sats from the u01.net testnet faucet to address '{addr}'.\nFaucet URL: https://bitcoinfaucet.uo1.net/?to={addr}", - addr = deposit_address.address - ); - return Ok(()); - } - - let return_address = Address::from_str("tb1ql7w62elx9ucw4pj5lgw4l028hmuw80sndtntxt")?; - let (mut psbt, _details) = { - let mut builder = wallet.build_tx(); - builder - .drain_wallet() - .drain_to(return_address.script_pubkey()) - .enable_rbf() - .fee_rate(FeeRate::from_sat_per_vb(5.0)); - builder.finish()? 
- }; - - // `sign` will call the hardware wallet asking for a signature - assert!( - wallet.sign(&mut psbt, SignOptions::default())?, - "The hardware wallet couldn't finalize the transaction :(" - ); - - println!("Let's broadcast your tx..."); - let raw_transaction = psbt.extract_tx(); - let txid = raw_transaction.txid(); - - blockchain.broadcast(&raw_transaction)?; - println!("Transaction broadcasted! TXID: {txid}.\nExplorer URL: https://mempool.space/testnet/tx/{txid}", txid = txid); - - Ok(()) -} diff --git a/examples/psbt_signer.rs b/examples/psbt_signer.rs deleted file mode 100644 index 35c539dad..000000000 --- a/examples/psbt_signer.rs +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright (c) 2020-2022 Bitcoin Dev Kit Developers -// -// This file is licensed under the Apache License, Version 2.0 or the MIT license -// , at your option. -// You may not use this file except in accordance with one or both of these -// licenses. - -use bdk::blockchain::{Blockchain, ElectrumBlockchain}; -use bdk::database::MemoryDatabase; -use bdk::wallet::AddressIndex; -use bdk::{descriptor, SyncOptions}; -use bdk::{FeeRate, SignOptions, Wallet}; -use bitcoin::secp256k1::Secp256k1; -use bitcoin::{Address, Network}; -use electrum_client::Client; -use miniscript::descriptor::DescriptorSecretKey; -use std::error::Error; -use std::str::FromStr; - -/// This example shows how to sign and broadcast the transaction for a PSBT (Partially Signed -/// Bitcoin Transaction) for a single key, witness public key hash (WPKH) based descriptor wallet. -/// The electrum protocol is used to sync blockchain data from the testnet bitcoin network and -/// wallet data is stored in an ephemeral in-memory database. The process steps are: -/// 1. Create a "signing" wallet and a "watch-only" wallet based on the same private keys. -/// 2. Deposit testnet funds into the watch only wallet. -/// 3. Sync the watch only wallet and create a spending transaction to return all funds to the testnet faucet. -/// 4. 
Sync the signing wallet and sign and finalize the PSBT created by the watch only wallet. -/// 5. Broadcast the transactions from the finalized PSBT. -fn main() -> Result<(), Box> { - // test key created with `bdk-cli key generate` and `bdk-cli key derive` commands - let external_secret_xkey = DescriptorSecretKey::from_str("[e9824965/84'/1'/0']tprv8fvem7qWxY3SGCQczQpRpqTKg455wf1zgixn6MZ4ze8gRfHjov5gXBQTadNfDgqs9ERbZZ3Bi1PNYrCCusFLucT39K525MWLpeURjHwUsfX/0/*").unwrap(); - let internal_secret_xkey = DescriptorSecretKey::from_str("[e9824965/84'/1'/0']tprv8fvem7qWxY3SGCQczQpRpqTKg455wf1zgixn6MZ4ze8gRfHjov5gXBQTadNfDgqs9ERbZZ3Bi1PNYrCCusFLucT39K525MWLpeURjHwUsfX/1/*").unwrap(); - - let secp = Secp256k1::new(); - let external_public_xkey = external_secret_xkey.to_public(&secp).unwrap(); - let internal_public_xkey = internal_secret_xkey.to_public(&secp).unwrap(); - - let signing_external_descriptor = descriptor!(wpkh(external_secret_xkey)).unwrap(); - let signing_internal_descriptor = descriptor!(wpkh(internal_secret_xkey)).unwrap(); - - let watch_only_external_descriptor = descriptor!(wpkh(external_public_xkey)).unwrap(); - let watch_only_internal_descriptor = descriptor!(wpkh(internal_public_xkey)).unwrap(); - - // create client for Blockstream's testnet electrum server - let blockchain = - ElectrumBlockchain::from(Client::new("ssl://electrum.blockstream.info:60002")?); - - // create watch only wallet - let watch_only_wallet: Wallet = Wallet::new( - watch_only_external_descriptor, - Some(watch_only_internal_descriptor), - Network::Testnet, - MemoryDatabase::default(), - )?; - - // create signing wallet - let signing_wallet: Wallet = Wallet::new( - signing_external_descriptor, - Some(signing_internal_descriptor), - Network::Testnet, - MemoryDatabase::default(), - )?; - - println!("Syncing watch only wallet."); - watch_only_wallet.sync(&blockchain, SyncOptions::default())?; - - // get deposit address - let deposit_address = 
watch_only_wallet.get_address(AddressIndex::New)?; - - let balance = watch_only_wallet.get_balance()?; - println!("Watch only wallet balances in SATs: {}", balance); - - if balance.get_total() < 10000 { - println!( - "Send at least 10000 SATs (0.0001 BTC) from the u01.net testnet faucet to address '{addr}'.\nFaucet URL: https://bitcoinfaucet.uo1.net/?to={addr}", - addr = deposit_address.address - ); - } else if balance.get_spendable() < 10000 { - println!( - "Wait for at least 10000 SATs of your wallet transactions to be confirmed...\nBe patient, this could take 10 mins or longer depending on how testnet is behaving." - ); - for tx_details in watch_only_wallet - .list_transactions(false)? - .iter() - .filter(|txd| txd.received > 0 && txd.confirmation_time.is_none()) - { - println!( - "See unconfirmed tx for {} SATs: https://mempool.space/testnet/tx/{}", - tx_details.received, tx_details.txid - ); - } - } else { - println!("Creating a PSBT sending 9800 SATs plus fee to the u01.net testnet faucet return address 'tb1ql7w62elx9ucw4pj5lgw4l028hmuw80sndtntxt'."); - let return_address = Address::from_str("tb1ql7w62elx9ucw4pj5lgw4l028hmuw80sndtntxt")?; - let mut builder = watch_only_wallet.build_tx(); - builder - .add_recipient(return_address.script_pubkey(), 9_800) - .enable_rbf() - .fee_rate(FeeRate::from_sat_per_vb(1.0)); - - let (mut psbt, details) = builder.finish()?; - println!("Transaction details: {:#?}", details); - println!("Unsigned PSBT: {}", psbt); - - // Sign and finalize the PSBT with the signing wallet - let finalized = signing_wallet.sign(&mut psbt, SignOptions::default())?; - assert!(finalized, "The PSBT was not finalized!"); - println!("The PSBT has been signed and finalized."); - - // Broadcast the transaction - let raw_transaction = psbt.extract_tx(); - let txid = raw_transaction.txid(); - - blockchain.broadcast(&raw_transaction)?; - println!("Transaction broadcast! 
TXID: {txid}.\nExplorer URL: https://mempool.space/testnet/tx/{txid}", txid = txid); - } - - Ok(()) -} diff --git a/examples/rpcwallet.rs b/examples/rpcwallet.rs deleted file mode 100644 index 24a555910..000000000 --- a/examples/rpcwallet.rs +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright (c) 2020-2021 Bitcoin Dev Kit Developers -// -// This file is licensed under the Apache License, Version 2.0 or the MIT license -// , at your option. -// You may not use this file except in accordance with one or both of these -// licenses. - -use bdk::bitcoin::secp256k1::Secp256k1; -use bdk::bitcoin::Amount; -use bdk::bitcoin::Network; -use bdk::bitcoincore_rpc::RpcApi; - -use bdk::blockchain::rpc::{Auth, RpcBlockchain, RpcConfig}; -use bdk::blockchain::ConfigurableBlockchain; - -use bdk::keys::bip39::{Language, Mnemonic, WordCount}; -use bdk::keys::{DerivableKey, GeneratableKey, GeneratedKey}; - -use bdk::miniscript::miniscript::Segwitv0; - -use bdk::sled; -use bdk::template::Bip84; -use bdk::wallet::{signer::SignOptions, wallet_name_from_descriptor, AddressIndex, SyncOptions}; -use bdk::KeychainKind; -use bdk::Wallet; - -use bdk::blockchain::Blockchain; - -use electrsd; - -use std::error::Error; -use std::path::PathBuf; -use std::str::FromStr; - -/// This example demonstrates a typical way to create a wallet and work with bdk. -/// -/// This example bdk wallet is connected to a bitcoin core rpc regtest node, -/// and will attempt to receive, create and broadcast transactions. -/// -/// To start a bitcoind regtest node programmatically, this example uses -/// `electrsd` library, which is also a bdk dev-dependency. -/// -/// But you can start your own bitcoind backend, and the rest of the example should work fine. 
- -fn main() -> Result<(), Box> { - // -- Setting up background bitcoind process - - println!(">> Setting up bitcoind"); - - // Start the bitcoind process - let bitcoind_conf = electrsd::bitcoind::Conf::default(); - - // electrsd will automatically download the bitcoin core binaries - let bitcoind_exe = - electrsd::bitcoind::downloaded_exe_path().expect("We should always have downloaded path"); - - // Launch bitcoind and gather authentication access - let bitcoind = electrsd::bitcoind::BitcoinD::with_conf(bitcoind_exe, &bitcoind_conf).unwrap(); - let bitcoind_auth = Auth::Cookie { - file: bitcoind.params.cookie_file.clone(), - }; - - // Get a new core address - let core_address = bitcoind.client.get_new_address(None, None)?; - - // Generate 101 blocks and use the above address as coinbase - bitcoind.client.generate_to_address(101, &core_address)?; - - println!(">> bitcoind setup complete"); - println!( - "Available coins in Core wallet : {}", - bitcoind.client.get_balance(None, None)? - ); - - // -- Setting up the Wallet - - println!("\n>> Setting up BDK wallet"); - - // Get a random private key - let xprv = generate_random_ext_privkey()?; - - // Use the derived descriptors from the privatekey to - // create unique wallet name. 
- // This is a special utility function exposed via `bdk::wallet_name_from_descriptor()` - let wallet_name = wallet_name_from_descriptor( - Bip84(xprv.clone(), KeychainKind::External), - Some(Bip84(xprv.clone(), KeychainKind::Internal)), - Network::Regtest, - &Secp256k1::new(), - )?; - - // Create a database (using default sled type) to store wallet data - let mut datadir = PathBuf::from_str("/tmp/")?; - datadir.push(".bdk-example"); - let database = sled::open(datadir)?; - let database = database.open_tree(wallet_name.clone())?; - - // Create a RPC configuration of the running bitcoind backend we created in last step - // Note: If you are using custom regtest node, use the appropriate url and auth - let rpc_config = RpcConfig { - url: bitcoind.params.rpc_socket.to_string(), - auth: bitcoind_auth, - network: Network::Regtest, - wallet_name, - sync_params: None, - }; - - // Use the above configuration to create a RPC blockchain backend - let blockchain = RpcBlockchain::from_config(&rpc_config)?; - - // Combine Database + Descriptor to create the final wallet - let wallet = Wallet::new( - Bip84(xprv.clone(), KeychainKind::External), - Some(Bip84(xprv.clone(), KeychainKind::Internal)), - Network::Regtest, - database, - )?; - - // The `wallet` and the `blockchain` are independent structs. - // The wallet will be used to do all wallet level actions - // The blockchain can be used to do all blockchain level actions. - // For certain actions (like sync) the wallet will ask for a blockchain. - - // Sync the wallet - // The first sync is important as this will instantiate the - // wallet files. - wallet.sync(&blockchain, SyncOptions::default())?; - - println!(">> BDK wallet setup complete."); - println!( - "Available initial coins in BDK wallet : {} sats", - wallet.get_balance()? 
- ); - - // -- Wallet transaction demonstration - - println!("\n>> Sending coins: Core --> BDK, 10 BTC"); - // Get a new address to receive coins - let bdk_new_addr = wallet.get_address(AddressIndex::New)?.address; - - // Send 10 BTC from core wallet to bdk wallet - bitcoind.client.send_to_address( - &bdk_new_addr, - Amount::from_btc(10.0)?, - None, - None, - None, - None, - None, - None, - )?; - - // Confirm transaction by generating 1 block - bitcoind.client.generate_to_address(1, &core_address)?; - - // Sync the BDK wallet - // This time the sync will fetch the new transaction and update it in - // wallet database - wallet.sync(&blockchain, SyncOptions::default())?; - - println!(">> Received coins in BDK wallet"); - println!( - "Available balance in BDK wallet: {} sats", - wallet.get_balance()? - ); - - println!("\n>> Sending coins: BDK --> Core, 5 BTC"); - // Attempt to send back 5.0 BTC to core address by creating a transaction - // - // Transactions are created using a `TxBuilder`. - // This helps us to systematically build a transaction with all - // required customization. 
- // A full list of APIs offered by `TxBuilder` can be found at - // https://docs.rs/bdk/latest/bdk/wallet/tx_builder/struct.TxBuilder.html - let mut tx_builder = wallet.build_tx(); - - // For a regular transaction, just set the recipient and amount - tx_builder.set_recipients(vec![(core_address.script_pubkey(), 500000000)]); - - // Finalize the transaction and extract the PSBT - let (mut psbt, _) = tx_builder.finish()?; - - // Set signing option - let signopt = SignOptions { - assume_height: None, - ..Default::default() - }; - - // Sign the psbt - wallet.sign(&mut psbt, signopt)?; - - // Extract the signed transaction - let tx = psbt.extract_tx(); - - // Broadcast the transaction - blockchain.broadcast(&tx)?; - - // Confirm transaction by generating some blocks - bitcoind.client.generate_to_address(1, &core_address)?; - - // Sync the BDK wallet - wallet.sync(&blockchain, SyncOptions::default())?; - - println!(">> Coins sent to Core wallet"); - println!( - "Remaining BDK wallet balance: {} sats", - wallet.get_balance()? - ); - println!("\nCongrats!! you made your first test transaction with bdk and bitcoin core."); - - Ok(()) -} - -// Helper function demonstrating privatekey extraction using bip39 mnemonic -// The mnemonic can be shown to user to safekeeping and the same wallet -// private descriptors can be recreated from it. -fn generate_random_ext_privkey() -> Result + Clone, Box> { - // a Bip39 passphrase can be set optionally - let password = Some("random password".to_string()); - - // Generate a random mnemonic, and use that to create a "DerivableKey" - let mnemonic: GeneratedKey<_, _> = Mnemonic::generate((WordCount::Words12, Language::English)) - .map_err(|e| e.expect("Unknown Error"))?; - - // `Ok(mnemonic)` would also work if there's no passphrase and it would - // yield the same result as this construct with `password` = `None`. 
- Ok((mnemonic, password)) -} diff --git a/examples/utils/mod.rs b/examples/utils/mod.rs deleted file mode 100644 index 25249fa7e..000000000 --- a/examples/utils/mod.rs +++ /dev/null @@ -1,30 +0,0 @@ -pub(crate) mod tx { - - use std::str::FromStr; - - use bdk::{database::BatchDatabase, SignOptions, Wallet}; - use bitcoin::{Address, Transaction}; - - pub fn build_signed_tx( - wallet: &Wallet, - recipient_address: &str, - amount: u64, - ) -> Transaction { - // Create a transaction builder - let mut tx_builder = wallet.build_tx(); - - let to_address = Address::from_str(recipient_address).unwrap(); - - // Set recipient of the transaction - tx_builder.set_recipients(vec![(to_address.script_pubkey(), amount)]); - - // Finalise the transaction and extract PSBT - let (mut psbt, _) = tx_builder.finish().unwrap(); - - // Sign the above psbt with signing option - wallet.sign(&mut psbt, SignOptions::default()).unwrap(); - - // Extract the final transaction - psbt.extract_tx() - } -} diff --git a/macros/Cargo.toml b/macros/Cargo.toml deleted file mode 100644 index d5b2f5ff3..000000000 --- a/macros/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -[package] -name = "bdk-macros" -version = "0.6.0" -authors = ["Alekos Filini "] -edition = "2018" -homepage = "https://bitcoindevkit.org" -repository = "https://github.com/bitcoindevkit/bdk" -documentation = "https://docs.rs/bdk-macros" -description = "Supporting macros for `bdk`" -keywords = ["bdk"] -license = "MIT OR Apache-2.0" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -syn = { version = "1.0", features = ["parsing", "full"] } -proc-macro2 = "1.0" -quote = "1.0" - -[features] -debug = ["syn/extra-traits"] - -[lib] -proc-macro = true diff --git a/macros/src/lib.rs b/macros/src/lib.rs deleted file mode 100644 index 2fabf2cfc..000000000 --- a/macros/src/lib.rs +++ /dev/null @@ -1,146 +0,0 @@ -// Bitcoin Dev Kit -// Written in 2020 by Alekos Filini -// -// Copyright 
(c) 2020-2021 Bitcoin Dev Kit Developers -// -// This file is licensed under the Apache License, Version 2.0 or the MIT license -// , at your option. -// You may not use this file except in accordance with one or both of these -// licenses. - -#[macro_use] -extern crate quote; - -use proc_macro::TokenStream; - -use syn::spanned::Spanned; -use syn::{parse, ImplItemMethod, ItemImpl, ItemTrait, Token}; - -fn add_async_trait(mut parsed: ItemTrait) -> TokenStream { - let output = quote! { - #[cfg(not(feature = "async-interface"))] - #parsed - }; - - for mut item in &mut parsed.items { - if let syn::TraitItem::Method(m) = &mut item { - m.sig.asyncness = Some(Token![async](m.span())); - } - } - - let output = quote! { - #output - - #[cfg(feature = "async-interface")] - #[async_trait(?Send)] - #parsed - }; - - output.into() -} - -fn add_async_method(mut parsed: ImplItemMethod) -> TokenStream { - let output = quote! { - #[cfg(not(feature = "async-interface"))] - #parsed - }; - - parsed.sig.asyncness = Some(Token![async](parsed.span())); - - let output = quote! { - #output - - #[cfg(feature = "async-interface")] - #parsed - }; - - output.into() -} - -fn add_async_impl_trait(mut parsed: ItemImpl) -> TokenStream { - let output = quote! { - #[cfg(not(feature = "async-interface"))] - #parsed - }; - - for mut item in &mut parsed.items { - if let syn::ImplItem::Method(m) = &mut item { - m.sig.asyncness = Some(Token![async](m.span())); - } - } - - let output = quote! { - #output - - #[cfg(feature = "async-interface")] - #[async_trait(?Send)] - #parsed - }; - - output.into() -} - -/// Makes a method or every method of a trait `async`, if the `async-interface` feature is enabled. -/// -/// Requires the `async-trait` crate as a dependency whenever this attribute is used on a trait -/// definition or trait implementation. 
-#[proc_macro_attribute] -pub fn maybe_async(_attr: TokenStream, item: TokenStream) -> TokenStream { - if let Ok(parsed) = parse(item.clone()) { - add_async_trait(parsed) - } else if let Ok(parsed) = parse(item.clone()) { - add_async_method(parsed) - } else if let Ok(parsed) = parse(item) { - add_async_impl_trait(parsed) - } else { - (quote! { - compile_error!("#[maybe_async] can only be used on methods, trait or trait impl blocks") - }) - .into() - } -} - -/// Awaits, if the `async-interface` feature is enabled. -#[proc_macro] -pub fn maybe_await(expr: TokenStream) -> TokenStream { - let expr: proc_macro2::TokenStream = expr.into(); - let quoted = quote! { - { - #[cfg(not(feature = "async-interface"))] - { - #expr - } - - #[cfg(feature = "async-interface")] - { - #expr.await - } - } - }; - - quoted.into() -} - -/// Awaits, if the `async-interface` feature is enabled, uses `tokio::Runtime::block_on()` otherwise -/// -/// Requires the `tokio` crate as a dependecy with `rt-core` or `rt-threaded` to build. -#[proc_macro] -pub fn await_or_block(expr: TokenStream) -> TokenStream { - let expr: proc_macro2::TokenStream = expr.into(); - let quoted = quote! { - { - #[cfg(not(feature = "async-interface"))] - { - tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(#expr) - } - - #[cfg(feature = "async-interface")] - { - #expr.await - } - } - }; - - quoted.into() -} diff --git a/nursery/README.md b/nursery/README.md new file mode 100644 index 000000000..e136a20d6 --- /dev/null +++ b/nursery/README.md @@ -0,0 +1,5 @@ +# Bitcoin Dev Kit Nursery + +This is a directory for crates that are experimental and have not been released yet. +Keep in mind that they may never be released. +Things in `/example-crates` may use them to demonsrate how things might look in the future. 
diff --git a/nursery/coin_select/Cargo.toml b/nursery/coin_select/Cargo.toml new file mode 100644 index 000000000..3c338d0e4 --- /dev/null +++ b/nursery/coin_select/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "bdk_coin_select" +version = "0.0.1" +authors = [ "LLFourn " ] + +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3.1" } + +[features] +default = ["std"] +std = [] diff --git a/nursery/coin_select/src/bnb.rs b/nursery/coin_select/src/bnb.rs new file mode 100644 index 000000000..75b0f332e --- /dev/null +++ b/nursery/coin_select/src/bnb.rs @@ -0,0 +1,645 @@ +use super::*; + +/// Strategy in which we should branch. +pub enum BranchStrategy { + /// We continue exploring subtrees of this node, starting with the inclusion branch. + Continue, + /// We continue exploring ONY the omission branch of this node, skipping the inclusion branch. + SkipInclusion, + /// We skip both the inclusion and omission branches of this node. + SkipBoth, +} + +impl BranchStrategy { + pub fn will_continue(&self) -> bool { + matches!(self, Self::Continue | Self::SkipInclusion) + } +} + +/// Closure to decide the branching strategy, alongside a score (if the current selection is a +/// candidate solution). +pub type DecideStrategy<'c, S> = dyn Fn(&Bnb<'c, S>) -> (BranchStrategy, Option); + +/// [`Bnb`] represents the current state of the BnB algorithm. +pub struct Bnb<'c, S> { + pub pool: Vec<(usize, &'c WeightedValue)>, + pub pool_pos: usize, + pub best_score: S, + + pub selection: CoinSelector<'c>, + pub rem_abs: u64, + pub rem_eff: i64, +} + +impl<'c, S: Ord> Bnb<'c, S> { + /// Creates a new [`Bnb`]. 
+ pub fn new(selector: CoinSelector<'c>, pool: Vec<(usize, &'c WeightedValue)>, max: S) -> Self { + let (rem_abs, rem_eff) = pool.iter().fold((0, 0), |(abs, eff), (_, c)| { + ( + abs + c.value, + eff + c.effective_value(selector.opts.target_feerate), + ) + }); + + Self { + pool, + pool_pos: 0, + best_score: max, + selection: selector, + rem_abs, + rem_eff, + } + } + + /// Turns our [`Bnb`] state into an iterator. + /// + /// `strategy` should assess our current selection/node and determine the branching strategy and + /// whether this selection is a candidate solution (if so, return the score of the selection). + pub fn into_iter<'f>(self, strategy: &'f DecideStrategy<'c, S>) -> BnbIter<'c, 'f, S> { + BnbIter { + state: self, + done: false, + strategy, + } + } + + /// Attempt to backtrack to the previously selected node's omission branch, return false + /// otherwise (no more solutions). + pub fn backtrack(&mut self) -> bool { + (0..self.pool_pos).rev().any(|pos| { + let (index, candidate) = self.pool[pos]; + + if self.selection.is_selected(index) { + // deselect last `pos`, so next round will check omission branch + self.pool_pos = pos; + self.selection.deselect(index); + true + } else { + self.rem_abs += candidate.value; + self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate); + false + } + }) + } + + /// Continue down this branch, skip inclusion branch if specified. + pub fn forward(&mut self, skip: bool) { + let (index, candidate) = self.pool[self.pool_pos]; + self.rem_abs -= candidate.value; + self.rem_eff -= candidate.effective_value(self.selection.opts.target_feerate); + + if !skip { + self.selection.select(index); + } + } + + /// Compare advertised score with current best. New best will be the smaller value. Return true + /// if best is replaced. 
+ pub fn advertise_new_score(&mut self, score: S) -> bool { + if score <= self.best_score { + self.best_score = score; + return true; + } + false + } +} + +pub struct BnbIter<'c, 'f, S> { + state: Bnb<'c, S>, + done: bool, + + /// Check our current selection (node), and returns the branching strategy, alongside a score + /// (if the current selection is a candidate solution). + strategy: &'f DecideStrategy<'c, S>, +} + +impl<'c, 'f, S: Ord + Copy + Display> Iterator for BnbIter<'c, 'f, S> { + type Item = Option>; + + fn next(&mut self) -> Option { + if self.done { + return None; + } + + let (strategy, score) = (self.strategy)(&self.state); + + let mut found_best = Option::::None; + + if let Some(score) = score { + if self.state.advertise_new_score(score) { + found_best = Some(self.state.selection.clone()); + } + } + + debug_assert!( + !strategy.will_continue() || self.state.pool_pos < self.state.pool.len(), + "Faulty strategy implementation! Strategy suggested that we continue traversing, however we have already reached the end of the candidates pool! pool_len={}, pool_pos={}", + self.state.pool.len(), self.state.pool_pos, + ); + + match strategy { + BranchStrategy::Continue => { + self.state.forward(false); + } + BranchStrategy::SkipInclusion => { + self.state.forward(true); + } + BranchStrategy::SkipBoth => { + if !self.state.backtrack() { + self.done = true; + } + } + }; + + // increment selection pool position for next round + self.state.pool_pos += 1; + + if found_best.is_some() || !self.done { + Some(found_best) + } else { + // we have traversed all branches + None + } + } +} + +/// Determines how we should limit rounds of branch and bound. 
+pub enum BnbLimit { + Rounds(usize), + #[cfg(feature = "std")] + Duration(core::time::Duration), +} + +impl From for BnbLimit { + fn from(v: usize) -> Self { + Self::Rounds(v) + } +} + +#[cfg(feature = "std")] +impl From for BnbLimit { + fn from(v: core::time::Duration) -> Self { + Self::Duration(v) + } +} + +/// This is a variation of the Branch and Bound Coin Selection algorithm designed by Murch (as seen +/// in Bitcoin Core). +/// +/// The differences are as follows: +/// * In additional to working with effective values, we also work with absolute values. +/// This way, we can use bounds of absolute values to enforce `min_absolute_fee` (which is used by +/// RBF), and `max_extra_target` (which can be used to increase the possible solution set, given +/// that the sender is okay with sending extra to the receiver). +/// +/// Murch's Master Thesis: +/// Bitcoin Core Implementation: +/// +/// TODO: Another optimization we could do is figure out candidate with smallest waste, and +/// if we find a result with waste equal to this, we can just break. 
+pub fn coin_select_bnb(limit: L, selector: CoinSelector) -> Option +where + L: Into, +{ + let opts = selector.opts; + + // prepare pool of candidates to select from: + // * filter out candidates with negative/zero effective values + // * sort candidates by descending effective value + let pool = { + let mut pool = selector + .unselected() + .filter(|(_, c)| c.effective_value(opts.target_feerate) > 0) + .collect::>(); + pool.sort_unstable_by(|(_, a), (_, b)| { + let a = a.effective_value(opts.target_feerate); + let b = b.effective_value(opts.target_feerate); + b.cmp(&a) + }); + pool + }; + + let feerate_decreases = opts.target_feerate > opts.long_term_feerate(); + + let target_abs = opts.target_value.unwrap_or(0) + opts.min_absolute_fee; + let target_eff = selector.effective_target(); + + let upper_bound_abs = target_abs + (opts.drain_weight as f32 * opts.target_feerate) as u64; + let upper_bound_eff = target_eff + opts.drain_waste(); + + let strategy = move |bnb: &Bnb| -> (BranchStrategy, Option) { + let selected_abs = bnb.selection.selected_absolute_value(); + let selected_eff = bnb.selection.selected_effective_value(); + + // backtrack if remaining value is not enough to reach target + if selected_abs + bnb.rem_abs < target_abs || selected_eff + bnb.rem_eff < target_eff { + return (BranchStrategy::SkipBoth, None); + } + + // backtrack if selected value already surpassed upper bounds + if selected_abs > upper_bound_abs && selected_eff > upper_bound_eff { + return (BranchStrategy::SkipBoth, None); + } + + let selected_waste = bnb.selection.selected_waste(); + + // when feerate decreases, waste without excess is guaranteed to increase with each + // selection. So if we have already surpassed best score, we can backtrack. + if feerate_decreases && selected_waste > bnb.best_score { + return (BranchStrategy::SkipBoth, None); + } + + // solution? 
+ if selected_abs >= target_abs && selected_eff >= target_eff { + let waste = selected_waste + bnb.selection.current_excess(); + return (BranchStrategy::SkipBoth, Some(waste)); + } + + // early bailout optimization: + // If the candidate at the previous position is NOT selected and has the same weight and + // value as the current candidate, we can skip selecting the current candidate. + if bnb.pool_pos > 0 && !bnb.selection.is_empty() { + let (_, candidate) = bnb.pool[bnb.pool_pos]; + let (prev_index, prev_candidate) = bnb.pool[bnb.pool_pos - 1]; + + if !bnb.selection.is_selected(prev_index) + && candidate.value == prev_candidate.value + && candidate.weight == prev_candidate.weight + { + return (BranchStrategy::SkipInclusion, None); + } + } + + // check out inclusion branch first + (BranchStrategy::Continue, None) + }; + + // determine sum of absolute and effective values for current selection + let (selected_abs, selected_eff) = selector.selected().fold((0, 0), |(abs, eff), (_, c)| { + ( + abs + c.value, + eff + c.effective_value(selector.opts.target_feerate), + ) + }); + + let bnb = Bnb::new(selector, pool, i64::MAX); + + // not enough to select anyway + if selected_abs + bnb.rem_abs < target_abs || selected_eff + bnb.rem_eff < target_eff { + return None; + } + + match limit.into() { + BnbLimit::Rounds(rounds) => { + bnb.into_iter(&strategy) + .take(rounds) + .reduce(|b, c| if c.is_some() { c } else { b }) + } + #[cfg(feature = "std")] + BnbLimit::Duration(duration) => { + let start = std::time::SystemTime::now(); + bnb.into_iter(&strategy) + .take_while(|_| start.elapsed().expect("failed to get system time") <= duration) + .reduce(|b, c| if c.is_some() { c } else { b }) + } + }? 
+} + +#[cfg(all(test, feature = "miniscript"))] +mod test { + use bitcoin::secp256k1::Secp256k1; + + use crate::coin_select::{evaluate_cs::evaluate, ExcessStrategyKind}; + + use super::{ + coin_select_bnb, + evaluate_cs::{Evaluation, EvaluationError}, + tester::Tester, + CoinSelector, CoinSelectorOpt, Vec, WeightedValue, + }; + + fn tester() -> Tester { + const DESC_STR: &str = "tr(xprv9uBuvtdjghkz8D1qzsSXS9Vs64mqrUnXqzNccj2xcvnCHPpXKYE1U2Gbh9CDHk8UPyF2VuXpVkDA7fk5ZP4Hd9KnhUmTscKmhee9Dp5sBMK)"; + Tester::new(&Secp256k1::default(), DESC_STR) + } + + fn evaluate_bnb( + initial_selector: CoinSelector, + max_tries: usize, + ) -> Result { + evaluate(initial_selector, |cs| { + coin_select_bnb(max_tries, cs.clone()).map_or(false, |new_cs| { + *cs = new_cs; + true + }) + }) + } + + #[test] + fn not_enough_coins() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 100_000).into(), + t.gen_candidate(1, 100_000).into(), + ]; + let opts = t.gen_opts(200_000); + let selector = CoinSelector::new(&candidates, &opts); + assert!(!coin_select_bnb(10_000, selector).is_some()); + } + + #[test] + fn exactly_enough_coins_preselected() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 100_000).into(), // to preselect + t.gen_candidate(1, 100_000).into(), // to preselect + t.gen_candidate(2, 100_000).into(), + ]; + let opts = CoinSelectorOpt { + target_feerate: 0.0, + ..t.gen_opts(200_000) + }; + let selector = { + let mut selector = CoinSelector::new(&candidates, &opts); + selector.select(0); // preselect + selector.select(1); // preselect + selector + }; + + let evaluation = evaluate_bnb(selector, 10_000).expect("eval failed"); + println!("{}", evaluation); + assert_eq!(evaluation.solution.selected, (0..=1).collect()); + assert_eq!(evaluation.solution.excess_strategies.len(), 1); + assert_eq!( + evaluation.feerate_offset(ExcessStrategyKind::ToFee).floor(), + 0.0 + ); + } + + /// `cost_of_change` acts as the upper-bound in Bnb, we check 
whether these boundaries are + /// enforced in code + #[test] + fn cost_of_change() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 200_000).into(), + t.gen_candidate(1, 200_000).into(), + t.gen_candidate(2, 200_000).into(), + ]; + + // lowest and highest possible `recipient_value` opts for derived `drain_waste`, assuming + // that we want 2 candidates selected + let (lowest_opts, highest_opts) = { + let opts = t.gen_opts(0); + + let fee_from_inputs = + (candidates[0].weight as f32 * opts.target_feerate).ceil() as u64 * 2; + let fee_from_template = + ((opts.base_weight + 2) as f32 * opts.target_feerate).ceil() as u64; + + let lowest_opts = CoinSelectorOpt { + target_value: Some( + 400_000 - fee_from_inputs - fee_from_template - opts.drain_waste() as u64, + ), + ..opts + }; + + let highest_opts = CoinSelectorOpt { + target_value: Some(400_000 - fee_from_inputs - fee_from_template), + ..opts + }; + + (lowest_opts, highest_opts) + }; + + // test lowest possible target we are able to select + let lowest_eval = evaluate_bnb(CoinSelector::new(&candidates, &lowest_opts), 10_000); + assert!(lowest_eval.is_ok()); + let lowest_eval = lowest_eval.unwrap(); + println!("LB {}", lowest_eval); + assert_eq!(lowest_eval.solution.selected.len(), 2); + assert_eq!(lowest_eval.solution.excess_strategies.len(), 1); + assert_eq!( + lowest_eval + .feerate_offset(ExcessStrategyKind::ToFee) + .floor(), + 0.0 + ); + + // test highest possible target we are able to select + let highest_eval = evaluate_bnb(CoinSelector::new(&candidates, &highest_opts), 10_000); + assert!(highest_eval.is_ok()); + let highest_eval = highest_eval.unwrap(); + println!("UB {}", highest_eval); + assert_eq!(highest_eval.solution.selected.len(), 2); + assert_eq!(highest_eval.solution.excess_strategies.len(), 1); + assert_eq!( + highest_eval + .feerate_offset(ExcessStrategyKind::ToFee) + .floor(), + 0.0 + ); + + // test lower out of bounds + let loob_opts = CoinSelectorOpt { + target_value: 
lowest_opts.target_value.map(|v| v - 1), + ..lowest_opts + }; + let loob_eval = evaluate_bnb(CoinSelector::new(&candidates, &loob_opts), 10_000); + assert!(loob_eval.is_err()); + println!("Lower OOB: {}", loob_eval.unwrap_err()); + + // test upper out of bounds + let uoob_opts = CoinSelectorOpt { + target_value: highest_opts.target_value.map(|v| v + 1), + ..highest_opts + }; + let uoob_eval = evaluate_bnb(CoinSelector::new(&candidates, &uoob_opts), 10_000); + assert!(uoob_eval.is_err()); + println!("Upper OOB: {}", uoob_eval.unwrap_err()); + } + + #[test] + fn try_select() { + let t = tester(); + let candidates: Vec = vec![ + t.gen_candidate(0, 300_000).into(), + t.gen_candidate(1, 300_000).into(), + t.gen_candidate(2, 300_000).into(), + t.gen_candidate(3, 200_000).into(), + t.gen_candidate(4, 200_000).into(), + ]; + let make_opts = |v: u64| -> CoinSelectorOpt { + CoinSelectorOpt { + target_feerate: 0.0, + ..t.gen_opts(v) + } + }; + + let test_cases = vec![ + (make_opts(100_000), false, 0), + (make_opts(200_000), true, 1), + (make_opts(300_000), true, 1), + (make_opts(500_000), true, 2), + (make_opts(1_000_000), true, 4), + (make_opts(1_200_000), false, 0), + (make_opts(1_300_000), true, 5), + (make_opts(1_400_000), false, 0), + ]; + + for (opts, expect_solution, expect_selected) in test_cases { + let res = evaluate_bnb(CoinSelector::new(&candidates, &opts), 10_000); + assert_eq!(res.is_ok(), expect_solution); + + match res { + Ok(eval) => { + println!("{}", eval); + assert_eq!(eval.feerate_offset(ExcessStrategyKind::ToFee), 0.0); + assert_eq!(eval.solution.selected.len(), expect_selected as _); + } + Err(err) => println!("expected failure: {}", err), + } + } + } + + #[test] + fn early_bailout_optimization() { + let t = tester(); + + // target: 300_000 + // candidates: 2x of 125_000, 1000x of 100_000, 1x of 50_000 + // expected solution: 2x 125_000, 1x 50_000 + // set bnb max tries: 1100, should succeed + let candidates = { + let mut candidates: Vec = vec![ + 
t.gen_candidate(0, 125_000).into(), + t.gen_candidate(1, 125_000).into(), + t.gen_candidate(2, 50_000).into(), + ]; + (3..3 + 1000_u32) + .for_each(|index| candidates.push(t.gen_candidate(index, 100_000).into())); + candidates + }; + let opts = CoinSelectorOpt { + target_feerate: 0.0, + ..t.gen_opts(300_000) + }; + + let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 1100); + assert!(result.is_ok()); + + let eval = result.unwrap(); + println!("{}", eval); + assert_eq!(eval.solution.selected, (0..=2).collect()); + } + + #[test] + fn should_exhaust_iteration() { + static MAX_TRIES: usize = 1000; + let t = tester(); + let candidates = (0..MAX_TRIES + 1) + .map(|index| t.gen_candidate(index as _, 10_000).into()) + .collect::>(); + let opts = t.gen_opts(10_001 * MAX_TRIES as u64); + let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), MAX_TRIES); + assert!(result.is_err()); + println!("error as expected: {}", result.unwrap_err()); + } + + /// Solution should have fee >= min_absolute_fee (or no solution at all) + #[test] + fn min_absolute_fee() { + let t = tester(); + let candidates = { + let mut candidates = Vec::new(); + t.gen_weighted_values(&mut candidates, 5, 10_000); + t.gen_weighted_values(&mut candidates, 5, 20_000); + t.gen_weighted_values(&mut candidates, 5, 30_000); + t.gen_weighted_values(&mut candidates, 10, 10_300); + t.gen_weighted_values(&mut candidates, 10, 10_500); + t.gen_weighted_values(&mut candidates, 10, 10_700); + t.gen_weighted_values(&mut candidates, 10, 10_900); + t.gen_weighted_values(&mut candidates, 10, 11_000); + t.gen_weighted_values(&mut candidates, 10, 12_000); + t.gen_weighted_values(&mut candidates, 10, 13_000); + candidates + }; + let mut opts = CoinSelectorOpt { + min_absolute_fee: 1, + ..t.gen_opts(100_000) + }; + + (1..=120_u64).for_each(|fee_factor| { + opts.min_absolute_fee = fee_factor * 31; + + let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 21_000); + match result { + Ok(result) => { 
+ println!("Solution {}", result); + let fee = result.solution.excess_strategies[&ExcessStrategyKind::ToFee].fee; + assert!(fee >= opts.min_absolute_fee); + assert_eq!(result.solution.excess_strategies.len(), 1); + } + Err(err) => { + println!("No Solution: {}", err); + } + } + }); + } + + /// For a decreasing feerate (longterm feerate is lower than effective feerate), we should + /// select less. For increasing feerate (longterm feerate is higher than effective feerate), we + /// should select more. + #[test] + fn feerate_difference() { + let t = tester(); + let candidates = { + let mut candidates = Vec::new(); + t.gen_weighted_values(&mut candidates, 10, 2_000); + t.gen_weighted_values(&mut candidates, 10, 5_000); + t.gen_weighted_values(&mut candidates, 10, 20_000); + candidates + }; + + let decreasing_feerate_opts = CoinSelectorOpt { + target_feerate: 1.25, + long_term_feerate: Some(0.25), + ..t.gen_opts(100_000) + }; + + let increasing_feerate_opts = CoinSelectorOpt { + target_feerate: 0.25, + long_term_feerate: Some(1.25), + ..t.gen_opts(100_000) + }; + + let decreasing_res = evaluate_bnb( + CoinSelector::new(&candidates, &decreasing_feerate_opts), + 21_000, + ) + .expect("no result"); + let decreasing_len = decreasing_res.solution.selected.len(); + + let increasing_res = evaluate_bnb( + CoinSelector::new(&candidates, &increasing_feerate_opts), + 21_000, + ) + .expect("no result"); + let increasing_len = increasing_res.solution.selected.len(); + + println!("decreasing_len: {}", decreasing_len); + println!("increasing_len: {}", increasing_len); + assert!(decreasing_len < increasing_len); + } + + /// TODO: UNIMPLEMENTED TESTS: + /// * Excess strategies: + /// * We should always have `ExcessStrategy::ToFee`. + /// * We should only have `ExcessStrategy::ToRecipient` when `max_extra_target > 0`. + /// * We should only have `ExcessStrategy::ToDrain` when `drain_value >= min_drain_value`. 
+ /// * Fuzz + /// * Solution feerate should never be lower than target feerate + /// * Solution fee should never be lower than `min_absolute_fee` + /// * Preselected should always remain selected + fn _todo() {} +} diff --git a/nursery/coin_select/src/coin_selector.rs b/nursery/coin_select/src/coin_selector.rs new file mode 100644 index 000000000..f4053ae2b --- /dev/null +++ b/nursery/coin_select/src/coin_selector.rs @@ -0,0 +1,616 @@ +use super::*; + +/// A [`WeightedValue`] represents an input candidate for [`CoinSelector`]. This can either be a +/// single UTXO, or a group of UTXOs that should be spent together. +#[derive(Debug, Clone, Copy)] +pub struct WeightedValue { + /// Total value of the UTXO(s) that this [`WeightedValue`] represents. + pub value: u64, + /// Total weight of including this/these UTXO(s). + /// `txin` fields: `prevout`, `nSequence`, `scriptSigLen`, `scriptSig`, `scriptWitnessLen`, + /// `scriptWitness` should all be included. + pub weight: u32, + /// Total number of inputs; so we can calculate extra `varint` weight due to `vin` len changes. + pub input_count: usize, + /// Whether this [`WeightedValue`] contains at least one segwit spend. + pub is_segwit: bool, +} + +impl WeightedValue { + /// Create a new [`WeightedValue`] that represents a single input. + /// + /// `satisfaction_weight` is the weight of `scriptSigLen + scriptSig + scriptWitnessLen + + /// scriptWitness`. + pub fn new(value: u64, satisfaction_weight: u32, is_segwit: bool) -> WeightedValue { + let weight = TXIN_BASE_WEIGHT + satisfaction_weight; + WeightedValue { + value, + weight, + input_count: 1, + is_segwit, + } + } + + /// Effective value of this input candidate: `actual_value - input_weight * feerate (sats/wu)`. + pub fn effective_value(&self, effective_feerate: f32) -> i64 { + // We prefer undershooting the candidate's effective value (so we over estimate the fee of a + // candidate). 
If we overshoot the candidate's effective value, it may be possible to find a + // solution which does not meet the target feerate. + self.value as i64 - (self.weight as f32 * effective_feerate).ceil() as i64 + } +} + +#[derive(Debug, Clone, Copy)] +pub struct CoinSelectorOpt { + /// The value we need to select. + /// If the value is `None` then the selection will be complete if it can pay for the drain + /// output and satisfy the other constraints (e.g. minimum fees). + pub target_value: Option, + /// Additional leeway for the target value. + pub max_extra_target: u64, // TODO: Maybe out of scope here? + + /// The feerate we should try and achieve in sats per weight unit. + pub target_feerate: f32, + /// The feerate + pub long_term_feerate: Option, // TODO: Maybe out of scope? (waste) + /// The minimum absolute fee. I.e. needed for RBF. + pub min_absolute_fee: u64, + + /// The weight of the template transaction including fixed fields and outputs. + pub base_weight: u32, + /// Additional weight if we include the drain (change) output. + pub drain_weight: u32, + /// Weight of spending the drain (change) output in the future. + pub spend_drain_weight: u32, // TODO: Maybe out of scope? (waste) + + /// Minimum value allowed for a drain (change) output. 
+ pub min_drain_value: u64, +} + +impl CoinSelectorOpt { + fn from_weights(base_weight: u32, drain_weight: u32, spend_drain_weight: u32) -> Self { + // 0.25 sats/wu == 1 sat/vb + let target_feerate = 0.25_f32; + + // set `min_drain_value` to dust limit + let min_drain_value = + 3 * ((drain_weight + spend_drain_weight) as f32 * target_feerate) as u64; + + Self { + target_value: None, + max_extra_target: 0, + target_feerate, + long_term_feerate: None, + min_absolute_fee: 0, + base_weight, + drain_weight, + spend_drain_weight, + min_drain_value, + } + } + + pub fn fund_outputs( + txouts: &[TxOut], + drain_output: &TxOut, + drain_satisfaction_weight: u32, + ) -> Self { + let mut tx = Transaction { + input: vec![], + version: 1, + lock_time: LockTime::ZERO.into(), + output: txouts.to_vec(), + }; + let base_weight = tx.weight(); + // this awkward calculation is necessary since TxOut doesn't have \.weight() + let drain_weight = { + tx.output.push(drain_output.clone()); + tx.weight() - base_weight + }; + Self { + target_value: if txouts.is_empty() { + None + } else { + Some(txouts.iter().map(|txout| txout.value).sum()) + }, + ..Self::from_weights( + base_weight as u32, + drain_weight as u32, + TXIN_BASE_WEIGHT + drain_satisfaction_weight, + ) + } + } + + pub fn long_term_feerate(&self) -> f32 { + self.long_term_feerate.unwrap_or(self.target_feerate) + } + + pub fn drain_waste(&self) -> i64 { + (self.drain_weight as f32 * self.target_feerate + + self.spend_drain_weight as f32 * self.long_term_feerate()) as i64 + } +} + +/// [`CoinSelector`] is responsible for selecting and deselecting from a set of canididates. 
+#[derive(Debug, Clone)] +pub struct CoinSelector<'a> { + pub opts: &'a CoinSelectorOpt, + pub candidates: &'a Vec, + selected: BTreeSet, +} + +impl<'a> CoinSelector<'a> { + pub fn candidate(&self, index: usize) -> &WeightedValue { + &self.candidates[index] + } + + pub fn new(candidates: &'a Vec, opts: &'a CoinSelectorOpt) -> Self { + Self { + candidates, + selected: Default::default(), + opts, + } + } + + pub fn select(&mut self, index: usize) -> bool { + assert!(index < self.candidates.len()); + self.selected.insert(index) + } + + pub fn deselect(&mut self, index: usize) -> bool { + self.selected.remove(&index) + } + + pub fn is_selected(&self, index: usize) -> bool { + self.selected.contains(&index) + } + + pub fn is_empty(&self) -> bool { + self.selected.is_empty() + } + + /// Weight sum of all selected inputs. + pub fn selected_weight(&self) -> u32 { + self.selected + .iter() + .map(|&index| self.candidates[index].weight) + .sum() + } + + /// Effective value sum of all selected inputs. + pub fn selected_effective_value(&self) -> i64 { + self.selected + .iter() + .map(|&index| self.candidates[index].effective_value(self.opts.target_feerate)) + .sum() + } + + /// Absolute value sum of all selected inputs. + pub fn selected_absolute_value(&self) -> u64 { + self.selected + .iter() + .map(|&index| self.candidates[index].value) + .sum() + } + + /// Waste sum of all selected inputs. + pub fn selected_waste(&self) -> i64 { + (self.selected_weight() as f32 * (self.opts.target_feerate - self.opts.long_term_feerate())) + as i64 + } + + /// Current weight of template tx + selected inputs. 
+ pub fn current_weight(&self) -> u32 { + let witness_header_extra_weight = self + .selected() + .find(|(_, wv)| wv.is_segwit) + .map(|_| 2) + .unwrap_or(0); + let vin_count_varint_extra_weight = { + let input_count = self.selected().map(|(_, wv)| wv.input_count).sum::(); + (varint_size(input_count) - 1) * 4 + }; + self.opts.base_weight + + self.selected_weight() + + witness_header_extra_weight + + vin_count_varint_extra_weight + } + + /// Current excess. + pub fn current_excess(&self) -> i64 { + self.selected_effective_value() - self.effective_target() + } + + /// This is the effective target value. + pub fn effective_target(&self) -> i64 { + let (has_segwit, max_input_count) = self + .candidates + .iter() + .fold((false, 0_usize), |(is_segwit, input_count), c| { + (is_segwit || c.is_segwit, input_count + c.input_count) + }); + + let effective_base_weight = self.opts.base_weight + + if has_segwit { 2_u32 } else { 0_u32 } + + (varint_size(max_input_count) - 1) * 4; + + self.opts.target_value.unwrap_or(0) as i64 + + (effective_base_weight as f32 * self.opts.target_feerate).ceil() as i64 + } + + pub fn selected_count(&self) -> usize { + self.selected.len() + } + + pub fn selected(&self) -> impl Iterator + '_ { + self.selected + .iter() + .map(move |&index| (index, &self.candidates[index])) + } + + pub fn unselected(&self) -> impl Iterator + '_ { + self.candidates + .iter() + .enumerate() + .filter(move |(index, _)| !self.selected.contains(index)) + } + + pub fn selected_indexes(&self) -> impl Iterator + '_ { + self.selected.iter().cloned() + } + + pub fn unselected_indexes(&self) -> impl Iterator + '_ { + (0..self.candidates.len()).filter(move |index| !self.selected.contains(index)) + } + + pub fn all_selected(&self) -> bool { + self.selected.len() == self.candidates.len() + } + + pub fn select_all(&mut self) { + self.selected = (0..self.candidates.len()).collect(); + } + + pub fn select_until_finished(&mut self) -> Result { + let mut selection = self.finish(); + + 
if selection.is_ok() { + return selection; + } + + let unselected = self.unselected_indexes().collect::>(); + + for index in unselected { + self.select(index); + selection = self.finish(); + + if selection.is_ok() { + break; + } + } + + selection + } + + pub fn finish(&self) -> Result { + let weight_without_drain = self.current_weight(); + let weight_with_drain = weight_without_drain + self.opts.drain_weight; + + let fee_without_drain = + (weight_without_drain as f32 * self.opts.target_feerate).ceil() as u64; + let fee_with_drain = (weight_with_drain as f32 * self.opts.target_feerate).ceil() as u64; + + let inputs_minus_outputs = { + let target_value = self.opts.target_value.unwrap_or(0); + let selected = self.selected_absolute_value(); + + // find the largest unsatisfied constraint (if any), and return error of that constraint + // "selected" should always be greater than or equal to these selected values + [ + ( + SelectionConstraint::TargetValue, + target_value.saturating_sub(selected), + ), + ( + SelectionConstraint::TargetFee, + (target_value + fee_without_drain).saturating_sub(selected), + ), + ( + SelectionConstraint::MinAbsoluteFee, + (target_value + self.opts.min_absolute_fee).saturating_sub(selected), + ), + ( + SelectionConstraint::MinDrainValue, + // when we have no target value (hence no recipient txouts), we need to ensure + // the selected amount can satisfy requirements for a drain output (so we at + // least have one txout) + if self.opts.target_value.is_none() { + (fee_with_drain + self.opts.min_drain_value).saturating_sub(selected) + } else { + 0 + }, + ), + ] + .iter() + .filter(|&(_, v)| v > &0) + .max_by_key(|&(_, v)| v) + .map_or(Ok(()), |(constraint, missing)| { + Err(SelectionError { + selected, + missing: *missing, + constraint: *constraint, + }) + })?; + + selected - target_value + }; + + let fee_without_drain = fee_without_drain.max(self.opts.min_absolute_fee); + let fee_with_drain = fee_with_drain.max(self.opts.min_absolute_fee); + + 
let excess_without_drain = inputs_minus_outputs - fee_without_drain; + let input_waste = self.selected_waste(); + + // begin preparing excess strategies for final selection + let mut excess_strategies = HashMap::new(); + + // only allow `ToFee` and `ToRecipient` excess strategies when we have a `target_value`, + // otherwise we will result in a result with no txouts, or attempt to add value to an output + // that does not exist + if self.opts.target_value.is_some() { + // no drain, excess to fee + excess_strategies.insert( + ExcessStrategyKind::ToFee, + ExcessStrategy { + recipient_value: self.opts.target_value, + drain_value: None, + fee: fee_without_drain + excess_without_drain, + weight: weight_without_drain, + waste: input_waste + excess_without_drain as i64, + }, + ); + + // no drain, excess to recipient + // if `excess == 0`, this result will be the same as the previous, so don't consider it + // if `max_extra_target == 0`, there is no leeway for this strategy + if excess_without_drain > 0 && self.opts.max_extra_target > 0 { + let extra_recipient_value = + core::cmp::min(self.opts.max_extra_target, excess_without_drain); + let extra_fee = excess_without_drain - extra_recipient_value; + excess_strategies.insert( + ExcessStrategyKind::ToRecipient, + ExcessStrategy { + recipient_value: self.opts.target_value.map(|v| v + extra_recipient_value), + drain_value: None, + fee: fee_without_drain + extra_fee, + weight: weight_without_drain, + waste: input_waste + extra_fee as i64, + }, + ); + } + } + + // with drain + if fee_with_drain >= self.opts.min_absolute_fee + && inputs_minus_outputs >= fee_with_drain + self.opts.min_drain_value + { + excess_strategies.insert( + ExcessStrategyKind::ToDrain, + ExcessStrategy { + recipient_value: self.opts.target_value, + drain_value: Some(inputs_minus_outputs.saturating_sub(fee_with_drain)), + fee: fee_with_drain, + weight: weight_with_drain, + waste: input_waste + self.opts.drain_waste(), + }, + ); + } + + debug_assert!( + 
!excess_strategies.is_empty(), + "should have at least one excess strategy" + ); + + Ok(Selection { + selected: self.selected.clone(), + excess: excess_without_drain, + excess_strategies, + }) + } +} + +#[derive(Clone, Debug)] +pub struct SelectionError { + selected: u64, + missing: u64, + constraint: SelectionConstraint, +} + +impl core::fmt::Display for SelectionError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let SelectionError { + selected, + missing, + constraint, + } = self; + write!( + f, + "insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}", + selected, missing, constraint + ) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for SelectionError {} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum SelectionConstraint { + /// The target is not met + TargetValue, + /// The target fee (given the feerate) is not met + TargetFee, + /// Min absolute fee is not met + MinAbsoluteFee, + /// Min drain value is not met + MinDrainValue, +} + +impl core::fmt::Display for SelectionConstraint { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + SelectionConstraint::TargetValue => core::write!(f, "target_value"), + SelectionConstraint::TargetFee => core::write!(f, "target_fee"), + SelectionConstraint::MinAbsoluteFee => core::write!(f, "min_absolute_fee"), + SelectionConstraint::MinDrainValue => core::write!(f, "min_drain_value"), + } + } +} + +#[derive(Clone, Debug)] +pub struct Selection { + pub selected: BTreeSet, + pub excess: u64, + pub excess_strategies: HashMap, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, core::hash::Hash)] +pub enum ExcessStrategyKind { + ToFee, + ToRecipient, + ToDrain, +} + +#[derive(Clone, Copy, Debug)] +pub struct ExcessStrategy { + pub recipient_value: Option, + pub drain_value: Option, + pub fee: u64, + pub weight: u32, + pub waste: i64, +} + +impl Selection { + pub fn apply_selection<'a, T>( + &'a 
self, + candidates: &'a [T], + ) -> impl Iterator + 'a { + self.selected.iter().map(move |i| &candidates[*i]) + } + + /// Returns the [`ExcessStrategy`] that results in the least waste. + pub fn best_strategy(&self) -> (&ExcessStrategyKind, &ExcessStrategy) { + self.excess_strategies + .iter() + .min_by_key(|&(_, a)| a.waste) + .expect("selection has no excess strategy") + } +} + +impl core::fmt::Display for ExcessStrategyKind { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + ExcessStrategyKind::ToFee => core::write!(f, "to_fee"), + ExcessStrategyKind::ToRecipient => core::write!(f, "to_recipient"), + ExcessStrategyKind::ToDrain => core::write!(f, "to_drain"), + } + } +} + +impl ExcessStrategy { + /// Returns feerate in sats/wu. + pub fn feerate(&self) -> f32 { + self.fee as f32 / self.weight as f32 + } +} + +#[cfg(test)] +mod test { + use crate::{ExcessStrategyKind, SelectionConstraint}; + + use super::{CoinSelector, CoinSelectorOpt, WeightedValue}; + + /// Ensure `target_value` is respected. Can't have no disrespect. 
+ #[test] + fn target_value_respected() { + let target_value = 1000_u64; + + let candidates = (500..1500_u64) + .map(|value| WeightedValue { + value, + weight: 100, + input_count: 1, + is_segwit: false, + }) + .collect::>(); + + let opts = CoinSelectorOpt { + target_value: Some(target_value), + max_extra_target: 0, + target_feerate: 0.00, + long_term_feerate: None, + min_absolute_fee: 0, + base_weight: 10, + drain_weight: 10, + spend_drain_weight: 10, + min_drain_value: 10, + }; + + for (index, v) in candidates.iter().enumerate() { + let mut selector = CoinSelector::new(&candidates, &opts); + assert!(selector.select(index)); + + let res = selector.finish(); + if v.value < opts.target_value.unwrap_or(0) { + let err = res.expect_err("should have failed"); + assert_eq!(err.selected, v.value); + assert_eq!(err.missing, target_value - v.value); + assert_eq!(err.constraint, SelectionConstraint::MinAbsoluteFee); + } else { + let sel = res.expect("should have succeeded"); + assert_eq!(sel.excess, v.value - opts.target_value.unwrap_or(0)); + } + } + } + + #[test] + fn drain_all() { + let candidates = (0..100) + .map(|_| WeightedValue { + value: 666, + weight: 166, + input_count: 1, + is_segwit: false, + }) + .collect::>(); + + let opts = CoinSelectorOpt { + target_value: None, + max_extra_target: 0, + target_feerate: 0.25, + long_term_feerate: None, + min_absolute_fee: 0, + base_weight: 10, + drain_weight: 100, + spend_drain_weight: 66, + min_drain_value: 1000, + }; + + let selection = CoinSelector::new(&candidates, &opts) + .select_until_finished() + .expect("should succeed"); + + assert!(selection.selected.len() > 1); + assert_eq!(selection.excess_strategies.len(), 1); + + let (kind, strategy) = selection.best_strategy(); + assert_eq!(*kind, ExcessStrategyKind::ToDrain); + assert!(strategy.recipient_value.is_none()); + assert!(strategy.drain_value.is_some()); + } + + /// TODO: Tests to add: + /// * `finish` should ensure at least `target_value` is selected. 
+ /// * actual feerate should be equal or higher than `target_feerate`. + /// * actual drain value should be equal or higher than `min_drain_value` (or else no drain). + fn _todo() {} +} diff --git a/nursery/coin_select/src/lib.rs b/nursery/coin_select/src/lib.rs new file mode 100644 index 000000000..ff4d45399 --- /dev/null +++ b/nursery/coin_select/src/lib.rs @@ -0,0 +1,33 @@ +#![no_std] + +#[cfg(feature = "std")] +extern crate std; + +#[macro_use] +extern crate alloc; +extern crate bdk_chain; + +use alloc::vec::Vec; +use bdk_chain::{ + bitcoin, + collections::{BTreeSet, HashMap}, +}; +use bitcoin::{LockTime, Transaction, TxOut}; +use core::fmt::{Debug, Display}; + +mod coin_selector; +pub use coin_selector::*; + +mod bnb; +pub use bnb::*; + +/// Txin "base" fields include `outpoint` (32+4) and `nSequence` (4). This does not include +/// `scriptSigLen` or `scriptSig`. +pub const TXIN_BASE_WEIGHT: u32 = (32 + 4 + 4) * 4; + +/// Helper to calculate varint size. `v` is the value the varint represents. 
+// Shamelessly copied from +// https://github.com/rust-bitcoin/rust-miniscript/blob/d5615acda1a7fdc4041a11c1736af139b8c7ebe8/src/util.rs#L8 +pub(crate) fn varint_size(v: usize) -> u32 { + bitcoin::VarInt(v as u64).len() as u32 +} diff --git a/nursery/tmp_plan/Cargo.toml b/nursery/tmp_plan/Cargo.toml new file mode 100644 index 000000000..39a603c24 --- /dev/null +++ b/nursery/tmp_plan/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "bdk_tmp_plan" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../crates/chain", version = "0.3.1", features = ["miniscript"] } + +[features] +default = ["std"] +std = [] diff --git a/nursery/tmp_plan/README.md b/nursery/tmp_plan/README.md new file mode 100644 index 000000000..70cc100dc --- /dev/null +++ b/nursery/tmp_plan/README.md @@ -0,0 +1,3 @@ +# Temporary planning module + +A temporary place to hold the planning module until https://github.com/rust-bitcoin/rust-miniscript/pull/481 is merged and released diff --git a/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml b/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml new file mode 100644 index 000000000..c2d615df8 --- /dev/null +++ b/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "bdk_tmp_plan" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bdk_chain = { path = "../../../crates/chain", version = "0.3.1", features = ["miniscript"] } + +[features] +default = ["std"] +std = [] diff --git a/nursery/tmp_plan/bdk_tmp_plan/README.md b/nursery/tmp_plan/bdk_tmp_plan/README.md new file mode 100644 index 000000000..70cc100dc --- /dev/null +++ b/nursery/tmp_plan/bdk_tmp_plan/README.md @@ -0,0 +1,3 @@ +# Temporary planning module + +A temporary place to hold the planning module until https://github.com/rust-bitcoin/rust-miniscript/pull/481 is 
merged and released diff --git a/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs b/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs new file mode 100644 index 000000000..a64d44922 --- /dev/null +++ b/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs @@ -0,0 +1,436 @@ +#![allow(unused)] +#![allow(missing_docs)] +//! A spending plan or *plan* for short is a representation of a particular spending path on a +//! descriptor. This allows us to analayze a choice of spending path without producing any +//! signatures or other witness data for it. +//! +//! To make a plan you provide the descriptor with "assets" like which keys you are able to use, hash +//! pre-images you have access to, the current block height etc. +//! +//! Once you've got a plan it can tell you its expected satisfaction weight which can be useful for +//! doing coin selection. Furthermore it provides which subset of those keys and hash pre-images you +//! will actually need as well as what locktime or sequence number you need to set. +//! +//! Once you've obstained signatures, hash pre-images etc required by the plan, it can create a +//! witness/script_sig for the input. 
+use bdk_chain::{bitcoin, collections::*, miniscript}; +use bitcoin::{ + blockdata::{locktime::LockTime, transaction::Sequence}, + hashes::{hash160, ripemd160, sha256}, + secp256k1::Secp256k1, + util::{ + address::WitnessVersion, + bip32::{DerivationPath, Fingerprint, KeySource}, + taproot::{LeafVersion, TapBranchHash, TapLeafHash}, + }, + EcdsaSig, SchnorrSig, Script, TxIn, Witness, +}; +use miniscript::{ + descriptor::{InnerXKey, Tr}, + hash256, DefiniteDescriptorKey, Descriptor, DescriptorPublicKey, ScriptContext, ToPublicKey, +}; + +pub(crate) fn varint_len(v: usize) -> usize { + bitcoin::VarInt(v as u64).len() as usize +} + +mod plan_impls; +mod requirements; +mod template; +pub use requirements::*; +pub use template::PlanKey; +use template::TemplateItem; + +#[derive(Clone, Debug)] +enum TrSpend { + KeySpend, + LeafSpend { + script: Script, + leaf_version: LeafVersion, + }, +} + +#[derive(Clone, Debug)] +enum Target { + Legacy, + Segwitv0 { + script_code: Script, + }, + Segwitv1 { + tr: Tr, + tr_plan: TrSpend, + }, +} + +impl Target {} + +#[derive(Clone, Debug)] +/// A plan represents a particular spending path for a descriptor. +/// +/// See the module level documentation for more info. +pub struct Plan { + template: Vec>, + target: Target, + set_locktime: Option, + set_sequence: Option, +} + +impl Default for Target { + fn default() -> Self { + Target::Legacy + } +} + +#[derive(Clone, Debug, Default)] +/// Signatures and hash pre-images that can be used to complete a plan. 
+pub struct SatisfactionMaterial { + /// Schnorr signautres under their keys + pub schnorr_sigs: BTreeMap, + /// ECDSA signatures under their keys + pub ecdsa_sigs: BTreeMap, + /// SHA256 pre-images under their images + pub sha256_preimages: BTreeMap>, + /// hash160 pre-images under their images + pub hash160_preimages: BTreeMap>, + /// hash256 pre-images under their images + pub hash256_preimages: BTreeMap>, + /// ripemd160 pre-images under their images + pub ripemd160_preimages: BTreeMap>, +} + +impl Plan +where + Ak: Clone, +{ + /// The expected satisfaction weight for the plan if it is completed. + pub fn expected_weight(&self) -> usize { + let script_sig_size = match self.target { + Target::Legacy => unimplemented!(), // self + // .template + // .iter() + // .map(|step| { + // let size = step.expected_size(); + // size + push_opcode_size(size) + // }) + // .sum() + Target::Segwitv0 { .. } | Target::Segwitv1 { .. } => 1, + }; + let witness_elem_sizes: Option> = match &self.target { + Target::Legacy => None, + Target::Segwitv0 { .. } => Some( + self.template + .iter() + .map(|step| step.expected_size()) + .collect(), + ), + Target::Segwitv1 { tr, tr_plan } => { + let mut witness_elems = self + .template + .iter() + .map(|step| step.expected_size()) + .collect::>(); + + if let TrSpend::LeafSpend { + script, + leaf_version, + } = tr_plan + { + let control_block = tr + .spend_info() + .control_block(&(script.clone(), *leaf_version)) + .expect("must exist"); + witness_elems.push(script.len()); + witness_elems.push(control_block.size()); + } + + Some(witness_elems) + } + }; + + let witness_size: usize = match witness_elem_sizes { + Some(elems) => { + varint_len(elems.len()) + + elems + .into_iter() + .map(|elem| varint_len(elem) + elem) + .sum::() + } + None => 0, + }; + + script_sig_size * 4 + witness_size + } + + pub fn requirements(&self) -> Requirements { + match self.try_complete(&SatisfactionMaterial::default()) { + PlanState::Complete { .. 
} => Requirements::default(), + PlanState::Incomplete(requirements) => requirements, + } + } + + pub fn try_complete(&self, auth_data: &SatisfactionMaterial) -> PlanState { + let unsatisfied_items = self + .template + .iter() + .filter(|step| match step { + TemplateItem::Sign(key) => { + !auth_data.schnorr_sigs.contains_key(&key.descriptor_key) + } + TemplateItem::Hash160(image) => !auth_data.hash160_preimages.contains_key(image), + TemplateItem::Hash256(image) => !auth_data.hash256_preimages.contains_key(image), + TemplateItem::Sha256(image) => !auth_data.sha256_preimages.contains_key(image), + TemplateItem::Ripemd160(image) => { + !auth_data.ripemd160_preimages.contains_key(image) + } + TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => false, + }) + .collect::>(); + + if unsatisfied_items.is_empty() { + let mut witness = self + .template + .iter() + .flat_map(|step| step.to_witness_stack(&auth_data)) + .collect::>(); + match &self.target { + Target::Segwitv0 { .. } => todo!(), + Target::Legacy => todo!(), + Target::Segwitv1 { + tr_plan: TrSpend::KeySpend, + .. + } => PlanState::Complete { + final_script_sig: None, + final_script_witness: Some(Witness::from_vec(witness)), + }, + Target::Segwitv1 { + tr, + tr_plan: + TrSpend::LeafSpend { + script, + leaf_version, + }, + } => { + let spend_info = tr.spend_info(); + let control_block = spend_info + .control_block(&(script.clone(), *leaf_version)) + .expect("must exist"); + witness.push(script.clone().into_bytes()); + witness.push(control_block.serialize()); + + PlanState::Complete { + final_script_sig: None, + final_script_witness: Some(Witness::from_vec(witness)), + } + } + } + } else { + let mut requirements = Requirements::default(); + + match &self.target { + Target::Legacy => { + todo!() + } + Target::Segwitv0 { .. } => { + todo!() + } + Target::Segwitv1 { tr, tr_plan } => { + let spend_info = tr.spend_info(); + match tr_plan { + TrSpend::KeySpend => match &self.template[..] 
{ + [TemplateItem::Sign(ref plan_key)] => { + requirements.signatures = RequiredSignatures::TapKey { + merkle_root: spend_info.merkle_root(), + plan_key: plan_key.clone(), + }; + } + _ => unreachable!("tapkey spend will always have only one sign step"), + }, + TrSpend::LeafSpend { + script, + leaf_version, + } => { + let leaf_hash = TapLeafHash::from_script(&script, *leaf_version); + requirements.signatures = RequiredSignatures::TapScript { + leaf_hash, + plan_keys: vec![], + } + } + } + } + } + + let required_signatures = match requirements.signatures { + RequiredSignatures::Legacy { .. } => todo!(), + RequiredSignatures::Segwitv0 { .. } => todo!(), + RequiredSignatures::TapKey { .. } => return PlanState::Incomplete(requirements), + RequiredSignatures::TapScript { + plan_keys: ref mut keys, + .. + } => keys, + }; + + for step in unsatisfied_items { + match step { + TemplateItem::Sign(plan_key) => { + required_signatures.push(plan_key.clone()); + } + TemplateItem::Hash160(image) => { + requirements.hash160_images.insert(image.clone()); + } + TemplateItem::Hash256(image) => { + requirements.hash256_images.insert(image.clone()); + } + TemplateItem::Sha256(image) => { + requirements.sha256_images.insert(image.clone()); + } + TemplateItem::Ripemd160(image) => { + requirements.ripemd160_images.insert(image.clone()); + } + TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => { /* no requirements */ + } + } + } + + PlanState::Incomplete(requirements) + } + } + + /// Witness version for the plan + pub fn witness_version(&self) -> Option { + match self.target { + Target::Legacy => None, + Target::Segwitv0 { .. } => Some(WitnessVersion::V0), + Target::Segwitv1 { .. } => Some(WitnessVersion::V1), + } + } + + /// The minimum required locktime height or time on the transaction using the plan. 
+ pub fn required_locktime(&self) -> Option { + self.set_locktime.clone() + } + + /// The minimum required sequence (height or time) on the input to satisfy the plan + pub fn required_sequence(&self) -> Option { + self.set_sequence.clone() + } + + /// The minmum required transaction version required on the transaction using the plan. + pub fn min_version(&self) -> Option { + if let Some(_) = self.set_sequence { + Some(2) + } else { + Some(1) + } + } +} + +/// The returned value from [`Plan::try_complete`]. +pub enum PlanState { + /// The plan is complete + Complete { + /// The script sig that should be set on the input + final_script_sig: Option