Skip to content

Commit

Permalink
Merge branch 'develop' into feat/c32-optimizations
Browse files Browse the repository at this point in the history
  • Loading branch information
zone117x authored Apr 20, 2022
2 parents 773026e + 1e0b80b commit b1061ef
Show file tree
Hide file tree
Showing 10 changed files with 271 additions and 10 deletions.
7 changes: 5 additions & 2 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to the versioning scheme outlined in the [README.md](README.md).

## [2.05.0.2.0]

WARNING: Please be aware that running this node against a chainstate created by an earlier release
will cause the node to spend up to 30 minutes migrating the data to a new schema.

## [Unreleased]

### Changed
- The MARF implementation will now defer calculating the root hash of a new trie
until the moment the trie is committed to disk. This avoids gratuitous hash
Expand All @@ -29,6 +29,9 @@ minutes when the node starts up.
an event is sent through the event dispatcher. This fixes #3015.
- Expose a node's public key and public key hash160 (i.e. what appears in
/v2/neighbors) via the /v2/info API endpoint (#3046)
- Reduced the default subsequent block attempt timeout from 180 seconds to 30
seconds, based on benchmarking the new MARF performance data during a period
of network congestion (#3098)

## [2.05.0.1.0]

Expand Down
2 changes: 1 addition & 1 deletion build-scripts/Dockerfile.linux-arm64
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ RUN apt-get update && apt-get install -y git gcc-aarch64-linux-gnu
RUN CC=aarch64-linux-gnu-gcc \
CC_aarch64_unknown_linux_gnu=aarch64-linux-gnu-gcc \
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \
cargo build --release --workspace=./ --target aarch64-unknown-linux-gnu
cargo build --release --workspace --target aarch64-unknown-linux-gnu

RUN mkdir /out && cp -R /src/target/aarch64-unknown-linux-gnu/release/. /out

Expand Down
2 changes: 1 addition & 1 deletion build-scripts/Dockerfile.linux-armv7
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ RUN apt-get update && apt-get install -y git gcc-arm-linux-gnueabihf
RUN CC=arm-linux-gnueabihf-gcc \
CC_armv7_unknown_linux_gnueabihf=arm-linux-gnueabihf-gcc \
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc \
cargo build --release --workspace=./ --target armv7-unknown-linux-gnueabihf
cargo build --release --workspace --target armv7-unknown-linux-gnueabihf

RUN mkdir /out && cp -R /src/target/armv7-unknown-linux-gnueabihf/release/. /out

Expand Down
2 changes: 1 addition & 1 deletion build-scripts/Dockerfile.linux-musl-x64
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ RUN apt-get update && apt-get install -y git musl-tools
RUN CC=musl-gcc \
CC_x86_64_unknown_linux_musl=musl-gcc \
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=musl-gcc \
cargo build --release --workspace=./ --target x86_64-unknown-linux-musl
cargo build --release --workspace --target x86_64-unknown-linux-musl

RUN mkdir /out && cp -R /src/target/x86_64-unknown-linux-musl/release/. /out

Expand Down
2 changes: 1 addition & 1 deletion build-scripts/Dockerfile.linux-x64
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ RUN apt-get update && apt-get install -y git

RUN rustup target add x86_64-unknown-linux-gnu

RUN cargo build --release --workspace=./ --target x86_64-unknown-linux-gnu
RUN cargo build --release --workspace --target x86_64-unknown-linux-gnu

RUN mkdir /out && cp -R /src/target/x86_64-unknown-linux-gnu/release/. /out

Expand Down
2 changes: 1 addition & 1 deletion build-scripts/Dockerfile.macos-arm64
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ RUN wget -nc -O /tmp/osxcross.tar.zst "https://github.com/hirosystems/docker-osx
RUN mkdir /opt/osxcross && tar -xaf /tmp/osxcross.tar.zst -C /opt/osxcross

RUN . /opt/osxcross/env-macos-aarch64 && \
cargo build --target aarch64-apple-darwin --release --workspace=./
cargo build --target aarch64-apple-darwin --release --workspace

RUN mkdir /out && cp -R /src/target/aarch64-apple-darwin/release/. /out

Expand Down
2 changes: 1 addition & 1 deletion build-scripts/Dockerfile.macos-x64
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ RUN wget -nc -O /tmp/osxcross.tar.zst "https://github.com/hirosystems/docker-osx
RUN mkdir /opt/osxcross && tar -xaf /tmp/osxcross.tar.zst -C /opt/osxcross

RUN . /opt/osxcross/env-macos-x86_64 && \
cargo build --target x86_64-apple-darwin --release --workspace=./
cargo build --target x86_64-apple-darwin --release --workspace

RUN mkdir /out && cp -R /src/target/x86_64-apple-darwin/release/. /out

Expand Down
2 changes: 1 addition & 1 deletion build-scripts/Dockerfile.windows-x64
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y git gcc-mingw-w64-x86-64

RUN CC_x86_64_pc_windows_gnu=x86_64-w64-mingw32-gcc \
CARGO_TARGET_X86_64_PC_WINDOWS_GNU_LINKER=x86_64-w64-mingw32-gcc \
cargo build --release --workspace=./ --target x86_64-pc-windows-gnu
cargo build --release --workspace --target x86_64-pc-windows-gnu

RUN mkdir /out && cp -R /src/target/x86_64-pc-windows-gnu/release/. /out

Expand Down
2 changes: 1 addition & 1 deletion src/chainstate/stacks/db/blocks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4156,7 +4156,7 @@ impl StacksChainState {
/// not orphaned.
/// Return Ok(Some(microblocks)) if we got microblocks (even if it's an empty stream)
/// Return Ok(None) if there are no staging microblocks yet
fn find_parent_microblock_stream(
pub fn find_parent_microblock_stream(
blocks_conn: &DBConn,
staging_block: &StagingBlock,
) -> Result<Option<Vec<StacksMicroblock>>, Error> {
Expand Down
258 changes: 258 additions & 0 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ extern crate rusqlite;
#[macro_use]
extern crate stacks_common;

#[macro_use]
extern crate serde_json;

#[macro_use(o, slog_log, slog_trace, slog_debug, slog_info, slog_warn, slog_error)]
extern crate slog;

Expand All @@ -49,6 +52,7 @@ use blockstack_lib::burnchains::bitcoin::BitcoinNetworkType;
use blockstack_lib::burnchains::db::BurnchainDB;
use blockstack_lib::burnchains::Address;
use blockstack_lib::burnchains::Burnchain;
use blockstack_lib::burnchains::Txid;
use blockstack_lib::chainstate::burn::ConsensusHash;
use blockstack_lib::chainstate::stacks::db::blocks::DummyEventDispatcher;
use blockstack_lib::chainstate::stacks::db::blocks::StagingBlock;
Expand Down Expand Up @@ -223,6 +227,260 @@ fn main() {
process::exit(0);
}

// Subcommand `get-tenure`: load one anchored block plus the parent microblock
// stream it confirms, and dump both as a JSON report on stdout.
// Usage: <bin> get-tenure CHAIN_STATE_DIR BLOCK_HASH
if argv[1] == "get-tenure" {
    if argv.len() < 4 {
        eprintln!("Usage: {} get-tenure CHAIN_STATE_DIR BLOCK_HASH", argv[0]);
        process::exit(1);
    }

    // BLOCK_HASH is a hex-encoded index block hash (consensus hash + block hash).
    let index_block_hash = StacksBlockId::from_hex(&argv[3])
        .expect("Invalid BLOCK_HASH: expected a hex-encoded index block hash");
    let chain_state_path = format!("{}/mainnet/chainstate/", &argv[2]);

    let (chainstate, _) =
        StacksChainState::open(true, CHAIN_ID_MAINNET, &chain_state_path, None).unwrap();

    // Resolve the index hash back to its (consensus hash, block hash) pair so
    // the raw block bytes can be located on disk.
    let (consensus_hash, block_hash) = chainstate
        .get_block_header_hashes(&index_block_hash)
        .unwrap()
        .expect("FATAL: no such block");
    let mut block_info =
        StacksChainState::load_staging_block_info(chainstate.db(), &index_block_hash)
            .unwrap()
            .expect("No such block");
    block_info.block_data =
        StacksChainState::load_block_bytes(&chainstate.blocks_path, &consensus_hash, &block_hash)
            .unwrap()
            .expect("No such block");

    let block = StacksBlock::consensus_deserialize(&mut io::Cursor::new(&block_info.block_data))
        .map_err(|_e| {
            eprintln!("Failed to decode block");
            process::exit(1);
        })
        .unwrap();

    // An anchored block may confirm a (possibly empty) parent microblock stream.
    let microblocks =
        StacksChainState::find_parent_microblock_stream(chainstate.db(), &block_info)
            .unwrap()
            .unwrap_or_default();

    // One JSON entry per confirmed microblock, listing its transactions.
    let mut mblock_report = vec![];
    for mblock in microblocks.iter() {
        let mut tx_report = vec![];
        for tx in mblock.txs.iter() {
            tx_report.push(json!({
                "txid": tx.txid().to_string(),
                "fee": tx.get_tx_fee().to_string(),
                // to_hex already yields a String; no format! wrapper needed.
                "tx": to_hex(&tx.serialize_to_vec()),
            }));
        }
        mblock_report.push(json!({
            "microblock": mblock.block_hash().to_string(),
            "txs": tx_report
        }));
    }

    // Transactions of the anchored block itself.
    let mut block_tx_report = vec![];
    for tx in block.txs.iter() {
        block_tx_report.push(json!({
            "txid": tx.txid().to_string(),
            "fee": tx.get_tx_fee().to_string(),
            "tx": to_hex(&tx.serialize_to_vec())
        }));
    }

    let report = json!({
        "block": {
            "block_id": index_block_hash.to_string(),
            "block_hash": block.block_hash().to_string(),
            "height": block.header.total_work.work.to_string(),
            "txs": block_tx_report
        },
        "microblocks": mblock_report
    });

    println!("{}", report);

    process::exit(0);
}

// Subcommand `analyze-fees`: walk the last NUM_BLOCKS blocks back from the
// chain tip, cross-reference their transactions against the local mempool DB
// to find how many blocks each tx waited before being mined ("delta"), and
// print per-delta fee statistics (avg/min/max/p50/p90/p95/p99) as JSON.
// Usage: <bin> analyze-fees CHAIN_STATE_DIR NUM_BLOCKS
if argv[1] == "analyze-fees" {
    if argv.len() < 4 {
        eprintln!("Usage: {} analyze-fees CHAIN_STATE_DIR NUM_BLOCKS", argv[0]);
        process::exit(1);
    }

    let chain_state_path = format!("{}/mainnet/chainstate/", &argv[2]);
    let sort_db_path = format!("{}/mainnet/burnchain/sortition", &argv[2]);
    let (chainstate, _) =
        StacksChainState::open(true, CHAIN_ID_MAINNET, &chain_state_path, None).unwrap();
    // unwrap_or_else so the error message is only built on the failure path
    // (clippy::expect_fun_call).
    let sort_db = SortitionDB::open(&sort_db_path, false)
        .unwrap_or_else(|_| panic!("Failed to open {}", &sort_db_path));

    let num_blocks = argv[3]
        .parse::<u64>()
        .expect("Invalid NUM_BLOCKS: expected an unsigned integer");

    // Start at the canonical Stacks chain tip and walk parent links backwards.
    let mut block_info = chainstate
        .get_stacks_chain_tip(&sort_db)
        .unwrap()
        .expect("FATAL: no chain tip");
    block_info.block_data = StacksChainState::load_block_bytes(
        &chainstate.blocks_path,
        &block_info.consensus_hash,
        &block_info.anchored_block_hash,
    )
    .unwrap()
    .expect("No such block");

    let mut tx_fees = HashMap::new(); // txid -> fee paid
    let mut tx_mined_heights = HashMap::new(); // block height -> txids mined there
    let mut tx_mined_deltas: HashMap<u64, Vec<Txid>> = HashMap::new(); // delay -> txids

    for _i in 0..num_blocks {
        let block_hash = StacksBlockHeader::make_index_block_hash(
            &block_info.consensus_hash,
            &block_info.anchored_block_hash,
        );
        debug!("Consider block {} ({} of {})", &block_hash, _i, num_blocks);

        let block =
            StacksBlock::consensus_deserialize(&mut io::Cursor::new(&block_info.block_data))
                .map_err(|_e| {
                    eprintln!("Failed to decode block {}", &block_hash);
                    process::exit(1);
                })
                .unwrap();

        let microblocks =
            StacksChainState::find_parent_microblock_stream(chainstate.db(), &block_info)
                .unwrap()
                .unwrap_or_default();

        let mut txids_at_height = vec![];

        // Record fees for all microblock transactions confirmed by this block.
        for mblock in microblocks.iter() {
            for tx in mblock.txs.iter() {
                tx_fees.insert(tx.txid(), tx.get_tx_fee());
                txids_at_height.push(tx.txid());
            }
        }

        // Record fees for the anchored block's transactions; a zero fee
        // indicates a coinbase, which is skipped.
        for tx in block.txs.iter() {
            if tx.get_tx_fee() > 0 {
                tx_fees.insert(tx.txid(), tx.get_tx_fee());
                txids_at_height.push(tx.txid());
            }
        }

        tx_mined_heights.insert(block_info.height, txids_at_height);

        // Step to the parent block; stop once we walk off the end of the
        // locally-stored chain.
        block_info = match StacksChainState::load_staging_block_info(
            chainstate.db(),
            &StacksBlockHeader::make_index_block_hash(
                &block_info.parent_consensus_hash,
                &block_info.parent_anchored_block_hash,
            ),
        )
        .unwrap()
        {
            Some(blk) => blk,
            None => break,
        };
        block_info.block_data = StacksChainState::load_block_bytes(
            &chainstate.blocks_path,
            &block_info.consensus_hash,
            &block_info.anchored_block_hash,
        )
        .unwrap()
        .expect("No such block");
    }

    // Cross-reference mined transactions against the mempool DB: the delta is
    // mined height minus the height at which the mempool first saw the tx.
    // Transactions no longer present in the mempool DB are skipped.
    let estimator = Box::new(UnitEstimator);
    let metric = Box::new(UnitMetric);
    let mempool_db =
        MemPoolDB::open(true, CHAIN_ID_MAINNET, &chain_state_path, estimator, metric)
            .expect("Failed to open mempool db");

    let total_txs: usize = tx_mined_heights.values().map(|txids| txids.len()).sum();

    let mut tx_cnt = 0;
    for (mined_height, txids) in tx_mined_heights.iter() {
        for txid in txids.iter() {
            tx_cnt += 1;
            if tx_cnt % 100 == 0 {
                debug!("Check tx {} of {}", tx_cnt, total_txs);
            }

            if let Some(txinfo) = MemPoolDB::get_tx(&mempool_db.db, txid).unwrap() {
                let delta = mined_height.saturating_sub(txinfo.metadata.block_height);
                // Entry API: one hash lookup instead of get_mut + insert.
                tx_mined_deltas.entry(delta).or_default().push(txid.clone());
            }
        }
    }

    // Emit one report per observed delta, in ascending order of delta.
    let mut deltas: Vec<u64> = tx_mined_deltas.keys().copied().collect();
    deltas.sort_unstable();

    let mut reports = vec![];
    for delta in deltas {
        let txids = match tx_mined_deltas.get(&delta) {
            Some(txids) if !txids.is_empty() => txids,
            _ => continue,
        };
        let mut delta_tx_fees: Vec<u64> = txids
            .iter()
            .map(|txid| *tx_fees.get(txid).unwrap_or(&0))
            .collect();
        delta_tx_fees.sort_unstable();
        let total_tx_fees: u64 = delta_tx_fees.iter().sum();

        // Guarded above: delta_tx_fees is non-empty, so the division and the
        // percentile indexes are all in bounds (len * 99 / 100 < len for
        // any len >= 1).
        let avg_tx_fee = total_tx_fees / (delta_tx_fees.len() as u64);
        let min_tx_fee = delta_tx_fees[0];
        let median_tx_fee = delta_tx_fees[delta_tx_fees.len() / 2];
        let percent_90_tx_fee = delta_tx_fees[(delta_tx_fees.len() * 90) / 100];
        let percent_95_tx_fee = delta_tx_fees[(delta_tx_fees.len() * 95) / 100];
        let percent_99_tx_fee = delta_tx_fees[(delta_tx_fees.len() * 99) / 100];
        let max_tx_fee = *delta_tx_fees.last().unwrap();

        reports.push(json!({
            "delta": delta.to_string(),
            "tx_total": delta_tx_fees.len().to_string(),
            "tx_fees": json!({
                "avg": avg_tx_fee.to_string(),
                "min": min_tx_fee.to_string(),
                "max": max_tx_fee.to_string(),
                "p50": median_tx_fee.to_string(),
                "p90": percent_90_tx_fee.to_string(),
                "p95": percent_95_tx_fee.to_string(),
                "p99": percent_99_tx_fee.to_string(),
            }),
        }));
    }

    println!("{}", serde_json::Value::Array(reports));
    process::exit(0);
}

if argv[1] == "get-block-inventory" {
if argv.len() < 3 {
eprintln!(
Expand Down

0 comments on commit b1061ef

Please sign in to comment.