diff --git a/.github/actions/docsgen/Dockerfile.docsgen b/.github/actions/docsgen/Dockerfile.docsgen index 9bed9ff462..61c95fb70a 100644 --- a/.github/actions/docsgen/Dockerfile.docsgen +++ b/.github/actions/docsgen/Dockerfile.docsgen @@ -10,8 +10,8 @@ RUN cargo build RUN mkdir /out -RUN /src/target/debug/blockstack-core docgen | jq . > /out/clarity-reference.json -RUN /src/target/debug/blockstack-core docgen_boot | jq . > /out/boot-contracts-reference.json +RUN /src/target/debug/stacks-inspect docgen | jq . > /out/clarity-reference.json +RUN /src/target/debug/stacks-inspect docgen_boot | jq . > /out/boot-contracts-reference.json FROM scratch AS export-stage COPY --from=build /out/clarity-reference.json / diff --git a/.vscode/launch.json b/.vscode/launch.json index 59cd020acf..225e311dea 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -8,11 +8,11 @@ "cargo": { "args": [ "build", - "--bin=blockstack-core", + "--bin=stacks-inspect", "--package=blockstack-core" ], "filter": { - "name": "blockstack-core", + "name": "stacks-inspect", "kind": "bin" } }, @@ -118,11 +118,11 @@ "args": [ "test", "--no-run", - "--bin=blockstack-core", + "--bin=stacks-inspect", "--package=blockstack-core" ], "filter": { - "name": "blockstack-core", + "name": "stacks-inspect", "kind": "bin" } }, diff --git a/CHANGELOG.md b/CHANGELOG.md index 88f7ac1c83..1e9a757a46 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,25 +21,27 @@ the node to spend up to 30 minutes migrating the data to a new schema. ### Changed - The MARF implementation will now defer calculating the root hash of a new trie until the moment the trie is committed to disk. This avoids gratuitous hash -calculations, and yields a performance improvement of anywhere between 10x and -200x (#3041). + calculations, and yields a performance improvement of anywhere between 10x and + 200x (#3041). - The MARF implementation will now store tries to an external file for instances where the tries are expected to exceed the SQLite page size (namely, the -Clarity database). This improves read performance by a factor of 10x to 14x -(#3059). + Clarity database). This improves read performance by a factor of 10x to 14x + (#3059). - The MARF implementation may now cache trie nodes in RAM if directed to do so by an environment variable (#3042). - Sortition processing performance has been improved by about an order of magnitude, by avoiding a slew of expensive database reads (#3045). WARNING: -applying this change to an existing chainstate directory will take a few -minutes when the node starts up. + applying this change to an existing chainstate directory will take a few + minutes when the node starts up. - Updated chains coordinator so that before a Stacks block or a burn block is processed, an event is sent through the event dispatcher. This fixes #3015. - Expose a node's public key and public key hash160 (i.e. what appears in /v2/neighbors) via the /v2/info API endpoint (#3046) - Reduced the default subsequent block attempt timeout from 180 seconds to 30 seconds, based on benchmarking the new MARF performance data during a period -of network congestion (#3098) + of network congestion (#3098) +- The `blockstack-core` binary has been renamed to `stacks-inspect`. + This binary provides CLI tools for chain and mempool inspection. 
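As a rough illustration of the rename (not part of this diff), tooling that previously shelled out to `blockstack-core docgen` would now invoke `stacks-inspect docgen`. The sketch below assumes the binary has been built and is on `PATH`; it is only an example of calling the renamed CLI, not code from this change set:

```rust
use std::process::Command;

fn main() {
    // `docgen` prints the Clarity reference as JSON -- the same subcommand the
    // docsgen Dockerfile above pipes through `jq`.
    let output = Command::new("stacks-inspect")
        .arg("docgen")
        .output()
        .expect("failed to run stacks-inspect; is it built and on PATH?");
    assert!(output.status.success());
    println!("{}", String::from_utf8_lossy(&output.stdout));
}
```
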
## [2.05.0.1.0] diff --git a/Cargo.toml b/Cargo.toml index e199569c84..0d75d64b0a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ name = "blockstack_lib" path = "src/lib.rs" [[bin]] -name = "blockstack-core" +name = "stacks-inspect" path = "src/main.rs" [[bin]] diff --git a/build-scripts/Dockerfile.linux-arm64 b/build-scripts/Dockerfile.linux-arm64 index 66a61bf5f5..7acc30f6bf 100644 --- a/build-scripts/Dockerfile.linux-arm64 +++ b/build-scripts/Dockerfile.linux-arm64 @@ -20,4 +20,4 @@ RUN CC=aarch64-linux-gnu-gcc \ RUN mkdir /out && cp -R /src/target/aarch64-unknown-linux-gnu/release/. /out FROM scratch AS export-stage -COPY --from=build /out/blockstack-core /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file +COPY --from=build /out/stacks-inspect /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file diff --git a/build-scripts/Dockerfile.linux-armv7 b/build-scripts/Dockerfile.linux-armv7 index b422761193..9fb50d18bc 100644 --- a/build-scripts/Dockerfile.linux-armv7 +++ b/build-scripts/Dockerfile.linux-armv7 @@ -20,4 +20,4 @@ RUN CC=arm-linux-gnueabihf-gcc \ RUN mkdir /out && cp -R /src/target/armv7-unknown-linux-gnueabihf/release/. /out FROM scratch AS export-stage -COPY --from=build /out/blockstack-core /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file +COPY --from=build /out/stacks-inspect /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file diff --git a/build-scripts/Dockerfile.linux-musl-x64 b/build-scripts/Dockerfile.linux-musl-x64 index 6bbbc85fa7..9c6c604341 100644 --- a/build-scripts/Dockerfile.linux-musl-x64 +++ b/build-scripts/Dockerfile.linux-musl-x64 @@ -20,4 +20,4 @@ RUN CC=musl-gcc \ RUN mkdir /out && cp -R /src/target/x86_64-unknown-linux-musl/release/. /out FROM scratch AS export-stage -COPY --from=build /out/blockstack-core /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file +COPY --from=build /out/stacks-inspect /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file diff --git a/build-scripts/Dockerfile.linux-x64 b/build-scripts/Dockerfile.linux-x64 index 65cb6dfe83..b4abb08aed 100644 --- a/build-scripts/Dockerfile.linux-x64 +++ b/build-scripts/Dockerfile.linux-x64 @@ -17,4 +17,4 @@ RUN cargo build --release --workspace --target x86_64-unknown-linux-gnu RUN mkdir /out && cp -R /src/target/x86_64-unknown-linux-gnu/release/. /out FROM scratch AS export-stage -COPY --from=build /out/blockstack-core /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file +COPY --from=build /out/stacks-inspect /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file diff --git a/build-scripts/Dockerfile.macos-arm64 b/build-scripts/Dockerfile.macos-arm64 index 3692d41ad1..56cfe684a3 100644 --- a/build-scripts/Dockerfile.macos-arm64 +++ b/build-scripts/Dockerfile.macos-arm64 @@ -21,4 +21,4 @@ RUN . /opt/osxcross/env-macos-aarch64 && \ RUN mkdir /out && cp -R /src/target/aarch64-apple-darwin/release/. /out FROM scratch AS export-stage -COPY --from=build /out/blockstack-core /out/blockstack-cli /out/clarity-cli /out/stacks-node / +COPY --from=build /out/stacks-inspect /out/blockstack-cli /out/clarity-cli /out/stacks-node / diff --git a/build-scripts/Dockerfile.macos-x64 b/build-scripts/Dockerfile.macos-x64 index 56a37d7a97..29038b6967 100644 --- a/build-scripts/Dockerfile.macos-x64 +++ b/build-scripts/Dockerfile.macos-x64 @@ -21,4 +21,4 @@ RUN . 
/opt/osxcross/env-macos-x86_64 && \ RUN mkdir /out && cp -R /src/target/x86_64-apple-darwin/release/. /out FROM scratch AS export-stage -COPY --from=build /out/blockstack-core /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file +COPY --from=build /out/stacks-inspect /out/blockstack-cli /out/clarity-cli /out/stacks-node / \ No newline at end of file diff --git a/build-scripts/Dockerfile.windows-x64 b/build-scripts/Dockerfile.windows-x64 index 82bef0cf0b..58785ccba7 100644 --- a/build-scripts/Dockerfile.windows-x64 +++ b/build-scripts/Dockerfile.windows-x64 @@ -19,4 +19,4 @@ RUN CC_x86_64_pc_windows_gnu=x86_64-w64-mingw32-gcc \ RUN mkdir /out && cp -R /src/target/x86_64-pc-windows-gnu/release/. /out FROM scratch AS export-stage -COPY --from=build /out/blockstack-core.exe /out/blockstack-cli.exe /out/clarity-cli.exe /out/stacks-node.exe / \ No newline at end of file +COPY --from=build /out/stacks-inspect.exe /out/blockstack-cli.exe /out/clarity-cli.exe /out/stacks-node.exe / \ No newline at end of file diff --git a/circle.yml b/circle.yml index 6c8a68878d..131712a04a 100644 --- a/circle.yml +++ b/circle.yml @@ -15,15 +15,15 @@ jobs: cargo build - run: command: | - ./target/debug/blockstack-core local initialize db && - ./target/debug/blockstack-core local check sample-contracts/tokens.clar db && - ./target/debug/blockstack-core local launch S1G2081040G2081040G2081040G208105NK8PE5.tokens sample-contracts/tokens.clar db && - ./target/debug/blockstack-core local check sample-contracts/names.clar db && - ./target/debug/blockstack-core local launch S1G2081040G2081040G2081040G208105NK8PE5.names sample-contracts/names.clar db && - ./target/debug/blockstack-core local execute db S1G2081040G2081040G2081040G208105NK8PE5.tokens mint! SZ2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQ9H6DPR u100000 + ./target/debug/stacks-inspect local initialize db && + ./target/debug/stacks-inspect local check sample-contracts/tokens.clar db && + ./target/debug/stacks-inspect local launch S1G2081040G2081040G2081040G208105NK8PE5.tokens sample-contracts/tokens.clar db && + ./target/debug/stacks-inspect local check sample-contracts/names.clar db && + ./target/debug/stacks-inspect local launch S1G2081040G2081040G2081040G208105NK8PE5.names sample-contracts/names.clar db && + ./target/debug/stacks-inspect local execute db S1G2081040G2081040G2081040G208105NK8PE5.tokens mint! 
SZ2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQ9H6DPR u100000 - run: command: | - echo "(get-balance 'SZ2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQ9H6DPR)" | ./target/debug/blockstack-core local eval S1G2081040G2081040G2081040G208105NK8PE5.tokens db + echo "(get-balance 'SZ2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQ9H6DPR)" | ./target/debug/stacks-inspect local eval S1G2081040G2081040G2081040G208105NK8PE5.tokens db unit_tests_with_cov: machine: true working_directory: ~/blockstack diff --git a/clarity/src/vm/analysis/arithmetic_checker/mod.rs b/clarity/src/vm/analysis/arithmetic_checker/mod.rs index d2bd0d957c..b898178574 100644 --- a/clarity/src/vm/analysis/arithmetic_checker/mod.rs +++ b/clarity/src/vm/analysis/arithmetic_checker/mod.rs @@ -151,7 +151,9 @@ impl<'a> ArithmeticOnlyChecker<'a> { { match native_var { ContractCaller | TxSender | TotalLiquidMicroSTX | BlockHeight | BurnBlockHeight - | Regtest | TxSponsor | Mainnet => Err(Error::VariableForbidden(native_var)), + | Regtest | TxSponsor | Mainnet | ChainId => { + Err(Error::VariableForbidden(native_var)) + } NativeNone | NativeTrue | NativeFalse => Ok(()), } } else { diff --git a/clarity/src/vm/analysis/arithmetic_checker/tests.rs b/clarity/src/vm/analysis/arithmetic_checker/tests.rs index 2c096749d9..d4a236f3f0 100644 --- a/clarity/src/vm/analysis/arithmetic_checker/tests.rs +++ b/clarity/src/vm/analysis/arithmetic_checker/tests.rs @@ -113,6 +113,7 @@ fn test_variables_fail_arithmetic_check_clarity1() { ), ("(define-private (foo) tx-sponsor?)", Ok(())), ("(define-private (foo) is-in-mainnet)", Ok(())), + ("(define-private (foo) chain-id)", Ok(())), ]; for (contract, result) in tests.iter() { @@ -170,6 +171,10 @@ fn test_variables_fail_arithmetic_check_clarity2() { "(define-private (foo) is-in-mainnet)", Err(VariableForbidden(NativeVariables::Mainnet)), ), + ( + "(define-private (foo) chain-id)", + Err(VariableForbidden(NativeVariables::ChainId)), + ), ]; for (contract, result) in tests.iter() { diff --git a/clarity/src/vm/analysis/type_checker/mod.rs b/clarity/src/vm/analysis/type_checker/mod.rs index 8355eb859a..7056ae9d36 100644 --- a/clarity/src/vm/analysis/type_checker/mod.rs +++ b/clarity/src/vm/analysis/type_checker/mod.rs @@ -360,6 +360,7 @@ fn type_reserved_variable(variable_name: &str, version: &ClarityVersion) -> Opti TotalLiquidMicroSTX => TypeSignature::UIntType, Regtest => TypeSignature::BoolType, Mainnet => TypeSignature::BoolType, + ChainId => TypeSignature::UIntType, }; Some(var_type) } else { diff --git a/clarity/src/vm/clarity.rs b/clarity/src/vm/clarity.rs index 04e167edc7..65b2fc3ba9 100644 --- a/clarity/src/vm/clarity.rs +++ b/clarity/src/vm/clarity.rs @@ -119,6 +119,7 @@ pub trait ClarityConnection { fn with_readonly_clarity_env( &mut self, mainnet: bool, + chain_id: u32, sender: PrincipalData, sponsor: Option, cost_track: LimitedCostTracker, @@ -129,8 +130,9 @@ pub trait ClarityConnection { { let epoch_id = self.get_epoch(); self.with_clarity_db_readonly_owned(|clarity_db| { - let mut vm_env = - OwnedEnvironment::new_cost_limited(mainnet, clarity_db, cost_track, epoch_id); + let mut vm_env = OwnedEnvironment::new_cost_limited( + mainnet, chain_id, clarity_db, cost_track, epoch_id, + ); let result = vm_env .execute_in_env(sender, sponsor, to_do) .map(|(result, _, _)| result); diff --git a/clarity/src/vm/contexts.rs b/clarity/src/vm/contexts.rs index dbd96ff0ff..bd3a937267 100644 --- a/clarity/src/vm/contexts.rs +++ b/clarity/src/vm/contexts.rs @@ -51,6 +51,8 @@ use crate::vm::version::ClarityVersion; use 
crate::vm::coverage::CoverageReporter; +use stacks_common::consts::CHAIN_ID_TESTNET; + use serde::Serialize; pub const MAX_CONTEXT_DEPTH: u16 = 256; @@ -204,6 +206,8 @@ pub struct GlobalContext<'a> { pub coverage_reporting: Option, /// This is the epoch of the the block that this transaction is executing within. epoch_id: StacksEpochId, + /// This is the chain ID of the transaction + pub chain_id: u32, } #[derive(Serialize, Deserialize, Clone)] @@ -543,7 +547,13 @@ impl<'a> OwnedEnvironment<'a> { pub fn new(database: ClarityDatabase<'a>) -> OwnedEnvironment<'a> { let epoch = StacksEpochId::Epoch2_05; OwnedEnvironment { - context: GlobalContext::new(false, database, LimitedCostTracker::new_free(), epoch), + context: GlobalContext::new( + false, + CHAIN_ID_TESTNET, + database, + LimitedCostTracker::new_free(), + epoch, + ), default_contract: ContractContext::new( QualifiedContractIdentifier::transient(), ClarityVersion::Clarity1, @@ -558,10 +568,14 @@ impl<'a> OwnedEnvironment<'a> { epoch: StacksEpochId, use_mainnet: bool, ) -> OwnedEnvironment<'a> { + use crate::vm::tests::test_only_mainnet_to_chain_id; + let cost_track = LimitedCostTracker::new_max_limit(&mut database, epoch, use_mainnet) .expect("FAIL: problem instantiating cost tracking"); + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); + OwnedEnvironment { - context: GlobalContext::new(use_mainnet, database, cost_track, epoch), + context: GlobalContext::new(use_mainnet, chain_id, database, cost_track, epoch), default_contract: ContractContext::new( QualifiedContractIdentifier::transient(), ClarityVersion::Clarity1, @@ -580,12 +594,14 @@ impl<'a> OwnedEnvironment<'a> { pub fn new_free( mainnet: bool, + chain_id: u32, database: ClarityDatabase<'a>, epoch_id: StacksEpochId, ) -> OwnedEnvironment<'a> { OwnedEnvironment { context: GlobalContext::new( mainnet, + chain_id, database, LimitedCostTracker::new_free(), epoch_id, @@ -600,12 +616,13 @@ impl<'a> OwnedEnvironment<'a> { pub fn new_cost_limited( mainnet: bool, + chain_id: u32, database: ClarityDatabase<'a>, cost_tracker: LimitedCostTracker, epoch_id: StacksEpochId, ) -> OwnedEnvironment<'a> { OwnedEnvironment { - context: GlobalContext::new(mainnet, database, cost_tracker, epoch_id), + context: GlobalContext::new(mainnet, chain_id, database, cost_tracker, epoch_id), default_contract: ContractContext::new( QualifiedContractIdentifier::transient(), ClarityVersion::Clarity1, @@ -1426,6 +1443,7 @@ impl<'a> GlobalContext<'a> { // Instantiate a new Global Context pub fn new( mainnet: bool, + chain_id: u32, database: ClarityDatabase, cost_track: LimitedCostTracker, epoch_id: StacksEpochId, @@ -1438,6 +1456,7 @@ impl<'a> GlobalContext<'a> { event_batches: Vec::new(), mainnet, epoch_id, + chain_id, coverage_reporting: None, } } diff --git a/clarity/src/vm/costs/mod.rs b/clarity/src/vm/costs/mod.rs index eb3e5cee6e..52972023e4 100644 --- a/clarity/src/vm/costs/mod.rs +++ b/clarity/src/vm/costs/mod.rs @@ -241,6 +241,7 @@ pub struct TrackerData { /// evaluated, so no epoch identifier is necessary. 
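The constructor changes above mean every call site now threads a 32-bit chain ID alongside the `mainnet` flag. Below is a minimal sketch of the new shape, mirroring the `execute_with_parameters` call site later in this diff; the exact module paths and the standalone-function framing are assumptions for illustration:

```rust
use clarity::vm::contexts::GlobalContext;
use clarity::vm::costs::LimitedCostTracker;
use clarity::vm::database::MemoryBackingStore;
use stacks_common::consts::CHAIN_ID_TESTNET;
use stacks_common::types::StacksEpochId;

fn testnet_context_sketch() {
    let mut marf = MemoryBackingStore::new();
    let conn = marf.as_clarity_db();
    // The chain ID is now the second argument, right after the mainnet flag.
    let global_context = GlobalContext::new(
        false,            // mainnet?
        CHAIN_ID_TESTNET, // 0x80000000; a mainnet caller would pass CHAIN_ID_MAINNET (0x00000001)
        conn,
        LimitedCostTracker::new_free(),
        StacksEpochId::Epoch2_05,
    );
    // The new Clarity keyword `chain-id` evaluates to this field at runtime.
    assert_eq!(global_context.chain_id, CHAIN_ID_TESTNET);
}
```

`LimitedCostTracker::new` and `new_mid_block` gain the same `chain_id` parameter in the hunks that follow, so cost tracking and the VM environment always agree on which chain they are executing for.
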
epoch: StacksEpochId, mainnet: bool, + chain_id: u32, } #[derive(Clone)] @@ -623,6 +624,7 @@ fn load_cost_functions( impl LimitedCostTracker { pub fn new( mainnet: bool, + chain_id: u32, limit: ExecutionCost, clarity_db: &mut ClarityDatabase, epoch: StacksEpochId, @@ -637,6 +639,7 @@ impl LimitedCostTracker { memory: 0, epoch, mainnet, + chain_id, }; assert!(clarity_db.is_stack_empty()); cost_tracker.load_costs(clarity_db, true)?; @@ -645,6 +648,7 @@ impl LimitedCostTracker { pub fn new_mid_block( mainnet: bool, + chain_id: u32, limit: ExecutionCost, clarity_db: &mut ClarityDatabase, epoch: StacksEpochId, @@ -659,6 +663,7 @@ impl LimitedCostTracker { memory: 0, epoch, mainnet, + chain_id, }; cost_tracker.load_costs(clarity_db, false)?; Ok(Self::Limited(cost_tracker)) @@ -670,8 +675,16 @@ impl LimitedCostTracker { epoch: StacksEpochId, use_mainnet: bool, ) -> Result { + use crate::vm::tests::test_only_mainnet_to_chain_id; + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); assert!(clarity_db.is_stack_empty()); - LimitedCostTracker::new(use_mainnet, ExecutionCost::max_value(), clarity_db, epoch) + LimitedCostTracker::new( + use_mainnet, + chain_id, + ExecutionCost::max_value(), + clarity_db, + epoch, + ) } pub fn new_free() -> LimitedCostTracker { @@ -840,10 +853,16 @@ fn compute_cost( eval_in_epoch: StacksEpochId, ) -> Result { let mainnet = cost_tracker.mainnet; + let chain_id = cost_tracker.chain_id; let mut null_store = NullBackingStore::new(); let conn = null_store.as_clarity_db(); - let mut global_context = - GlobalContext::new(mainnet, conn, LimitedCostTracker::new_free(), eval_in_epoch); + let mut global_context = GlobalContext::new( + mainnet, + chain_id, + conn, + LimitedCostTracker::new_free(), + eval_in_epoch, + ); let cost_contract = cost_tracker .cost_contracts diff --git a/clarity/src/vm/docs/contracts.rs b/clarity/src/vm/docs/contracts.rs index 0dda93b89b..7e3eb0534f 100644 --- a/clarity/src/vm/docs/contracts.rs +++ b/clarity/src/vm/docs/contracts.rs @@ -13,6 +13,8 @@ use crate::vm::types::QualifiedContractIdentifier; use crate::vm::version::ClarityVersion; use crate::vm::{self, ContractContext}; +use stacks_common::consts::CHAIN_ID_TESTNET; + const DOCS_GENERATION_EPOCH: StacksEpochId = StacksEpochId::Epoch2_05; #[derive(Serialize)] @@ -72,6 +74,7 @@ fn doc_execute(program: &str) -> Result, vm::Error> { let conn = marf.as_clarity_db(); let mut global_context = GlobalContext::new( false, + CHAIN_ID_TESTNET, conn, LimitedCostTracker::new_free(), DOCS_GENERATION_EPOCH, diff --git a/clarity/src/vm/docs/mod.rs b/clarity/src/vm/docs/mod.rs index 42dc502803..ceef24269d 100644 --- a/clarity/src/vm/docs/mod.rs +++ b/clarity/src/vm/docs/mod.rs @@ -133,6 +133,13 @@ const MAINNET_KEYWORD: KeywordAPI = KeywordAPI { example: "(print is-in-mainnet) ;; Will print 'true' if the code is running on the mainnet", }; +const CHAINID_KEYWORD: KeywordAPI = KeywordAPI { + name: "chain-id", + output_type: "uint", + description: "Returns the 32-bit chain ID of the blockchain running this transaction", + example: "(print chain-id) ;; Will print 'u1' if the code is running on mainnet, and 'u2147483648' on testnet, and other values on different chains." 
+}; + const NONE_KEYWORD: KeywordAPI = KeywordAPI { name: "none", output_type: "(optional ?)", @@ -1975,6 +1982,7 @@ fn make_keyword_reference(variable: &NativeVariables) -> Option { NativeVariables::TotalLiquidMicroSTX => Some(TOTAL_LIQUID_USTX_KEYWORD.clone()), NativeVariables::Regtest => Some(REGTEST_KEYWORD.clone()), NativeVariables::Mainnet => Some(MAINNET_KEYWORD.clone()), + NativeVariables::ChainId => Some(CHAINID_KEYWORD.clone()), NativeVariables::TxSponsor => Some(TX_SPONSOR_KEYWORD.clone()), } } @@ -2075,6 +2083,7 @@ mod test { }; use crate::vm::costs::ExecutionCost; + use stacks_common::consts::CHAIN_ID_TESTNET; struct DocHeadersDB {} const DOC_HEADER_DB: DocHeadersDB = DocHeadersDB {}; @@ -2221,6 +2230,7 @@ mod test { ContractContext::new(contract_id.clone(), ClarityVersion::latest()); let mut global_context = GlobalContext::new( false, + CHAIN_ID_TESTNET, conn, LimitedCostTracker::new_free(), StacksEpochId::Epoch2_05, diff --git a/clarity/src/vm/mod.rs b/clarity/src/vm/mod.rs index 178a67e144..6ea313d01e 100644 --- a/clarity/src/vm/mod.rs +++ b/clarity/src/vm/mod.rs @@ -423,13 +423,20 @@ pub fn execute_with_parameters( use_mainnet: bool, ) -> Result> { use crate::vm::database::MemoryBackingStore; + use crate::vm::tests::test_only_mainnet_to_chain_id; let contract_id = QualifiedContractIdentifier::transient(); let mut contract_context = ContractContext::new(contract_id.clone(), clarity_version); let mut marf = MemoryBackingStore::new(); let conn = marf.as_clarity_db(); - let mut global_context = - GlobalContext::new(use_mainnet, conn, LimitedCostTracker::new_free(), epoch); + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); + let mut global_context = GlobalContext::new( + use_mainnet, + chain_id, + conn, + LimitedCostTracker::new_free(), + epoch, + ); global_context.execute(|g| { let parsed = ast::build_ast(&contract_id, program, &mut (), clarity_version)?.expressions; eval_all(&parsed, &mut contract_context, g, None) @@ -482,6 +489,8 @@ mod test { use super::ClarityVersion; + use stacks_common::consts::CHAIN_ID_TESTNET; + #[test] fn test_simple_user_function() { // @@ -519,6 +528,7 @@ mod test { let mut marf = MemoryBackingStore::new(); let mut global_context = GlobalContext::new( false, + CHAIN_ID_TESTNET, marf.as_clarity_db(), LimitedCostTracker::new_free(), StacksEpochId::Epoch2_05, diff --git a/clarity/src/vm/tests/mod.rs b/clarity/src/vm/tests/mod.rs index 5e9f03b600..df9c08e28e 100644 --- a/clarity/src/vm/tests/mod.rs +++ b/clarity/src/vm/tests/mod.rs @@ -41,6 +41,8 @@ use crate::vm::{ database::{BurnStateDB, HeadersDB}, }; +use stacks_common::consts::{CHAIN_ID_MAINNET, CHAIN_ID_TESTNET}; + use super::events::StacksTransactionEvent; pub use super::test_util::*; @@ -66,3 +68,23 @@ where f(&mut owned_env) } + +/// Determine whether or not to use the testnet or mainnet chain ID, given whether or not the +/// caller expects to use mainnet or testnet. +/// +/// WARNING TO THE READER: This is *test-only* code. The existence of this method does *not* +/// imply that there is a canonical, supported way to convert a `bool` into a chain ID. The fact +/// that Stacks has a separate chain ID for its testnet (0x80000000) is an accident. In general, a +/// Stacks blockchain instance only needs _one_ chain ID, and can use the mainnet/testnet field in +/// its transactions to determine whether or not a transaction should be mined in a given chain. +/// Going forward, you should *never* use a different chain ID for your testnet. 
+/// +/// So, do *not* refactor this code to use this conversion in production. +pub fn test_only_mainnet_to_chain_id(mainnet: bool) -> u32 { + // seriously -- don't even think about it. + if mainnet { + CHAIN_ID_MAINNET + } else { + CHAIN_ID_TESTNET + } +} diff --git a/clarity/src/vm/tests/simple_apply_eval.rs b/clarity/src/vm/tests/simple_apply_eval.rs index 8539c31fb6..3ecab6903a 100644 --- a/clarity/src/vm/tests/simple_apply_eval.rs +++ b/clarity/src/vm/tests/simple_apply_eval.rs @@ -39,6 +39,7 @@ use stacks_common::address::c32; use stacks_common::address::AddressHashMode; use stacks_common::address::C32_ADDRESS_VERSION_MAINNET_SINGLESIG; use stacks_common::address::C32_ADDRESS_VERSION_TESTNET_SINGLESIG; +use stacks_common::consts::{CHAIN_ID_MAINNET, CHAIN_ID_TESTNET}; use stacks_common::types::chainstate::StacksAddress; use stacks_common::types::chainstate::StacksPrivateKey; use stacks_common::types::chainstate::StacksPublicKey; @@ -491,6 +492,7 @@ fn test_simple_if_functions(#[case] version: ClarityVersion) { let mut marf = MemoryBackingStore::new(); let mut global_context = GlobalContext::new( false, + CHAIN_ID_TESTNET, marf.as_clarity_db(), LimitedCostTracker::new_free(), StacksEpochId::Epoch20, @@ -1424,3 +1426,46 @@ fn test_is_mainnet() { ) }); } + +#[test] +fn test_chain_id() { + let tests = ["chain-id"]; + + let mainnet_expectations = [Value::UInt(CHAIN_ID_MAINNET.into())]; + + tests + .iter() + .zip(mainnet_expectations.iter()) + .for_each(|(program, expectation)| { + assert_eq!( + expectation.clone(), + execute_with_parameters( + program, + ClarityVersion::Clarity2, + StacksEpochId::Epoch21, + true + ) + .unwrap() + .unwrap() + ) + }); + + let testnet_expectations = [Value::UInt(CHAIN_ID_TESTNET.into())]; + + tests + .iter() + .zip(testnet_expectations.iter()) + .for_each(|(program, expectation)| { + assert_eq!( + expectation.clone(), + execute_with_parameters( + program, + ClarityVersion::Clarity2, + StacksEpochId::Epoch21, + false + ) + .unwrap() + .unwrap() + ) + }); +} diff --git a/clarity/src/vm/variables.rs b/clarity/src/vm/variables.rs index a267663c9e..e88d963e3e 100644 --- a/clarity/src/vm/variables.rs +++ b/clarity/src/vm/variables.rs @@ -36,6 +36,7 @@ define_versioned_named_enum!(NativeVariables(ClarityVersion) { Regtest("is-in-regtest", ClarityVersion::Clarity1), TxSponsor("tx-sponsor?", ClarityVersion::Clarity2), Mainnet("is-in-mainnet", ClarityVersion::Clarity2), + ChainId("chain-id", ClarityVersion::Clarity2), }); impl NativeVariables { @@ -115,6 +116,10 @@ pub fn lookup_reserved_variable( let mainnet = env.global_context.mainnet; Ok(Some(Value::Bool(mainnet))) } + NativeVariables::ChainId => { + let chain_id = env.global_context.chain_id; + Ok(Some(Value::UInt(chain_id.into()))) + } } } else { Ok(None) diff --git a/src/chainstate/burn/db/sortdb.rs b/src/chainstate/burn/db/sortdb.rs index a53d9b8da8..7407bfa2eb 100644 --- a/src/chainstate/burn/db/sortdb.rs +++ b/src/chainstate/burn/db/sortdb.rs @@ -66,6 +66,7 @@ use crate::net::neighbors::MAX_NEIGHBOR_BLOCK_DELAY; use crate::net::{Error as NetError, Error}; use crate::util_lib::db::tx_begin_immediate; use crate::util_lib::db::tx_busy_handler; +use crate::util_lib::db::DBTx; use crate::util_lib::db::Error as db_error; use crate::util_lib::db::{ db_mkdirs, query_count, query_row, query_row_columns, query_row_panic, query_rows, sql_pragma, @@ -2510,7 +2511,7 @@ impl SortitionDB { Ok(version) } - fn apply_schema_2(tx: &SortitionDBTx, epochs: &[StacksEpoch]) -> Result<(), db_error> { + fn apply_schema_2(tx: &DBTx, 
epochs: &[StacksEpoch]) -> Result<(), db_error> { for sql_exec in SORTITION_DB_SCHEMA_2 { tx.execute_batch(sql_exec)?; } @@ -2525,7 +2526,7 @@ impl SortitionDB { Ok(()) } - fn apply_schema_3(tx: &SortitionDBTx) -> Result<(), db_error> { + fn apply_schema_3(tx: &DBTx) -> Result<(), db_error> { for sql_exec in SORTITION_DB_SCHEMA_3 { tx.execute_batch(sql_exec)?; } @@ -2543,10 +2544,8 @@ impl SortitionDB { if version == expected_version { Ok(()) } else { - Err(db_error::Other(format!( - "The version of the sortition DB {} does not match the expected {} and cannot be updated from SortitionDB::open()", - version, expected_version - ))) + let version_u64 = version.parse::().unwrap(); + Err(db_error::OldSchema(version_u64)) } } Ok(None) => panic!("The schema version of the sortition DB is not recorded."), @@ -2554,19 +2553,23 @@ impl SortitionDB { } } - fn check_schema_version_and_update(&mut self, epochs: &[StacksEpoch]) -> Result<(), db_error> { + /// Migrate the sortition DB to its latest version, given the set of system epochs + pub fn check_schema_version_and_update( + &mut self, + epochs: &[StacksEpoch], + ) -> Result<(), db_error> { let expected_version = SORTITION_DB_VERSION.to_string(); loop { match SortitionDB::get_schema_version(self.conn()) { Ok(Some(version)) => { if version == "1" { let tx = self.tx_begin()?; - SortitionDB::apply_schema_2(&tx, epochs)?; + SortitionDB::apply_schema_2(&tx.deref(), epochs)?; tx.commit()?; } else if version == "2" { // add the tables of schema 3, but do not populate them. let tx = self.tx_begin()?; - SortitionDB::apply_schema_3(&tx)?; + SortitionDB::apply_schema_3(&tx.deref())?; tx.commit()?; } else if version == expected_version { return Ok(()); @@ -2580,6 +2583,29 @@ impl SortitionDB { } } + /// Open and migrate the sortition DB if it exists. + pub fn migrate_if_exists(path: &str, epochs: &[StacksEpoch]) -> Result<(), db_error> { + // NOTE: the sortition DB created here will not be used for anything, so it's safe to use + // the mainnet_default PoX constants + if let Err(db_error::OldSchema(_)) = + SortitionDB::open(path, false, PoxConstants::mainnet_default()) + { + let index_path = db_mkdirs(path)?; + let marf = SortitionDB::open_index(&index_path)?; + let mut db = SortitionDB { + marf, + readwrite: true, + // not used by migration logic + first_block_height: 0, + first_burn_header_hash: BurnchainHeaderHash([0xff; 32]), + pox_constants: PoxConstants::mainnet_default(), + }; + db.check_schema_version_and_update(epochs) + } else { + Ok(()) + } + } + fn add_indexes(&mut self) -> Result<(), db_error> { // do we need to instantiate indexes? // only do a transaction if we need to, since this gets called each time the sortition DB diff --git a/src/chainstate/coordinator/mod.rs b/src/chainstate/coordinator/mod.rs index 57c8305ff0..dc375f8bb4 100644 --- a/src/chainstate/coordinator/mod.rs +++ b/src/chainstate/coordinator/mod.rs @@ -59,6 +59,8 @@ use crate::types::chainstate::{ }; use clarity::vm::database::BurnStateDB; +use crate::chainstate::stacks::index::marf::MARFOpenOpts; + pub use self::comm::CoordinatorCommunication; pub mod comm; @@ -945,3 +947,35 @@ pub fn check_chainstate_db_versions( Ok(true) } + +/// Migrate all databases to their latest schemas. +/// Verifies that this is possible as well +pub fn migrate_chainstate_dbs( + epochs: &[StacksEpoch], + sortdb_path: &str, + chainstate_path: &str, + chainstate_marf_opts: Option, +) -> Result<(), Error> { + if !check_chainstate_db_versions(epochs, sortdb_path, chainstate_path)? 
{ + warn!("Unable to migrate chainstate DBs to the latest schemas in the current epoch"); + return Err(DBError::TooOldForEpoch.into()); + } + + if fs::metadata(&sortdb_path).is_ok() { + info!("Migrating sortition DB to the latest schema version"); + SortitionDB::migrate_if_exists(&sortdb_path, epochs)?; + } + if fs::metadata(&chainstate_path).is_ok() { + info!("Migrating chainstate DB to the latest schema version"); + let db_config = StacksChainState::get_db_config_from_path(&chainstate_path)?; + + // this does the migration internally + let _ = StacksChainState::open( + db_config.mainnet, + db_config.chain_id, + chainstate_path, + chainstate_marf_opts, + )?; + } + Ok(()) +} diff --git a/src/chainstate/coordinator/tests.rs b/src/chainstate/coordinator/tests.rs index 68c4b0e8e6..62ddbce030 100644 --- a/src/chainstate/coordinator/tests.rs +++ b/src/chainstate/coordinator/tests.rs @@ -48,6 +48,7 @@ use clarity::vm::{ Value, }; use stacks_common::address; +use stacks_common::consts::CHAIN_ID_TESTNET; use stacks_common::util::hash::{to_hex, Hash160}; use stacks_common::util::vrf::*; @@ -1044,6 +1045,7 @@ fn missed_block_commits() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -1213,6 +1215,7 @@ fn test_simple_setup() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -1519,6 +1522,7 @@ fn test_sortition_with_reward_set() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -1760,6 +1764,7 @@ fn test_sortition_with_burner_reward_set() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -2034,6 +2039,7 @@ fn test_pox_btc_ops() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -2347,6 +2353,7 @@ fn test_stx_transfer_btc_ops() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -2584,6 +2591,7 @@ fn test_initial_coinbase_reward_distributions() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -3063,6 +3071,7 @@ fn test_sortition_with_sunset() { |conn| conn .with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), @@ -3909,6 +3918,7 @@ fn eval_at_chain_tip(chainstate_path: &str, sort_db: &SortitionDB, eval: &str) - |conn| { conn.with_readonly_clarity_env( false, + CHAIN_ID_TESTNET, PrincipalData::parse("SP3Q4A5WWZ80REGBN0ZXNE540ECJ9JZ4A765Q5K2Q").unwrap(), None, LimitedCostTracker::new_free(), diff --git a/src/chainstate/stacks/boot/mod.rs b/src/chainstate/stacks/boot/mod.rs index a34579ad0c..8168a83e91 100644 --- a/src/chainstate/stacks/boot/mod.rs +++ b/src/chainstate/stacks/boot/mod.rs @@ -205,19 +205,27 @@ impl StacksChainState { ) -> Result { let function = 
"get-total-ustx-stacked"; let mainnet = self.mainnet; + let chain_id = self.chain_id; let contract_identifier = boot::boot_code_id(pox_contract, mainnet); let cost_track = LimitedCostTracker::new_free(); let sender = PrincipalData::Standard(StandardPrincipalData::transient()); let result = self .maybe_read_only_clarity_tx(&sortdb.index_conn(), tip, |clarity_tx| { - clarity_tx.with_readonly_clarity_env(mainnet, sender, None, cost_track, |env| { - env.execute_contract( - &contract_identifier, - function, - &vec![SymbolicExpression::atom_value(Value::UInt(reward_cycle))], - true, - ) - }) + clarity_tx.with_readonly_clarity_env( + mainnet, + chain_id, + sender, + None, + cost_track, + |env| { + env.execute_contract( + &contract_identifier, + function, + &vec![SymbolicExpression::atom_value(Value::UInt(reward_cycle))], + true, + ) + }, + ) })? .ok_or_else(|| Error::NoSuchBlockError)?? .expect_u128(); diff --git a/src/chainstate/stacks/db/mod.rs b/src/chainstate/stacks/db/mod.rs index dd7c950f96..442323d030 100644 --- a/src/chainstate/stacks/db/mod.rs +++ b/src/chainstate/stacks/db/mod.rs @@ -1629,7 +1629,7 @@ impl StacksChainState { ) .map_err(|e| Error::ClarityError(e.into()))?; - let clarity_state = ClarityInstance::new(mainnet, vm_state); + let clarity_state = ClarityInstance::new(mainnet, chain_id, vm_state); let mut chainstate = StacksChainState { mainnet: mainnet, diff --git a/src/chainstate/stacks/db/unconfirmed.rs b/src/chainstate/stacks/db/unconfirmed.rs index c96b0a8cd7..44f7411c31 100644 --- a/src/chainstate/stacks/db/unconfirmed.rs +++ b/src/chainstate/stacks/db/unconfirmed.rs @@ -99,7 +99,7 @@ impl UnconfirmedState { chainstate.marf_opts.clone(), )?; - let clarity_instance = ClarityInstance::new(chainstate.mainnet, marf); + let clarity_instance = ClarityInstance::new(chainstate.mainnet, chainstate.chain_id, marf); let unconfirmed_tip = MARF::make_unconfirmed_chain_tip(&tip); let cost_so_far = StacksChainState::get_stacks_block_anchored_cost(chainstate.db(), &tip)? .ok_or(Error::NoSuchBlockError)?; @@ -136,7 +136,7 @@ impl UnconfirmedState { chainstate.marf_opts.clone(), )?; - let clarity_instance = ClarityInstance::new(chainstate.mainnet, marf); + let clarity_instance = ClarityInstance::new(chainstate.mainnet, chainstate.chain_id, marf); let unconfirmed_tip = MARF::make_unconfirmed_chain_tip(&tip); let cost_so_far = StacksChainState::get_stacks_block_anchored_cost(chainstate.db(), &tip)? 
.ok_or(Error::NoSuchBlockError)?; diff --git a/src/clarity_cli.rs b/src/clarity_cli.rs index bcc48f63d2..48f84bfabb 100644 --- a/src/clarity_cli.rs +++ b/src/clarity_cli.rs @@ -82,6 +82,8 @@ use crate::clarity_vm::database::marf::MarfedKV; use crate::clarity_vm::database::marf::WritableMarfStore; use crate::clarity_vm::database::MemoryBackingStore; use crate::core::StacksEpochId; +use stacks_common::consts::CHAIN_ID_MAINNET; +use stacks_common::consts::CHAIN_ID_TESTNET; use stacks_common::types::chainstate::BlockHeaderHash; use stacks_common::types::chainstate::BurnchainHeaderHash; use stacks_common::types::chainstate::StacksAddress; @@ -224,6 +226,7 @@ fn run_analysis( let clarity_version = ClarityVersion::default_for_epoch(DEFAULT_CLI_EPOCH); let cost_track = LimitedCostTracker::new( mainnet, + default_chain_id(mainnet), if mainnet { BLOCK_LIMIT_MAINNET_20.clone() } else { @@ -387,6 +390,15 @@ where result } +fn default_chain_id(mainnet: bool) -> u32 { + let chain_id = if mainnet { + CHAIN_ID_MAINNET + } else { + CHAIN_ID_TESTNET + }; + chain_id +} + fn with_env_costs( mainnet: bool, header_db: &CLIHeadersDB, @@ -399,6 +411,7 @@ where let mut db = marf.as_clarity_db(header_db, &NULL_BURN_STATE_DB); let cost_track = LimitedCostTracker::new( mainnet, + default_chain_id(mainnet), if mainnet { BLOCK_LIMIT_MAINNET_20.clone() } else { @@ -408,7 +421,13 @@ where DEFAULT_CLI_EPOCH, ) .unwrap(); - let mut vm_env = OwnedEnvironment::new_cost_limited(mainnet, db, cost_track, DEFAULT_CLI_EPOCH); + let mut vm_env = OwnedEnvironment::new_cost_limited( + mainnet, + default_chain_id(mainnet), + db, + cost_track, + DEFAULT_CLI_EPOCH, + ); let result = f(&mut vm_env); let cost = vm_env.get_cost_total(); (result, cost) @@ -423,6 +442,7 @@ pub fn vm_execute(program: &str, clarity_version: ClarityVersion) -> Result(header_db: &CLIHeadersDB, marf: &mut C) match analysis_result { Ok(_) => { let db = marf.get_clarity_db(header_db, &NULL_BURN_STATE_DB); - let mut vm_env = OwnedEnvironment::new_free(mainnet, db, DEFAULT_CLI_EPOCH); + let mut vm_env = OwnedEnvironment::new_free( + mainnet, + default_chain_id(mainnet), + db, + DEFAULT_CLI_EPOCH, + ); vm_env .initialize_contract(contract_identifier, &contract_content, None) .unwrap(); @@ -784,7 +809,8 @@ fn install_boot_code(header_db: &CLIHeadersDB, marf: &mut C) ]; let db = marf.get_clarity_db(header_db, &NULL_BURN_STATE_DB); - let mut vm_env = OwnedEnvironment::new_free(mainnet, db, DEFAULT_CLI_EPOCH); + let mut vm_env = + OwnedEnvironment::new_free(mainnet, default_chain_id(mainnet), db, DEFAULT_CLI_EPOCH); vm_env .execute_transaction( sender, @@ -1079,8 +1105,12 @@ pub fn invoke_command(invoked_by: &str, args: &[String]) -> (i32, Option (i32, Option { burn_state_db: &'b dyn BurnStateDB, cost_track: Option, mainnet: bool, + chain_id: u32, epoch: StacksEpochId, } @@ -148,6 +152,7 @@ pub struct ClarityTransactionConnection<'a, 'b> { burn_state_db: &'a dyn BurnStateDB, cost_track: &'a mut Option, mainnet: bool, + chain_id: u32, epoch: StacksEpochId, } @@ -199,6 +204,7 @@ impl<'a, 'b> ClarityBlockConnection<'a, 'b> { burn_state_db, cost_track: Some(LimitedCostTracker::new_free()), mainnet: false, + chain_id: CHAIN_ID_TESTNET, epoch: epoch, } } @@ -238,8 +244,12 @@ impl<'a, 'b> ClarityBlockConnection<'a, 'b> { } impl ClarityInstance { - pub fn new(mainnet: bool, datastore: MarfedKV) -> ClarityInstance { - ClarityInstance { datastore, mainnet } + pub fn new(mainnet: bool, chain_id: u32, datastore: MarfedKV) -> ClarityInstance { + ClarityInstance { + datastore, + 
mainnet, + chain_id, + } } pub fn with_marf(&mut self, f: F) -> R @@ -294,6 +304,7 @@ impl ClarityInstance { Some( LimitedCostTracker::new( self.mainnet, + self.chain_id, epoch.block_limit.clone(), &mut clarity_db, epoch.epoch_id, @@ -308,6 +319,7 @@ impl ClarityInstance { burn_state_db, cost_track, mainnet: self.mainnet, + chain_id: self.chain_id, epoch: epoch.epoch_id, } } @@ -331,6 +343,7 @@ impl ClarityInstance { burn_state_db, cost_track, mainnet: self.mainnet, + chain_id: self.chain_id, epoch, } } @@ -356,6 +369,7 @@ impl ClarityInstance { burn_state_db, cost_track, mainnet: self.mainnet, + chain_id: self.chain_id, epoch, }; @@ -435,6 +449,7 @@ impl ClarityInstance { Some( LimitedCostTracker::new( self.mainnet, + self.chain_id, epoch.block_limit.clone(), &mut clarity_db, epoch.epoch_id, @@ -449,6 +464,7 @@ impl ClarityInstance { burn_state_db, cost_track, mainnet: self.mainnet, + chain_id: self.chain_id, epoch: epoch.epoch_id, } } @@ -514,7 +530,7 @@ impl ClarityInstance { result }; - let mut env = OwnedEnvironment::new_free(self.mainnet, clarity_db, epoch_id); + let mut env = OwnedEnvironment::new_free(self.mainnet, self.chain_id, clarity_db, epoch_id); env.eval_read_only(contract, program) .map(|(x, _, _)| x) .map_err(Error::from) @@ -890,6 +906,7 @@ impl<'a, 'b> ClarityBlockConnection<'a, 'b> { let header_db = &self.header_db; let burn_state_db = &self.burn_state_db; let mainnet = self.mainnet; + let chain_id = self.chain_id; let mut log = RollbackWrapperPersistedLog::new(); log.nest(); ClarityTransactionConnection { @@ -899,6 +916,7 @@ impl<'a, 'b> ClarityBlockConnection<'a, 'b> { burn_state_db, log: Some(log), mainnet, + chain_id, epoch: self.epoch, } } @@ -1001,8 +1019,13 @@ impl<'a, 'b> TransactionConnection for ClarityTransactionConnection<'a, 'b> { // wrap the whole contract-call in a claritydb transaction, // so we can abort on call_back's boolean retun db.begin(); - let mut vm_env = - OwnedEnvironment::new_cost_limited(self.mainnet, db, cost_track, self.epoch); + let mut vm_env = OwnedEnvironment::new_cost_limited( + self.mainnet, + self.chain_id, + db, + cost_track, + self.epoch, + ); let result = to_do(&mut vm_env); let (mut db, cost_track) = vm_env .destruct() @@ -1169,11 +1192,12 @@ mod tests { use crate::clarity_vm::database::marf::MarfedKV; use super::*; + use stacks_common::consts::CHAIN_ID_TESTNET; #[test] pub fn bad_syntax_test() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); @@ -1213,7 +1237,7 @@ mod tests { #[test] pub fn test_initialize_contract_tx_sender_contract_caller() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); clarity_instance @@ -1266,7 +1290,7 @@ mod tests { #[test] pub fn tx_rollback() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); let contract = "(define-public (foo (x int) (y int)) (ok (+ x y)))"; @@ -1361,7 +1385,7 @@ mod tests { #[test] pub fn simple_test() { let marf = MarfedKV::temporary(); - let mut clarity_instance 
= ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); @@ -1425,7 +1449,7 @@ mod tests { #[test] pub fn test_block_roll_back() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); { @@ -1483,7 +1507,8 @@ mod tests { } let confirmed_marf = MarfedKV::open(test_name, None, None).unwrap(); - let mut confirmed_clarity_instance = ClarityInstance::new(false, confirmed_marf); + let mut confirmed_clarity_instance = + ClarityInstance::new(false, CHAIN_ID_TESTNET, confirmed_marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); let contract = " @@ -1513,7 +1538,7 @@ mod tests { ) .unwrap(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); // make an unconfirmed block off of the confirmed block { @@ -1621,7 +1646,7 @@ mod tests { #[test] pub fn test_tx_roll_backs() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); let sender = StandardPrincipalData::transient().into(); @@ -1769,7 +1794,7 @@ mod tests { use stacks_common::util::secp256k1::MessageSignature; let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let sender = StandardPrincipalData::transient().into(); let spending_cond = TransactionSpendingCondition::Singlesig(SinglesigSpendingCondition { @@ -1874,7 +1899,7 @@ mod tests { #[test] pub fn test_block_limit() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); let sender = StandardPrincipalData::transient().into(); diff --git a/src/clarity_vm/tests/analysis_costs.rs b/src/clarity_vm/tests/analysis_costs.rs index 4ab11a6ffe..4dc1f016d4 100644 --- a/src/clarity_vm/tests/analysis_costs.rs +++ b/src/clarity_vm/tests/analysis_costs.rs @@ -42,11 +42,13 @@ use crate::clarity_vm::clarity::ClarityConnection; use crate::clarity_vm::database::marf::MarfedKV; use crate::types::chainstate::{BlockHeaderHash, StacksBlockId}; use crate::types::StacksEpochId; +use clarity::vm::tests::test_only_mainnet_to_chain_id; use clarity::vm::ClarityVersion; pub fn test_tracked_costs(prog: &str, use_mainnet: bool, epoch: StacksEpochId) -> ExecutionCost { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(use_mainnet, marf); + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); + let mut clarity_instance = ClarityInstance::new(use_mainnet, chain_id, marf); let p1 = vm_execute("'SZ2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQ9H6DPR") .unwrap() diff --git a/src/clarity_vm/tests/ast.rs b/src/clarity_vm/tests/ast.rs index 43aed842fc..827b50a842 100644 --- a/src/clarity_vm/tests/ast.rs +++ b/src/clarity_vm/tests/ast.rs @@ -6,6 +6,7 @@ use stacks_common::types::chainstate::StacksBlockId; use 
crate::chainstate::stacks::index::ClarityMarfTrieId; use clarity::vm::version::ClarityVersion; +use stacks_common::consts::CHAIN_ID_TESTNET; #[cfg(test)] use rstest::rstest; @@ -29,7 +30,7 @@ fn dependency_edge_counting_runtime(iters: usize, version: ClarityVersion) -> u6 } let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); clarity_instance .begin_test_genesis_block( diff --git a/src/clarity_vm/tests/costs.rs b/src/clarity_vm/tests/costs.rs index f423c17dfe..a9218fd967 100644 --- a/src/clarity_vm/tests/costs.rs +++ b/src/clarity_vm/tests/costs.rs @@ -47,6 +47,7 @@ use std::collections::HashMap; use crate::clarity_vm::database::marf::MarfedKV; use clarity::vm::database::MemoryBackingStore; +use clarity::vm::tests::test_only_mainnet_to_chain_id; lazy_static! { static ref COST_VOTING_MAINNET_CONTRACT: QualifiedContractIdentifier = @@ -170,7 +171,8 @@ where F: Fn(OwnedEnvironment) -> R, { let marf_kv = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(use_mainnet, marf_kv); + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); + let mut clarity_instance = ClarityInstance::new(use_mainnet, chain_id, marf_kv); let first_block = StacksBlockId::new(&FIRST_BURNCHAIN_CONSENSUS_HASH, &FIRST_STACKS_BLOCK_HASH); clarity_instance @@ -951,7 +953,8 @@ fn epoch_21_test_all_testnet() { fn test_cost_contract_short_circuits(use_mainnet: bool) { let marf_kv = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(use_mainnet, marf_kv); + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); + let mut clarity_instance = ClarityInstance::new(use_mainnet, chain_id, marf_kv); clarity_instance .begin_test_genesis_block( &StacksBlockId::sentinel(), @@ -981,7 +984,7 @@ fn test_cost_contract_short_circuits(use_mainnet: bool) { let caller = QualifiedContractIdentifier::new(p1_principal.clone(), "caller".into()); let mut marf_kv = { - let mut clarity_inst = ClarityInstance::new(use_mainnet, marf_kv); + let mut clarity_inst = ClarityInstance::new(use_mainnet, chain_id, marf_kv); let mut block_conn = clarity_inst.begin_block( &StacksBlockId::new(&FIRST_BURNCHAIN_CONSENSUS_HASH, &FIRST_STACKS_BLOCK_HASH), &StacksBlockId([1 as u8; 32]), @@ -1176,7 +1179,8 @@ fn test_cost_contract_short_circuits_testnet() { fn test_cost_voting_integration(use_mainnet: bool) { let marf_kv = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(use_mainnet, marf_kv); + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); + let mut clarity_instance = ClarityInstance::new(use_mainnet, chain_id, marf_kv); clarity_instance .begin_test_genesis_block( &StacksBlockId::sentinel(), @@ -1210,7 +1214,7 @@ fn test_cost_voting_integration(use_mainnet: bool) { let caller = QualifiedContractIdentifier::new(p1_principal.clone(), "caller".into()); let mut marf_kv = { - let mut clarity_inst = ClarityInstance::new(use_mainnet, marf_kv); + let mut clarity_inst = ClarityInstance::new(use_mainnet, chain_id, marf_kv); let mut block_conn = clarity_inst.begin_block( &StacksBlockId::new(&FIRST_BURNCHAIN_CONSENSUS_HASH, &FIRST_STACKS_BLOCK_HASH), &StacksBlockId([1 as u8; 32]), diff --git a/src/clarity_vm/tests/events.rs b/src/clarity_vm/tests/events.rs index cc2f1ba893..5c3fa9bfa5 100644 --- a/src/clarity_vm/tests/events.rs +++ b/src/clarity_vm/tests/events.rs @@ -32,6 +32,8 @@ use stacks_common::types::StacksEpochId; use crate::vm::database::MemoryBackingStore; +use 
clarity::vm::tests::test_only_mainnet_to_chain_id; + fn helper_execute(contract: &str, method: &str) -> (Value, Vec) { helper_execute_epoch(contract, method, None, StacksEpochId::Epoch21, false) } @@ -48,7 +50,8 @@ fn helper_execute_epoch( let sender = execute(address).expect_principal(); let marf_kv = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(use_mainnet, marf_kv); + let chain_id = test_only_mainnet_to_chain_id(use_mainnet); + let mut clarity_instance = ClarityInstance::new(use_mainnet, chain_id, marf_kv); let mut genesis = clarity_instance.begin_test_genesis_block( &StacksBlockId::sentinel(), &StacksBlockHeader::make_index_block_hash( diff --git a/src/clarity_vm/tests/large_contract.rs b/src/clarity_vm/tests/large_contract.rs index d260881b0c..33f7d1d796 100644 --- a/src/clarity_vm/tests/large_contract.rs +++ b/src/clarity_vm/tests/large_contract.rs @@ -45,6 +45,8 @@ use crate::vm::tests::with_memory_environment; use clarity::vm::version::ClarityVersion; +use stacks_common::consts::{CHAIN_ID_MAINNET, CHAIN_ID_TESTNET}; + #[template] #[rstest] #[case(ClarityVersion::Clarity1)] @@ -89,7 +91,7 @@ const SIMPLE_TOKENS: &str = "(define-map tokens { account: principal } { balance #[apply(clarity_version_template)] fn test_simple_token_system(#[case] version: ClarityVersion) { - let mut clarity = ClarityInstance::new(false, MarfedKV::temporary()); + let mut clarity = ClarityInstance::new(false, CHAIN_ID_TESTNET, MarfedKV::temporary()); let p1 = PrincipalData::from( PrincipalData::parse_standard_principal("SZ2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQ9H6DPR") .unwrap(), @@ -559,7 +561,7 @@ fn inner_test_simple_naming_system(owned_env: &mut OwnedEnvironment, version: Cl #[ignore] pub fn rollback_log_memory_test() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let EXPLODE_N = 100; let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); @@ -621,7 +623,7 @@ pub fn rollback_log_memory_test() { #[test] pub fn let_memory_test() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let EXPLODE_N = 100; let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); @@ -689,7 +691,7 @@ pub fn let_memory_test() { #[test] pub fn argument_memory_test() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let EXPLODE_N = 100; let contract_identifier = QualifiedContractIdentifier::local("foo").unwrap(); @@ -757,7 +759,7 @@ pub fn argument_memory_test() { #[test] pub fn fcall_memory_test() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let COUNT_PER_FUNC = 10; let FUNCS = 10; @@ -865,7 +867,7 @@ pub fn fcall_memory_test() { #[ignore] pub fn ccall_memory_test() { let marf = MarfedKV::temporary(); - let mut clarity_instance = ClarityInstance::new(false, marf); + let mut clarity_instance = ClarityInstance::new(false, CHAIN_ID_TESTNET, marf); let COUNT_PER_CONTRACT = 20; let CONTRACTS = 5; diff --git a/src/core/mod.rs b/src/core/mod.rs index bf18e82501..333ddd4cfb 100644 --- a/src/core/mod.rs +++ b/src/core/mod.rs @@ -41,8 +41,7 @@ pub type 
StacksEpoch = GenericStacksEpoch; pub const SYSTEM_FORK_SET_VERSION: [u8; 4] = [23u8, 0u8, 0u8, 0u8]; // chain id -pub const CHAIN_ID_MAINNET: u32 = 0x00000001; -pub const CHAIN_ID_TESTNET: u32 = 0x80000000; +pub use stacks_common::consts::{CHAIN_ID_MAINNET, CHAIN_ID_TESTNET}; // peer version (big-endian) // first byte == major network protocol version (currently 0x18) diff --git a/src/net/p2p.rs b/src/net/p2p.rs index d02dfe4c15..772a89f3ba 100644 --- a/src/net/p2p.rs +++ b/src/net/p2p.rs @@ -1927,7 +1927,7 @@ impl PeerNetwork { } for (event_id, convo) in self.peers.iter() { - if convo.is_authenticated() { + if convo.is_authenticated() && convo.stats.last_contact_time > 0 { // have handshaked with this remote peer if convo.stats.last_contact_time + (convo.peer_heartbeat as u64) diff --git a/src/net/rpc.rs b/src/net/rpc.rs index 68a2ccb7eb..0fca5e664c 100644 --- a/src/net/rpc.rs +++ b/src/net/rpc.rs @@ -266,6 +266,7 @@ impl RPCPoxInfoData { burnchain: &Burnchain, ) -> Result { let mainnet = chainstate.mainnet; + let chain_id = chainstate.chain_id; let contract_identifier = boot_code_id("pox", mainnet); let function = "get-pox-info"; let cost_track = LimitedCostTracker::new_free(); @@ -273,9 +274,14 @@ impl RPCPoxInfoData { let data = chainstate .maybe_read_only_clarity_tx(&sortdb.index_conn(), tip, |clarity_tx| { - clarity_tx.with_readonly_clarity_env(mainnet, sender, None, cost_track, |env| { - env.execute_contract(&contract_identifier, function, &vec![], true) - }) + clarity_tx.with_readonly_clarity_env( + mainnet, + chain_id, + sender, + None, + cost_track, + |env| env.execute_contract(&contract_identifier, function, &vec![], true), + ) }) .map_err(|_| net_error::NotFoundError)?; @@ -1361,6 +1367,7 @@ impl ConversationHttp { .map(|x| SymbolicExpression::atom_value(x.clone())) .collect(); let mainnet = chainstate.mainnet; + let chain_id = chainstate.chain_id; let mut cost_limit = options.read_only_call_limit.clone(); cost_limit.write_length = 0; cost_limit.write_count = 0; @@ -1370,7 +1377,9 @@ impl ConversationHttp { let epoch = clarity_tx.get_epoch(); let cost_track = clarity_tx .with_clarity_db_readonly(|clarity_db| { - LimitedCostTracker::new_mid_block(mainnet, cost_limit, clarity_db, epoch) + LimitedCostTracker::new_mid_block( + mainnet, chain_id, cost_limit, clarity_db, epoch, + ) }) .map_err(|_| { ClarityRuntimeError::from(InterpreterError::CostContractLoadFailure) @@ -1378,6 +1387,7 @@ impl ConversationHttp { clarity_tx.with_readonly_clarity_env( mainnet, + chain_id, sender.clone(), sponsor.cloned(), cost_track, diff --git a/src/util_lib/db.rs b/src/util_lib/db.rs index 597bd95930..4ae33e7dd3 100644 --- a/src/util_lib/db.rs +++ b/src/util_lib/db.rs @@ -106,6 +106,10 @@ pub enum Error { IOError(IOError), /// MARF index error IndexError(MARFError), + /// Old schema error + OldSchema(u64), + /// Database is too old for epoch + TooOldForEpoch, /// Other error Other(String), } @@ -127,6 +131,10 @@ impl fmt::Display for Error { Error::IOError(ref e) => fmt::Display::fmt(e, f), Error::SqliteError(ref e) => fmt::Display::fmt(e, f), Error::IndexError(ref e) => fmt::Display::fmt(e, f), + Error::OldSchema(ref s) => write!(f, "Old database schema: {}", s), + Error::TooOldForEpoch => { + write!(f, "Database is not compatible with current system epoch") + } Error::Other(ref s) => fmt::Display::fmt(s, f), } } @@ -149,6 +157,8 @@ impl error::Error for Error { Error::SqliteError(ref e) => Some(e), Error::IOError(ref e) => Some(e), Error::IndexError(ref e) => Some(e), + Error::OldSchema(ref _s) => 
None, + Error::TooOldForEpoch => None, Error::Other(ref _s) => None, } } diff --git a/stacks-common/src/libcommon.rs b/stacks-common/src/libcommon.rs index 4b80c756c4..3481e7a4b4 100644 --- a/stacks-common/src/libcommon.rs +++ b/stacks-common/src/libcommon.rs @@ -59,4 +59,7 @@ pub mod consts { pub const FIRST_STACKS_BLOCK_HASH: BlockHeaderHash = BlockHeaderHash([0u8; 32]); pub const FIRST_BURNCHAIN_CONSENSUS_HASH: ConsensusHash = ConsensusHash([0u8; 20]); + + pub const CHAIN_ID_MAINNET: u32 = 0x00000001; + pub const CHAIN_ID_TESTNET: u32 = 0x80000000; } diff --git a/testnet/stacks-node/src/neon_node.rs b/testnet/stacks-node/src/neon_node.rs index 1e83aaf2f2..97969a7c04 100644 --- a/testnet/stacks-node/src/neon_node.rs +++ b/testnet/stacks-node/src/neon_node.rs @@ -1434,6 +1434,8 @@ impl StacksNode { // bootstrap nodes *always* allowed let mut tx = peerdb.tx_begin().unwrap(); for initial_neighbor in initial_neighbors.iter() { + // update peer in case public key changed + PeerDB::update_peer(&mut tx, &initial_neighbor).unwrap(); PeerDB::set_allow_peer( &mut tx, initial_neighbor.addr.network_id, diff --git a/testnet/stacks-node/src/run_loop/neon.rs b/testnet/stacks-node/src/run_loop/neon.rs index fe1a3f5052..df21018288 100644 --- a/testnet/stacks-node/src/run_loop/neon.rs +++ b/testnet/stacks-node/src/run_loop/neon.rs @@ -21,10 +21,12 @@ use stacks::burnchains::{Address, Burnchain}; use stacks::chainstate::burn::db::sortdb::SortitionDB; use stacks::chainstate::coordinator::comm::{CoordinatorChannels, CoordinatorReceivers}; use stacks::chainstate::coordinator::{ - check_chainstate_db_versions, BlockEventDispatcher, ChainsCoordinator, CoordinatorCommunication, + migrate_chainstate_dbs, BlockEventDispatcher, ChainsCoordinator, CoordinatorCommunication, + Error as coord_error, }; use stacks::chainstate::stacks::db::{ChainStateBootData, StacksChainState}; use stacks::net::atlas::{AtlasConfig, Attachment, AttachmentInstance, ATTACHMENTS_CHANNEL_SIZE}; +use stacks::util_lib::db::Error as db_error; use stx_genesis::GenesisData; use crate::monitoring::start_serving_monitoring_metrics; @@ -323,29 +325,29 @@ impl RunLoop { Some(self.should_keep_running.clone()), ); - // Invoke connect() to perform any db instantiation and migration early - if let Err(e) = burnchain_controller.connect_dbs() { - error!("Failed to connect to burnchain databases: {}", e); - panic!(); - }; - - let burnchain_config = burnchain_controller.get_burnchain(); + // Upgrade chainstate databases if they exist already let epochs = burnchain_controller.get_stacks_epochs(); - if !check_chainstate_db_versions( + match migrate_chainstate_dbs( &epochs, &self.config.get_burn_db_file_path(), &self.config.get_chainstate_path_str(), - ) - .expect("FATAL: unable to query filesystem or databases for version information") - { - error!( - "FATAL: chainstate database(s) are not compatible with the current system epoch" - ); - panic!(); + Some(self.config.node.get_marf_opts()), + ) { + Ok(_) => {} + Err(coord_error::DBError(db_error::TooOldForEpoch)) => { + error!( + "FATAL: chainstate database(s) are not compatible with the current system epoch" + ); + panic!(); + } + Err(e) => { + panic!("FATAL: unable to query filesystem or databases: {:?}", &e); + } } info!("Start syncing Bitcoin headers, feel free to grab a cup of coffee, this can take a while"); + let burnchain_config = burnchain_controller.get_burnchain(); let target_burnchain_block_height = match burnchain_config .get_highest_burnchain_block() .expect("FATAL: failed to access burnchain 
database") @@ -372,6 +374,12 @@ impl RunLoop { } }; + // if the chainstate DBs don't exist, this will instantiate them + if let Err(e) = burnchain_controller.connect_dbs() { + error!("Failed to connect to burnchain databases: {}", e); + panic!(); + }; + // TODO (hack) instantiate the sortdb in the burnchain let _ = burnchain_controller.sortdb_mut(); burnchain_controller