feat: remove consensus estimation (#5116)
Signed-off-by: Shanin Roman <shanin1000@yandex.ru>
Erigara authored Oct 8, 2024
1 parent 289d972 commit 0729420
Showing 11 changed files with 47 additions and 283 deletions.
91 changes: 4 additions & 87 deletions crates/iroha/tests/integration/triggers/time_trigger.rs
@@ -15,18 +15,10 @@ use iroha_test_network::*;
 use iroha_test_samples::{gen_account_in, load_sample_wasm, ALICE_ID};

 /// Default estimation of consensus duration.
-pub fn default_consensus_estimation() -> Duration {
+pub fn pipeline_time() -> Duration {
     let default_parameters = SumeragiParameters::default();

-    default_parameters
-        .block_time()
-        .checked_add(
-            default_parameters
-                .commit_time()
-                .checked_div(2)
-                .map_or_else(|| unreachable!(), |x| x),
-        )
-        .map_or_else(|| unreachable!(), |x| x)
+    default_parameters.pipeline_time()
 }

 fn curr_time() -> core::time::Duration {
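The removed helper hand-computed the estimate as block_time + commit_time / 2, while the replacement defers to SumeragiParameters::pipeline_time(). A minimal standalone sketch of the arithmetic, assuming pipeline_time() resolves to block_time + commit_time; the durations are placeholder values, not Iroha's defaults:

use std::time::Duration;

fn main() {
    // Placeholder values for illustration only.
    let block_time = Duration::from_secs(2);
    let commit_time = Duration::from_secs(4);

    // Old test helper: consensus estimation = block_time + commit_time / 2
    let old_estimate = block_time + commit_time / 2;
    // New test helper: defers to SumeragiParameters::pipeline_time(),
    // assumed here to be block_time + commit_time.
    let new_estimate = block_time + commit_time;

    assert!(old_estimate <= new_estimate);
    println!("old = {old_estimate:?}, new = {new_estimate:?}");
}

In the tests below the helper only feeds thread sleeps, so the exact value matters less than it being long enough for a block to land.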
@@ -37,87 +29,12 @@ fn curr_time() -> core::time::Duration {
         .unwrap()
 }

-/// Macro to abort compilation, if `e` isn't `true`
-macro_rules! const_assert {
-    ($e:expr) => {
-        #[allow(trivial_casts)]
-        const _: usize = ($e as bool) as usize - 1;
-    };
-}
-
-/// Time-based triggers and block commitment process depend heavily on **current time** and **CPU**,
-/// so it's impossible to create fully reproducible test scenario.
-///
-/// But in general it works well and this test demonstrates it
-#[test]
-#[allow(clippy::cast_precision_loss)]
-fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result<()> {
-    const PERIOD: Duration = Duration::from_millis(100);
-    const ACCEPTABLE_ERROR_PERCENT: u8 = 15;
-    assert!(PERIOD.as_millis() < default_consensus_estimation().as_millis());
-    const_assert!(ACCEPTABLE_ERROR_PERCENT <= 100);
-
-    let (_rt, _peer, mut test_client) = <PeerBuilder>::new().with_port(10_775).start_with_runtime();
-    wait_for_genesis_committed(&vec![test_client.clone()], 0);
-    let start_time = curr_time();
-
-    // Start listening BEFORE submitting any transaction not to miss any block committed event
-    let event_listener = get_block_committed_event_listener(&test_client)?;
-
-    let account_id = ALICE_ID.clone();
-    let asset_definition_id = "rose#wonderland".parse().expect("Valid");
-    let asset_id = AssetId::new(asset_definition_id, account_id.clone());
-
-    let prev_value = get_asset_value(&mut test_client, asset_id.clone());
-
-    let schedule = TimeSchedule::starting_at(start_time).with_period(PERIOD);
-    let instruction = Mint::asset_numeric(1u32, asset_id.clone());
-    let register_trigger = Register::trigger(Trigger::new(
-        "mint_rose".parse()?,
-        Action::new(
-            vec![instruction],
-            Repeats::Indefinitely,
-            account_id.clone(),
-            TimeEventFilter::new(ExecutionTime::Schedule(schedule)),
-        ),
-    ));
-    test_client.submit(register_trigger)?;
-
-    submit_sample_isi_on_every_block_commit(
-        event_listener,
-        &mut test_client,
-        &account_id,
-        Duration::from_secs(1),
-        3,
-    )?;
-    std::thread::sleep(default_consensus_estimation());
-
-    let finish_time = curr_time();
-    let average_count = finish_time.saturating_sub(start_time).as_millis() / PERIOD.as_millis();
-
-    let actual_value = get_asset_value(&mut test_client, asset_id);
-    let expected_value = prev_value
-        .checked_add(Numeric::new(average_count, 0))
-        .unwrap();
-    let acceptable_error = expected_value.to_f64() * (f64::from(ACCEPTABLE_ERROR_PERCENT) / 100.0);
-    let error = core::cmp::max(actual_value, expected_value)
-        .checked_sub(core::cmp::min(actual_value, expected_value))
-        .unwrap()
-        .to_f64();
-    assert!(
-        error < acceptable_error,
-        "error = {error}, but acceptable error = {acceptable_error}"
-    );
-
-    Ok(())
-}
-
 #[test]
 fn mint_asset_after_3_sec() -> Result<()> {
     let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_665).start_with_runtime();
     wait_for_genesis_committed(&vec![test_client.clone()], 0);
     // Sleep to certainly bypass time interval analyzed by genesis
-    std::thread::sleep(default_consensus_estimation());
+    std::thread::sleep(pipeline_time());

     let asset_definition_id = "rose#wonderland"
         .parse::<AssetDefinitionId>()
@@ -152,7 +69,7 @@ fn mint_asset_after_3_sec() -> Result<()> {
     assert_eq!(init_quantity, after_registration_quantity);

     // Sleep long enough that trigger start is in the past
-    std::thread::sleep(default_consensus_estimation());
+    std::thread::sleep(pipeline_time());
     test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;

     let after_wait_quantity = test_client.query_single(FindAssetQuantityById {
7 changes: 0 additions & 7 deletions crates/iroha_core/src/block.rs
@@ -157,7 +157,6 @@ mod pending {
             prev_block: Option<&SignedBlock>,
             view_change_index: usize,
             transactions: &[CommittedTransaction],
-            consensus_estimation: Duration,
         ) -> BlockHeader {
             let prev_block_time =
                 prev_block.map_or(Duration::ZERO, |block| block.header().creation_time());
@@ -206,10 +205,6 @@ mod pending {
                 view_change_index: view_change_index
                     .try_into()
                     .expect("View change index should fit into u32"),
-                consensus_estimation_ms: consensus_estimation
-                    .as_millis()
-                    .try_into()
-                    .expect("INTERNAL BUG: Time should fit into u64"),
             }

@@ -254,7 +249,6 @@ mod pending {
                     state.latest_block().as_deref(),
                     view_change_index,
                     &transactions,
-                    state.world.parameters().sumeragi.consensus_estimation(),
                 ),
                 transactions,
             }))
@@ -793,7 +787,6 @@ mod valid {
                 )),
                 creation_time_ms: 0,
                 view_change_index: 0,
-                consensus_estimation_ms: 4_000,
             },
             transactions: Vec::new(),
         };
12 changes: 6 additions & 6 deletions crates/iroha_core/src/smartcontracts/isi/triggers/mod.rs
@@ -39,9 +39,9 @@ pub mod isi {
                 }
             }

-            let last_block_estimation = state_transaction.latest_block().map(|block| {
-                block.header().creation_time() + block.header().consensus_estimation()
-            });
+            let latest_block_time = state_transaction
+                .latest_block()
+                .map(|block| block.header().creation_time());

             let engine = state_transaction.engine.clone(); // Cloning engine is cheap
             let genesis_creation_time_ms = state_transaction.world().genesis_creation_time_ms();
@@ -63,7 +63,7 @@
                 ),
                 EventFilterBox::Time(time_filter) => {
                     if let ExecutionTime::Schedule(schedule) = time_filter.0 {
-                        match last_block_estimation {
+                        match latest_block_time {
                             // Genesis block
                             None => {
                                 let genesis_creation_time_ms = genesis_creation_time_ms
@@ -75,8 +75,8 @@
                                     ));
                                 }
                             }
-                            Some(latest_block_estimation) => {
-                                if schedule.start() < latest_block_estimation {
+                            Some(latest_block_time) => {
+                                if schedule.start() < latest_block_time {
                                     return Err(Error::InvalidParameter(
                                         InvalidParameterError::TimeTriggerInThePast,
                                     ));
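With the estimation gone, registration of a scheduled time trigger is checked against the latest block's creation time alone, where it was previously checked against creation time plus the consensus estimation. A simplified, self-contained sketch of that check; the names are illustrative rather than the crate's actual API:

use std::time::Duration;

#[derive(Debug, PartialEq)]
enum RegisterError {
    TimeTriggerInThePast,
}

// `latest_block_time` is None while only the genesis block exists; the real
// code then validates against the genesis creation time instead (omitted here).
fn validate_schedule_start(
    schedule_start: Duration,
    latest_block_time: Option<Duration>,
) -> Result<(), RegisterError> {
    match latest_block_time {
        Some(latest) if schedule_start < latest => Err(RegisterError::TimeTriggerInThePast),
        _ => Ok(()),
    }
}

fn main() {
    let latest = Some(Duration::from_secs(100));
    // A schedule starting before the latest block is rejected.
    assert_eq!(
        validate_schedule_start(Duration::from_secs(90), latest),
        Err(RegisterError::TimeTriggerInThePast)
    );
    // A schedule starting after it is accepted.
    assert!(validate_schedule_start(Duration::from_secs(110), latest).is_ok());
}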
18 changes: 7 additions & 11 deletions crates/iroha_core/src/state.rs
Expand Up @@ -1455,21 +1455,17 @@ impl<'state> StateBlock<'state> {

/// Create time event using previous and current blocks
fn create_time_event(&self, block: &CommittedBlock) -> TimeEvent {
let prev_interval = self.latest_block().map(|latest_block| {
let header = &latest_block.as_ref().header();
let to = block.as_ref().header().creation_time();

TimeInterval::new(header.creation_time(), header.consensus_estimation())
let since = self.latest_block().map_or(to, |latest_block| {
let header = latest_block.header();
header.creation_time()
});

let interval = TimeInterval::new(
block.as_ref().header().creation_time(),
block.as_ref().header().consensus_estimation(),
);
// NOTE: in case of genesis block only single point in time is matched
let interval = TimeInterval::new(since, to - since);

TimeEvent {
prev_interval,
interval,
}
TimeEvent { interval }
}

/// Process every trigger in `matched_ids`
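The time event now carries a single interval running from the previous block's creation time to the current block's creation time, and the separate prev_interval field is gone; for the genesis block the interval collapses to a single point. A small sketch of that computation using plain Durations as timestamps, with illustrative names rather than the crate's API:

use std::time::Duration;

// Returns (since, length), mirroring TimeInterval::new(since, to - since) above.
fn event_interval(prev_creation: Option<Duration>, current_creation: Duration) -> (Duration, Duration) {
    let to = current_creation;
    let since = prev_creation.unwrap_or(to);
    (since, to - since)
}

fn main() {
    // Genesis block: no previous block, so the interval is a zero-length point.
    assert_eq!(
        event_interval(None, Duration::from_secs(100)),
        (Duration::from_secs(100), Duration::ZERO)
    );
    // Later block: interval spans from the previous block's creation time.
    assert_eq!(
        event_interval(Some(Duration::from_secs(100)), Duration::from_secs(104)),
        (Duration::from_secs(100), Duration::from_secs(4))
    );
}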
5 changes: 1 addition & 4 deletions crates/iroha_data_model/benches/time_event_filter.rs
@@ -15,10 +15,7 @@ fn schedule_from_zero_with_little_period(criterion: &mut Criterion) {
     let since = Duration::from_secs(TIMESTAMP);
     let length = Duration::from_secs(1);
     let interval = TimeInterval::new(since, length);
-    let event = TimeEvent {
-        prev_interval: None,
-        interval,
-    };
+    let event = TimeEvent { interval };
     let schedule = TimeSchedule::starting_at(Duration::ZERO).with_period(Duration::from_millis(1));
     let filter = TimeEventFilter::new(ExecutionTime::Schedule(schedule));

8 changes: 0 additions & 8 deletions crates/iroha_data_model/src/block.rs
@@ -65,8 +65,6 @@ mod model {
         /// Value of view change index. Used to resolve soft forks.
         #[getset(skip)]
         pub view_change_index: u32,
-        /// Estimation of consensus duration (in milliseconds).
-        pub consensus_estimation_ms: u64,
     }

     #[derive(
@@ -135,11 +133,6 @@ impl BlockHeader {
         Duration::from_millis(self.creation_time_ms)
     }

-    /// Consensus estimation
-    pub const fn consensus_estimation(&self) -> Duration {
-        Duration::from_millis(self.consensus_estimation_ms)
-    }
-
     /// Calculate block hash
     #[inline]
     pub fn hash(&self) -> HashOf<BlockHeader> {
@@ -275,7 +268,6 @@ impl SignedBlock {
             transactions_hash,
             creation_time_ms,
             view_change_index: 0,
-            consensus_estimation_ms: 0,
         };
         let transactions = genesis_transactions
             .into_iter()
1 change: 0 additions & 1 deletion crates/iroha_data_model/src/events/pipeline.rs
@@ -357,7 +357,6 @@ mod tests {
             )),
             creation_time_ms: 0,
             view_change_index: 0,
-            consensus_estimation_ms: 0,
         }
     }
 }