Skip to content

Commit

Permalink
fix: fix transparent_api violations
Browse files Browse the repository at this point in the history
Signed-off-by: Marin Veršić <marin.versic101@gmail.com>
  • Loading branch information
mversic committed Oct 20, 2024
1 parent 1555b62 commit a9ee4fc
Show file tree
Hide file tree
Showing 22 changed files with 100 additions and 100 deletions.
25 changes: 15 additions & 10 deletions crates/iroha/tests/integration/asset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use iroha::{
crypto::KeyPair,
data_model::{
asset::{AssetId, AssetType, AssetValue},
isi::error::{InstructionEvaluationError, InstructionExecutionError, Mismatch, TypeError},
isi::error::{InstructionEvaluationError, InstructionExecutionError, TypeError},
prelude::*,
transaction::error::TransactionRejectionReason,
},
Expand Down Expand Up @@ -401,16 +401,21 @@ fn fail_if_dont_satisfy_spec() {
.downcast_ref::<TransactionRejectionReason>()
.unwrap_or_else(|| panic!("Error {err} is not TransactionRejectionReason"));

let TransactionRejectionReason::Validation(ValidationFail::InstructionFailed(
InstructionExecutionError::Evaluate(InstructionEvaluationError::Type(
TypeError::AssetType(rejection_reason),
)),
)) = rejection_reason
else {
panic!("Wrong rejection reason");
};
		assert_eq!(
			rejection_reason,
			&TransactionRejectionReason::Validation(ValidationFail::InstructionFailed(
				InstructionExecutionError::Evaluate(InstructionEvaluationError::Type(
					TypeError::from(Mismatch {
						expected: AssetType::Numeric(NumericSpec::integer()),
						actual: AssetType::Numeric(NumericSpec::fractional(2))
					})
				))
			))
		);
		assert_eq!(
			*rejection_reason.expected(),
			AssetType::Numeric(NumericSpec::integer()),
		);
		assert_eq!(
			*rejection_reason.actual(),
			AssetType::Numeric(NumericSpec::fractional(2))
		);
}

Expand Down
29 changes: 14 additions & 15 deletions crates/iroha/tests/integration/events/pipeline.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use std::time::Duration;

use assert_matches::assert_matches;
use eyre::Result;
use futures_util::StreamExt;
use iroha::data_model::{
Expand Down Expand Up @@ -54,20 +53,20 @@ async fn test_with_instruction_and_status(

// Then
timeout(Duration::from_secs(5), async move {
assert_matches!(
events.next().await.unwrap().unwrap(),
EventBox::Pipeline(PipelineEventBox::Transaction(TransactionEvent {
status: TransactionStatus::Queued,
..
}))
);
assert_matches!(
events.next().await.unwrap().unwrap(),
EventBox::Pipeline(PipelineEventBox::Transaction(TransactionEvent {
status,
..
})) if status == *should_be
);
let EventBox::Pipeline(PipelineEventBox::Transaction(event)) =
events.next().await.unwrap().unwrap()
else {
panic!("Expected transaction event");
};
assert_eq!(*event.status(), TransactionStatus::Queued);

let EventBox::Pipeline(PipelineEventBox::Transaction(event)) =
events.next().await.unwrap().unwrap()
else {
panic!("Expected transaction event");
};

assert_eq!(event.status(), should_be);
})
.await?;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ async fn multiple_blocks_created() -> Result<()> {
// Given
let network = NetworkBuilder::new()
.with_peers(4)
.with_genesis_instruction(SetParameter(Parameter::Block(
.with_genesis_instruction(SetParameter::new(Parameter::Block(
BlockParameter::MaxTransactions(NonZero::new(N_MAX_TXS_PER_BLOCK).expect("valid")),
)))
.with_pipeline_time(Duration::from_secs(1))
Expand Down
15 changes: 3 additions & 12 deletions crates/iroha/tests/integration/pagination.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,7 @@ fn limits_should_work() -> Result<()> {

let vec = client
.query(asset::all_definitions())
.with_pagination(Pagination {
limit: Some(nonzero!(7_u64)),
offset: 1,
})
.with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
.execute_all()?;
assert_eq!(vec.len(), 7);
Ok(())
Expand All @@ -33,10 +30,7 @@ fn reported_length_should_be_accurate() -> Result<()> {

let mut iter = client
.query(asset::all_definitions())
.with_pagination(Pagination {
limit: Some(nonzero!(7_u64)),
offset: 1,
})
.with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
.with_fetch_size(FetchSize::new(Some(nonzero!(3_u64))))
.execute()?;

Expand Down Expand Up @@ -68,10 +62,7 @@ fn fetch_size_should_work() -> Result<()> {
let query = QueryWithParams::new(
QueryWithFilter::new(asset::all_definitions(), CompoundPredicate::PASS).into(),
QueryParams::new(
Pagination {
limit: Some(nonzero!(7_u64)),
offset: 1,
},
Pagination::new(Some(nonzero!(7_u64)), 1),
Sorting::default(),
FetchSize::new(Some(nonzero!(3_u64))),
),
Expand Down
4 changes: 2 additions & 2 deletions crates/iroha/tests/integration/queries/asset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ fn find_asset_total_quantity() -> Result<()> {
.query(FindAssetsDefinitions::new())
.filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
.execute_single()?
.total_quantity)
.total_quantity())
};

// Assert that initial total quantity before any registrations and unregistrations is zero
Expand Down Expand Up @@ -154,7 +154,7 @@ where
.query(FindAssetsDefinitions::new())
.filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
.execute_single()?
.total_quantity)
.total_quantity())
};

// Assert that initial total quantity before any burns and mints is zero
Expand Down
8 changes: 4 additions & 4 deletions crates/iroha/tests/integration/set_parameter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,22 +12,22 @@ use nonzero_ext::nonzero;
#[test]
fn can_change_parameter_value() -> Result<()> {
let (network, _rt) = NetworkBuilder::new()
.with_genesis_instruction(SetParameter(Parameter::Block(
.with_genesis_instruction(SetParameter::new(Parameter::Block(
BlockParameter::MaxTransactions(nonzero!(16u64)),
)))
.start_blocking()?;
let test_client = network.client();

let old_params: Parameters = test_client.query_single(client::parameter::all())?;
assert_eq!(old_params.block.max_transactions, nonzero!(16u64));
assert_eq!(old_params.block().max_transactions(), nonzero!(16u64));

let new_value = nonzero!(32u64);
test_client.submit_blocking(SetParameter(Parameter::Block(
test_client.submit_blocking(SetParameter::new(Parameter::Block(
BlockParameter::MaxTransactions(new_value),
)))?;

let params = test_client.query_single(client::parameter::all())?;
assert_eq!(params.block.max_transactions, new_value);
assert_eq!(params.block().max_transactions(), new_value);

Ok(())
}
5 changes: 1 addition & 4 deletions crates/iroha/tests/integration/sorting.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,7 @@ fn correct_pagination_assets_after_creating_new_one() {
const N_ASSETS: usize = 12;
// 0 < pagination.start < missing_idx < pagination.end < N_ASSETS
let missing_indices = vec![N_ASSETS / 2];
let pagination = Pagination {
limit: Some(nonzero!(N_ASSETS as u64 / 3)),
offset: N_ASSETS as u64 / 3,
};
let pagination = Pagination::new(Some(nonzero!(N_ASSETS as u64 / 3)), N_ASSETS as u64 / 3);
let xor_filter =
AssetPredicateBox::build(|asset| asset.id.definition_id.name.starts_with("xor"));

Expand Down
16 changes: 8 additions & 8 deletions crates/iroha/tests/integration/triggers/by_call_trigger.rs
Original file line number Diff line number Diff line change
Expand Up @@ -342,7 +342,7 @@ fn only_account_with_permission_can_register_trigger() -> Result<()> {
.filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
.execute_single()?;

assert_eq!(found_trigger.id, trigger_id);
assert_eq!(*found_trigger.id(), trigger_id);

Ok(())
}
Expand Down Expand Up @@ -375,17 +375,17 @@ fn unregister_trigger() -> Result<()> {
.query(FindTriggers::new())
.filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
.execute_single()?;
let found_action = found_trigger.action;
let Executable::Instructions(found_instructions) = found_action.executable else {
let found_action = found_trigger.action();
let Executable::Instructions(found_instructions) = found_action.executable() else {
panic!("Expected instructions");
};
let found_trigger = Trigger::new(
found_trigger.id,
found_trigger.id().clone(),
Action::new(
Executable::Instructions(found_instructions),
found_action.repeats,
found_action.authority,
found_action.filter,
Executable::Instructions(found_instructions.to_owned()),
found_action.repeats(),
found_action.authority().clone(),
found_action.filter().clone(),
),
);
assert_eq!(found_trigger, trigger);
Expand Down
13 changes: 7 additions & 6 deletions crates/iroha/tests/integration/triggers/orphans.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,12 @@ use iroha::{
use iroha_test_network::*;
use iroha_test_samples::gen_account_in;

fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<TriggerId> {
fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<Trigger> {
iroha
.query(FindTriggers::new())
.filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
.execute_single()
.ok()
.map(|trigger| trigger.id)
}

fn set_up_trigger(iroha: &Client) -> eyre::Result<(DomainId, AccountId, TriggerId)> {
Expand Down Expand Up @@ -45,9 +44,10 @@ fn trigger_must_be_removed_on_action_authority_account_removal() -> eyre::Result
let (network, _rt) = NetworkBuilder::new().start_blocking()?;
let iroha = network.client();
let (_, the_one_who_fails, fail_on_account_events) = set_up_trigger(&iroha)?;
let trigger = find_trigger(&iroha, &fail_on_account_events);
assert_eq!(
find_trigger(&iroha, &fail_on_account_events),
Some(fail_on_account_events.clone())
trigger.as_ref().map(Identifiable::id),
Some(&fail_on_account_events.clone())
);
iroha.submit_blocking(Unregister::account(the_one_who_fails.clone()))?;
assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
Expand All @@ -59,9 +59,10 @@ fn trigger_must_be_removed_on_action_authority_domain_removal() -> eyre::Result<
let (network, _rt) = NetworkBuilder::new().start_blocking()?;
let iroha = network.client();
let (failand, _, fail_on_account_events) = set_up_trigger(&iroha)?;
let trigger = find_trigger(&iroha, &fail_on_account_events);
assert_eq!(
find_trigger(&iroha, &fail_on_account_events),
Some(fail_on_account_events.clone())
trigger.as_ref().map(Identifiable::id),
Some(&fail_on_account_events.clone())
);
iroha.submit_blocking(Unregister::domain(failand.clone()))?;
assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
Expand Down
16 changes: 6 additions & 10 deletions crates/iroha/tests/integration/triggers/time_trigger.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,7 @@ fn mint_asset_after_3_sec() -> Result<()> {
let account_id = ALICE_ID.clone();
let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone());

let init_quantity = test_client.query_single(FindAssetQuantityById {
id: asset_id.clone(),
})?;
let init_quantity = test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;

let start_time = curr_time();
assert!(
Expand All @@ -64,18 +62,16 @@ fn mint_asset_after_3_sec() -> Result<()> {

// Schedule start is in the future so trigger isn't executed after creating a new block
test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;
let after_registration_quantity = test_client.query_single(FindAssetQuantityById {
id: asset_id.clone(),
})?;
let after_registration_quantity =
test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
assert_eq!(init_quantity, after_registration_quantity);

// Sleep long enough that trigger start is in the past
std::thread::sleep(network.pipeline_time());
test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;

let after_wait_quantity = test_client.query_single(FindAssetQuantityById {
id: asset_id.clone(),
})?;
let after_wait_quantity =
test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
// Schedule is in the past now so trigger is executed
assert_eq!(
init_quantity.checked_add(1u32.into()).unwrap(),
Expand Down Expand Up @@ -168,7 +164,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> {
let start_time = curr_time() + offset;
let schedule = TimeSchedule::starting_at(start_time).with_period(TRIGGER_PERIOD);

let filter = TimeEventFilter(ExecutionTime::Schedule(schedule));
let filter = TimeEventFilter::new(ExecutionTime::Schedule(schedule));
let register_trigger = Register::trigger(Trigger::new(
"mint_nft_for_all".parse()?,
Action::new(
Expand Down
5 changes: 1 addition & 4 deletions crates/iroha/tests/integration/tx_history.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,10 +44,7 @@ fn client_has_rejected_and_accepted_txs_should_return_tx_history() -> Result<()>
let transactions = client
.query(transaction::all())
.filter_with(|tx| tx.transaction.value.authority.eq(account_id.clone()))
.with_pagination(Pagination {
limit: Some(nonzero!(50_u64)),
offset: 1,
})
.with_pagination(Pagination::new(Some(nonzero!(50_u64)), 1))
.execute_all()?;
assert_eq!(transactions.len(), 50);

Expand Down
17 changes: 7 additions & 10 deletions crates/iroha/tests/integration/upgrade.rs
Original file line number Diff line number Diff line change
Expand Up @@ -146,10 +146,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
.query(client::role::all())
.execute_all()?
.into_iter()
.find(|role| role.id == test_role_id)
.find(|role| *role.id() == test_role_id)
.expect("Failed to find Role")
.permissions
.iter()
.permissions()
.any(|permission| {
CanUnregisterDomain::try_from(permission)
.is_ok_and(|permission| permission == can_unregister_domain)
Expand Down Expand Up @@ -179,10 +178,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
.query(client::role::all())
.execute_all()?
.into_iter()
.find(|role| role.id == test_role_id)
.find(|role| *role.id() == test_role_id)
.expect("Failed to find Role")
.permissions
.iter()
.permissions()
.any(|permission| {
CanUnregisterDomain::try_from(permission)
.is_ok_and(|permission| permission == can_unregister_domain)
Expand Down Expand Up @@ -343,11 +341,10 @@ fn migration_should_cause_upgrade_event() {
.await
.unwrap();
while let Some(event) = stream.try_next().await.unwrap() {
if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(ExecutorUpgrade {
new_data_model,
}))) = event
if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(executor_upgrade))) =
event
{
assert!(!new_data_model.permissions.is_empty());
assert!(!executor_upgrade.new_data_model().permissions().is_empty());
break;
}
}
Expand Down
2 changes: 1 addition & 1 deletion crates/iroha_codec/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ license.workspace = true
workspace = true

[dependencies]
iroha_data_model = { workspace = true, features = ["http"] }
iroha_data_model = { workspace = true }
iroha_executor_data_model = { workspace = true }
iroha_primitives = { workspace = true }
iroha_schema = { workspace = true }
Expand Down
2 changes: 1 addition & 1 deletion crates/iroha_data_model/src/asset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ mod model {
/// The total amount of this asset in existence.
///
/// For numeric assets - it is the sum of all asset values. For store assets - it is the count of all assets.
#[getset(get = "pub")]
#[getset(get_copy = "pub")]
pub total_quantity: Numeric,
}

Expand Down
Loading

0 comments on commit a9ee4fc

Please sign in to comment.