fix: fix transparent_api violations
Signed-off-by: Marin Veršić <[email protected]>
mversic committed Oct 20, 2024
1 parent 1555b62 commit 6d19331
Showing 17 changed files with 53 additions and 72 deletions.
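
Every hunk in this commit applies the same transparent-API pattern: call sites stop building struct literals or tuple structs and stop reading fields directly, and go through constructors and getters instead. A minimal sketch of the idea (Pagination here is a simplified stand-in, not the actual iroha_data_model definition, which derives its getters with getset and may differ in detail):

    use std::num::NonZeroU64;

    // Call sites used to write `Pagination { limit: Some(..), offset: 1 }` and read the
    // fields directly; with the fields private they go through `new` and getters instead.
    pub struct Pagination {
        limit: Option<NonZeroU64>,
        offset: u64,
    }

    impl Pagination {
        pub fn new(limit: Option<NonZeroU64>, offset: u64) -> Self {
            Self { limit, offset }
        }

        // Both fields are Copy, so the getters return by value.
        pub fn limit(&self) -> Option<NonZeroU64> {
            self.limit
        }

        pub fn offset(&self) -> u64 {
            self.offset
        }
    }

    fn main() {
        let page = Pagination::new(NonZeroU64::new(7), 1);
        assert_eq!(page.limit().map(NonZeroU64::get), Some(7));
        assert_eq!(page.offset(), 1);
    }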

@@ -29,7 +29,7 @@ async fn multiple_blocks_created() -> Result<()> {
// Given
let network = NetworkBuilder::new()
.with_peers(4)
- .with_genesis_instruction(SetParameter(Parameter::Block(
+ .with_genesis_instruction(SetParameter::new(Parameter::Block(
BlockParameter::MaxTransactions(NonZero::new(N_MAX_TXS_PER_BLOCK).expect("valid")),
)))
.with_pipeline_time(Duration::from_secs(1))

15 changes: 3 additions & 12 deletions crates/iroha/tests/integration/pagination.rs
@@ -15,10 +15,7 @@ fn limits_should_work() -> Result<()> {

let vec = client
.query(asset::all_definitions())
- .with_pagination(Pagination {
- limit: Some(nonzero!(7_u64)),
- offset: 1,
- })
+ .with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
.execute_all()?;
assert_eq!(vec.len(), 7);
Ok(())
@@ -33,10 +30,7 @@ fn reported_length_should_be_accurate() -> Result<()> {

let mut iter = client
.query(asset::all_definitions())
- .with_pagination(Pagination {
- limit: Some(nonzero!(7_u64)),
- offset: 1,
- })
+ .with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
.with_fetch_size(FetchSize::new(Some(nonzero!(3_u64))))
.execute()?;

@@ -68,10 +62,7 @@ fn fetch_size_should_work() -> Result<()> {
let query = QueryWithParams::new(
QueryWithFilter::new(asset::all_definitions(), CompoundPredicate::PASS).into(),
QueryParams::new(
- Pagination {
- limit: Some(nonzero!(7_u64)),
- offset: 1,
- },
+ Pagination::new(Some(nonzero!(7_u64)), 1),
Sorting::default(),
FetchSize::new(Some(nonzero!(3_u64))),
),

4 changes: 2 additions & 2 deletions crates/iroha/tests/integration/queries/asset.rs
@@ -78,7 +78,7 @@ fn find_asset_total_quantity() -> Result<()> {
.query(FindAssetsDefinitions::new())
.filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
.execute_single()?
- .total_quantity)
+ .total_quantity())
};

// Assert that initial total quantity before any registrations and unregistrations is zero
@@ -154,7 +154,7 @@ where
.query(FindAssetsDefinitions::new())
.filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
.execute_single()?
- .total_quantity)
+ .total_quantity())
};

// Assert that initial total quantity before any burns and mints is zero

8 changes: 4 additions & 4 deletions crates/iroha/tests/integration/set_parameter.rs
@@ -12,22 +12,22 @@ use nonzero_ext::nonzero;
#[test]
fn can_change_parameter_value() -> Result<()> {
let (network, _rt) = NetworkBuilder::new()
- .with_genesis_instruction(SetParameter(Parameter::Block(
+ .with_genesis_instruction(SetParameter::new(Parameter::Block(
BlockParameter::MaxTransactions(nonzero!(16u64)),
)))
.start_blocking()?;
let test_client = network.client();

let old_params: Parameters = test_client.query_single(client::parameter::all())?;
- assert_eq!(old_params.block.max_transactions, nonzero!(16u64));
+ assert_eq!(old_params.block().max_transactions(), nonzero!(16u64));

let new_value = nonzero!(32u64);
- test_client.submit_blocking(SetParameter(Parameter::Block(
+ test_client.submit_blocking(SetParameter::new(Parameter::Block(
BlockParameter::MaxTransactions(new_value),
)))?;

let params = test_client.query_single(client::parameter::all())?;
- assert_eq!(params.block.max_transactions, new_value);
+ assert_eq!(params.block().max_transactions(), new_value);

Ok(())
}

5 changes: 1 addition & 4 deletions crates/iroha/tests/integration/sorting.rs
@@ -22,10 +22,7 @@ fn correct_pagination_assets_after_creating_new_one() {
const N_ASSETS: usize = 12;
// 0 < pagination.start < missing_idx < pagination.end < N_ASSETS
let missing_indices = vec![N_ASSETS / 2];
- let pagination = Pagination {
- limit: Some(nonzero!(N_ASSETS as u64 / 3)),
- offset: N_ASSETS as u64 / 3,
- };
+ let pagination = Pagination::new(Some(nonzero!(N_ASSETS as u64 / 3)), N_ASSETS as u64 / 3);
let xor_filter =
AssetPredicateBox::build(|asset| asset.id.definition_id.name.starts_with("xor"));


16 changes: 8 additions & 8 deletions crates/iroha/tests/integration/triggers/by_call_trigger.rs
@@ -342,7 +342,7 @@ fn only_account_with_permission_can_register_trigger() -> Result<()> {
.filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
.execute_single()?;

- assert_eq!(found_trigger.id, trigger_id);
+ assert_eq!(*found_trigger.id(), trigger_id);

Ok(())
}
@@ -375,17 +375,17 @@ fn unregister_trigger() -> Result<()> {
.query(FindTriggers::new())
.filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
.execute_single()?;
- let found_action = found_trigger.action;
- let Executable::Instructions(found_instructions) = found_action.executable else {
+ let found_action = found_trigger.action();
+ let Executable::Instructions(found_instructions) = found_action.executable() else {
panic!("Expected instructions");
};
let found_trigger = Trigger::new(
- found_trigger.id,
+ found_trigger.id().clone(),
Action::new(
- Executable::Instructions(found_instructions),
- found_action.repeats,
- found_action.authority,
- found_action.filter,
+ Executable::Instructions(found_instructions.to_owned()),
+ found_action.repeats().clone(),
+ found_action.authority().clone(),
+ found_action.filter().clone(),
),
);
assert_eq!(found_trigger, trigger);

13 changes: 7 additions & 6 deletions crates/iroha/tests/integration/triggers/orphans.rs
@@ -5,13 +5,12 @@ use iroha::{
use iroha_test_network::*;
use iroha_test_samples::gen_account_in;

- fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<TriggerId> {
+ fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<Trigger> {
iroha
.query(FindTriggers::new())
.filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
.execute_single()
.ok()
- .map(|trigger| trigger.id)
}

fn set_up_trigger(iroha: &Client) -> eyre::Result<(DomainId, AccountId, TriggerId)> {
@@ -45,9 +44,10 @@ fn trigger_must_be_removed_on_action_authority_account_removal() -> eyre::Result
let (network, _rt) = NetworkBuilder::new().start_blocking()?;
let iroha = network.client();
let (_, the_one_who_fails, fail_on_account_events) = set_up_trigger(&iroha)?;
+ let trigger = find_trigger(&iroha, &fail_on_account_events);
assert_eq!(
- find_trigger(&iroha, &fail_on_account_events),
- Some(fail_on_account_events.clone())
+ trigger.as_ref().map(Identifiable::id),
+ Some(&fail_on_account_events.clone())
);
iroha.submit_blocking(Unregister::account(the_one_who_fails.clone()))?;
assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
@@ -59,9 +59,10 @@ fn trigger_must_be_removed_on_action_authority_domain_removal() -> eyre::Result<
let (network, _rt) = NetworkBuilder::new().start_blocking()?;
let iroha = network.client();
let (failand, _, fail_on_account_events) = set_up_trigger(&iroha)?;
+ let trigger = find_trigger(&iroha, &fail_on_account_events);
assert_eq!(
- find_trigger(&iroha, &fail_on_account_events),
- Some(fail_on_account_events.clone())
+ trigger.as_ref().map(Identifiable::id),
+ Some(&fail_on_account_events.clone())
);
iroha.submit_blocking(Unregister::domain(failand.clone()))?;
assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
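
Since find_trigger now returns the whole Trigger, the assertions above compare IDs through the Identifiable trait instead of a public id field. A self-contained sketch of that comparison, with hand-rolled stand-ins for Trigger, TriggerId and Identifiable (the real items live in iroha_data_model and may be defined differently):

    // Hand-rolled stand-ins for the data-model items referenced above.
    #[derive(Debug, PartialEq, Clone)]
    struct TriggerId(String);

    struct Trigger {
        id: TriggerId,
    }

    trait Identifiable {
        type Id;
        fn id(&self) -> &Self::Id;
    }

    impl Identifiable for Trigger {
        type Id = TriggerId;
        fn id(&self) -> &Self::Id {
            &self.id
        }
    }

    fn main() {
        let wanted = TriggerId("my_trigger".to_owned());
        let found: Option<Trigger> = Some(Trigger { id: wanted.clone() });
        // Compare by ID without moving the Trigger out of the Option.
        assert_eq!(found.as_ref().map(Identifiable::id), Some(&wanted));
    }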

18 changes: 8 additions & 10 deletions crates/iroha/tests/integration/triggers/time_trigger.rs
@@ -40,9 +40,9 @@ fn mint_asset_after_3_sec() -> Result<()> {
let account_id = ALICE_ID.clone();
let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone());

- let init_quantity = test_client.query_single(FindAssetQuantityById {
- id: asset_id.clone(),
- })?;
+ let init_quantity = test_client.query_single(FindAssetQuantityById::new(
+ asset_id.clone(),
+ ))?;

let start_time = curr_time();
assert!(
@@ -64,18 +64,16 @@

// Schedule start is in the future so trigger isn't executed after creating a new block
test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;
- let after_registration_quantity = test_client.query_single(FindAssetQuantityById {
- id: asset_id.clone(),
- })?;
+ let after_registration_quantity =
+ test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
assert_eq!(init_quantity, after_registration_quantity);

// Sleep long enough that trigger start is in the past
std::thread::sleep(network.pipeline_time());
test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;

- let after_wait_quantity = test_client.query_single(FindAssetQuantityById {
- id: asset_id.clone(),
- })?;
+ let after_wait_quantity =
+ test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
// Schedule is in the past now so trigger is executed
assert_eq!(
init_quantity.checked_add(1u32.into()).unwrap(),
@@ -168,7 +166,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> {
let start_time = curr_time() + offset;
let schedule = TimeSchedule::starting_at(start_time).with_period(TRIGGER_PERIOD);

- let filter = TimeEventFilter(ExecutionTime::Schedule(schedule));
+ let filter = TimeEventFilter::new(ExecutionTime::Schedule(schedule));
let register_trigger = Register::trigger(Trigger::new(
"mint_nft_for_all".parse()?,
Action::new(

5 changes: 1 addition & 4 deletions crates/iroha/tests/integration/tx_history.rs
@@ -44,10 +44,7 @@ fn client_has_rejected_and_accepted_txs_should_return_tx_history() -> Result<()>
let transactions = client
.query(transaction::all())
.filter_with(|tx| tx.transaction.value.authority.eq(account_id.clone()))
- .with_pagination(Pagination {
- limit: Some(nonzero!(50_u64)),
- offset: 1,
- })
+ .with_pagination(Pagination::new(Some(nonzero!(50_u64)), 1))
.execute_all()?;
assert_eq!(transactions.len(), 50);


17 changes: 7 additions & 10 deletions crates/iroha/tests/integration/upgrade.rs
@@ -146,10 +146,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
.query(client::role::all())
.execute_all()?
.into_iter()
- .find(|role| role.id == test_role_id)
+ .find(|role| *role.id() == test_role_id)
.expect("Failed to find Role")
- .permissions
- .iter()
+ .permissions()
.any(|permission| {
CanUnregisterDomain::try_from(permission)
.is_ok_and(|permission| permission == can_unregister_domain)
@@ -179,10 +178,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
.query(client::role::all())
.execute_all()?
.into_iter()
- .find(|role| role.id == test_role_id)
+ .find(|role| *role.id() == test_role_id)
.expect("Failed to find Role")
- .permissions
- .iter()
+ .permissions()
.any(|permission| {
CanUnregisterDomain::try_from(permission)
.is_ok_and(|permission| permission == can_unregister_domain)
@@ -343,11 +341,10 @@ fn migration_should_cause_upgrade_event() {
.await
.unwrap();
while let Some(event) = stream.try_next().await.unwrap() {
- if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(ExecutorUpgrade {
- new_data_model,
- }))) = event
+ if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(executor_upgrade))) =
+ event
{
- assert!(!new_data_model.permissions.is_empty());
+ assert!(!executor_upgrade.new_data_model().permissions().is_empty());
break;
}
}
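
With ExecutorUpgrade no longer destructurable from outside the crate, the test binds the event payload and reads it through getters. A rough sketch of the pattern, using hypothetical stand-in types rather than the real EventBox, ExecutorUpgrade and data-model definitions:

    // Hypothetical stand-ins; the real event and data-model types differ.
    struct DataModel {
        permissions: Vec<String>,
    }

    impl DataModel {
        fn permissions(&self) -> &[String] {
            &self.permissions
        }
    }

    struct ExecutorUpgrade {
        new_data_model: DataModel,
    }

    impl ExecutorUpgrade {
        fn new_data_model(&self) -> &DataModel {
            &self.new_data_model
        }
    }

    enum Event {
        Upgraded(ExecutorUpgrade),
    }

    fn main() {
        let event = Event::Upgraded(ExecutorUpgrade {
            new_data_model: DataModel {
                permissions: vec!["CanUnregisterDomain".to_owned()],
            },
        });
        // Bind the payload and read it through getters instead of destructuring fields.
        let Event::Upgraded(upgrade) = event;
        assert!(!upgrade.new_data_model().permissions().is_empty());
    }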

2 changes: 1 addition & 1 deletion crates/iroha_codec/Cargo.toml
@@ -11,7 +11,7 @@ license.workspace = true
workspace = true

[dependencies]
- iroha_data_model = { workspace = true, features = ["http"] }
+ iroha_data_model = { workspace = true }
iroha_executor_data_model = { workspace = true }
iroha_primitives = { workspace = true }
iroha_schema = { workspace = true }

2 changes: 1 addition & 1 deletion crates/iroha_data_model/src/asset.rs
@@ -130,7 +130,7 @@ mod model {
/// The total amount of this asset in existence.
///
/// For numeric assets - it is the sum of all asset values. For store assets - it is the count of all assets.
- #[getset(get = "pub")]
+ #[getset(get_copy = "pub")]
pub total_quantity: Numeric,
}
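
The attribute swap above changes what getset derives for total_quantity: get = "pub" produces a by-reference getter, while get_copy = "pub" returns the value by copy (the field type must be Copy). A small illustration with the getset crate, on a made-up struct rather than the real Iroha asset type:

    use getset::{CopyGetters, Getters};

    #[derive(Getters, CopyGetters)]
    struct Totals {
        // `get = "pub"` generates `pub fn label(&self) -> &String`.
        #[getset(get = "pub")]
        label: String,
        // `get_copy = "pub"` generates `pub fn quantity(&self) -> u64` (by value).
        #[getset(get_copy = "pub")]
        quantity: u64,
    }

    fn main() {
        let totals = Totals { label: "rose".to_owned(), quantity: 42 };
        let by_ref: &String = totals.label();
        let by_value: u64 = totals.quantity();
        println!("{by_ref}: {by_value}");
    }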


2 changes: 1 addition & 1 deletion crates/iroha_data_model_derive/Cargo.toml
@@ -22,7 +22,7 @@ manyhow = { workspace = true }
iroha_macro_utils = { workspace = true }

[dev-dependencies]
- iroha_data_model = { workspace = true, features = ["http"] }
+ iroha_data_model = { workspace = true }
iroha_schema = { workspace = true }
parity-scale-codec = { workspace = true }
derive_more = { workspace = true }

2 changes: 1 addition & 1 deletion crates/iroha_genesis/Cargo.toml
@@ -13,7 +13,7 @@ workspace = true
[dependencies]
iroha_crypto = { workspace = true }
iroha_schema = { workspace = true }
- iroha_data_model = { workspace = true, features = ["http"] }
+ iroha_data_model = { workspace = true, features = ["std"] }

derive_more = { workspace = true, features = ["deref"] }
serde = { workspace = true, features = ["derive"] }

4 changes: 2 additions & 2 deletions crates/iroha_genesis/src/lib.rs
@@ -423,7 +423,7 @@ mod tests {
// First transaction
{
let transaction = transactions[0];
- let instructions = transaction.value.instructions();
+ let instructions = transaction.as_ref().instructions();
let Executable::Instructions(instructions) = instructions else {
panic!("Expected instructions");
};
@@ -434,7 +434,7 @@

// Second transaction
let transaction = transactions[1];
- let instructions = transaction.value.instructions();
+ let instructions = transaction.as_ref().instructions();
let Executable::Instructions(instructions) = instructions else {
panic!("Expected instructions");
};

8 changes: 4 additions & 4 deletions crates/iroha_test_network/src/lib.rs
@@ -327,10 +327,10 @@ impl NetworkBuilder {
block_time = duration / 3;
commit_time = duration / 2;
extra_isi.extend([
- InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+ InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
SumeragiParameter::BlockTimeMs(block_time.as_millis() as u64),
))),
- InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+ InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
SumeragiParameter::CommitTimeMs(commit_time.as_millis() as u64),
))),
]);
@@ -341,10 +341,10 @@

let genesis = config::genesis(
[
- InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+ InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
SumeragiParameter::BlockTimeMs(block_time.as_millis() as u64),
))),
- InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+ InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
SumeragiParameter::CommitTimeMs(commit_time.as_millis() as u64),
))),
]

2 changes: 1 addition & 1 deletion crates/irohad/Cargo.toml
@@ -40,7 +40,7 @@ maintenance = { status = "actively-developed" }
iroha_core = { workspace = true }
iroha_logger = { workspace = true }
iroha_futures = { workspace = true }
- iroha_data_model = { workspace = true, features = ["http"] }
+ iroha_data_model = { workspace = true }
iroha_primitives = { workspace = true }
iroha_telemetry = { workspace = true, optional = true }
iroha_config = { workspace = true }