diff --git a/Cargo.lock b/Cargo.lock
index d975fb850fe..bd767492c67 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2959,7 +2959,6 @@ dependencies = [
  "eyre",
  "humantime",
  "iroha",
- "iroha_config_base",
  "iroha_primitives",
  "json5",
  "serde",
@@ -2976,14 +2975,10 @@ dependencies = [
  "clap",
  "colored",
  "eyre",
- "iroha_crypto",
  "iroha_data_model",
- "iroha_executor_data_model",
  "iroha_genesis",
- "iroha_primitives",
  "iroha_schema",
  "iroha_schema_gen",
- "iroha_version",
  "parity-scale-codec",
  "serde",
  "serde_json",
@@ -3414,7 +3409,6 @@ dependencies = [
  "iroha_futures",
  "iroha_logger",
  "iroha_primitives",
- "iroha_test_network",
  "parity-scale-codec",
  "rand",
  "thiserror",
@@ -3583,7 +3577,6 @@ dependencies = [
 name = "iroha_telemetry_derive"
 version = "2.0.0-rc.1.0"
 dependencies = [
- "iroha_core",
  "iroha_macro_utils",
  "manyhow",
  "proc-macro2",
@@ -3603,7 +3596,6 @@ dependencies = [
  "futures",
  "iroha",
  "iroha_config",
- "iroha_core",
  "iroha_crypto",
  "iroha_data_model",
  "iroha_executor_data_model",
diff --git a/Cargo.toml b/Cargo.toml
index bd5bf0b5b3a..eeb917bca99 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -69,7 +69,6 @@ parking_lot = { version = "0.12.3" }
 tempfile = "3.10.1"
 path-absolutize = "3.1.1"
-pathdiff = "0.2.1"

 bytes = "1.6.1"
 vergen = { version = "8.3.1", default-features = false }
@@ -86,7 +85,6 @@ prometheus = { version = "0.13.4", default-features = false }
 clap = "4.5.11"
 owo-colors = "4.1.0"
 supports-color = "2.1.0"
-inquire = "0.6.2"
 spinoff = "0.8.0"
 humantime = "2.1.0"
@@ -119,7 +117,6 @@ rustc-hash = "1.1.0"

 serde = { version = "1.0.204", default-features = false }
 serde_json = { version = "1.0.121", default-features = false }
-serde_yaml = "0.9.34"
 serde_with = { version = "3.9.0", default-features = false }
 parity-scale-codec = { version = "3.6.12", default-features = false }
 json5 = "0.4.1"
diff --git a/crates/iroha/Cargo.toml b/crates/iroha/Cargo.toml
index 207c6ae6aa8..13ece91a788 100644
--- a/crates/iroha/Cargo.toml
+++ b/crates/iroha/Cargo.toml
@@ -51,8 +51,7 @@ tls-rustls-webpki-roots = [
 iroha_config = { workspace = true }
 iroha_config_base = { workspace = true }
 iroha_crypto = { workspace = true }
-# FIXME: should remove `transparent_api` feature. Check the other FIXME comment in dev-dependencies
-iroha_data_model = { workspace = true, features = ["http", "transparent_api"] }
+iroha_data_model = { workspace = true, features = ["http"] }
 iroha_primitives = { workspace = true }
 iroha_logger = { workspace = true }
 iroha_telemetry = { workspace = true }
diff --git a/crates/iroha/tests/asset.rs b/crates/iroha/tests/asset.rs
index faf5f71fcc0..47db4c2550c 100644
--- a/crates/iroha/tests/asset.rs
+++ b/crates/iroha/tests/asset.rs
@@ -3,7 +3,7 @@ use iroha::{
     crypto::KeyPair,
     data_model::{
         asset::{AssetId, AssetType, AssetValue},
-        isi::error::{InstructionEvaluationError, InstructionExecutionError, Mismatch, TypeError},
+        isi::error::{InstructionEvaluationError, InstructionExecutionError, TypeError},
         prelude::*,
         transaction::error::TransactionRejectionReason,
     },
@@ -400,16 +400,21 @@ fn fail_if_dont_satisfy_spec() {
         .downcast_ref::<TransactionRejectionReason>()
         .unwrap_or_else(|| panic!("Error {err} is not TransactionRejectionReason"));

+    let TransactionRejectionReason::Validation(ValidationFail::InstructionFailed(
+        InstructionExecutionError::Evaluate(InstructionEvaluationError::Type(
+            TypeError::AssetType(rejection_reason),
+        )),
+    )) = rejection_reason
+    else {
+        panic!("Wrong rejection reason");
+    };
+    assert_eq!(
+        *rejection_reason.expected(),
+        AssetType::Numeric(NumericSpec::integer()),
+    );
     assert_eq!(
-        rejection_reason,
-        &TransactionRejectionReason::Validation(ValidationFail::InstructionFailed(
-            InstructionExecutionError::Evaluate(InstructionEvaluationError::Type(
-                TypeError::from(Mismatch {
-                    expected: AssetType::Numeric(NumericSpec::integer()),
-                    actual: AssetType::Numeric(NumericSpec::fractional(2))
-                })
-            ))
-        ))
+        *rejection_reason.actual(),
+        AssetType::Numeric(NumericSpec::fractional(2))
     );
 }
diff --git a/crates/iroha/tests/events/pipeline.rs b/crates/iroha/tests/events/pipeline.rs
index f0c990ad440..7bb4e1bd97f 100644
--- a/crates/iroha/tests/events/pipeline.rs
+++ b/crates/iroha/tests/events/pipeline.rs
@@ -1,6 +1,5 @@
 use std::time::Duration;

-use assert_matches::assert_matches;
 use eyre::Result;
 use futures_util::StreamExt;
 use iroha::data_model::{
@@ -54,20 +53,20 @@ async fn test_with_instruction_and_status(
     // Then
     timeout(Duration::from_secs(5), async move {
-        assert_matches!(
-            events.next().await.unwrap().unwrap(),
-            EventBox::Pipeline(PipelineEventBox::Transaction(TransactionEvent {
-                status: TransactionStatus::Queued,
-                ..
-            }))
-        );
-        assert_matches!(
-            events.next().await.unwrap().unwrap(),
-            EventBox::Pipeline(PipelineEventBox::Transaction(TransactionEvent {
-                status,
-                ..
-            })) if status == *should_be
-        );
+        let EventBox::Pipeline(PipelineEventBox::Transaction(event)) =
+            events.next().await.unwrap().unwrap()
+        else {
+            panic!("Expected transaction event");
+        };
+        assert_eq!(*event.status(), TransactionStatus::Queued);
+
+        let EventBox::Pipeline(PipelineEventBox::Transaction(event)) =
+            events.next().await.unwrap().unwrap()
+        else {
+            panic!("Expected transaction event");
+        };
+
+        assert_eq!(event.status(), should_be);
     })
     .await?;
diff --git a/crates/iroha/tests/extra_functional/multiple_blocks_created.rs b/crates/iroha/tests/extra_functional/multiple_blocks_created.rs
index 96e45f4185d..8dc7a00086c 100644
--- a/crates/iroha/tests/extra_functional/multiple_blocks_created.rs
+++ b/crates/iroha/tests/extra_functional/multiple_blocks_created.rs
@@ -26,7 +26,7 @@ async fn multiple_blocks_created() -> Result<()> {
     // Given
     let network = NetworkBuilder::new()
         .with_peers(4)
-        .with_genesis_instruction(SetParameter(Parameter::Block(
+        .with_genesis_instruction(SetParameter::new(Parameter::Block(
             BlockParameter::MaxTransactions(NonZero::new(N_MAX_TXS_PER_BLOCK).expect("valid")),
         )))
         .with_pipeline_time(Duration::from_secs(1))
diff --git a/crates/iroha/tests/pagination.rs b/crates/iroha/tests/pagination.rs
index b0be0fb713e..2c8b40405d9 100644
--- a/crates/iroha/tests/pagination.rs
+++ b/crates/iroha/tests/pagination.rs
@@ -15,10 +15,7 @@ fn limits_should_work() -> Result<()> {
     let vec = client
         .query(FindAssetsDefinitions::new())
-        .with_pagination(Pagination {
-            limit: Some(nonzero!(7_u64)),
-            offset: 1,
-        })
+        .with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
         .execute_all()?;
     assert_eq!(vec.len(), 7);
     Ok(())
@@ -33,10 +30,7 @@ fn reported_length_should_be_accurate() -> Result<()> {
     let mut iter = client
         .query(FindAssetsDefinitions::new())
-        .with_pagination(Pagination {
-            limit: Some(nonzero!(7_u64)),
-            offset: 1,
-        })
+        .with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
         .with_fetch_size(FetchSize::new(Some(nonzero!(3_u64))))
         .execute()?;
@@ -68,10 +62,7 @@ fn fetch_size_should_work() -> Result<()> {
     let query = QueryWithParams::new(
         QueryWithFilter::new(FindAssetsDefinitions::new(), CompoundPredicate::PASS).into(),
         QueryParams::new(
-            Pagination {
-                limit: Some(nonzero!(7_u64)),
-                offset: 1,
-            },
+            Pagination::new(Some(nonzero!(7_u64)), 1),
             Sorting::default(),
             FetchSize::new(Some(nonzero!(3_u64))),
         ),
diff --git a/crates/iroha/tests/queries/asset.rs b/crates/iroha/tests/queries/asset.rs
index a249df880cb..b02761a6221 100644
--- a/crates/iroha/tests/queries/asset.rs
+++ b/crates/iroha/tests/queries/asset.rs
@@ -78,7 +78,7 @@ fn find_asset_total_quantity() -> Result<()> {
             .query(FindAssetsDefinitions::new())
             .filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
             .execute_single()?
-            .total_quantity)
+            .total_quantity())
     };

     // Assert that initial total quantity before any registrations and unregistrations is zero
@@ -154,7 +154,7 @@ where
             .query(FindAssetsDefinitions::new())
             .filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
             .execute_single()?
-            .total_quantity)
+            .total_quantity())
     };

     // Assert that initial total quantity before any burns and mints is zero
diff --git a/crates/iroha/tests/set_parameter.rs b/crates/iroha/tests/set_parameter.rs
index f06fc3c704a..6d38ffee885 100644
--- a/crates/iroha/tests/set_parameter.rs
+++ b/crates/iroha/tests/set_parameter.rs
@@ -9,22 +9,22 @@ use nonzero_ext::nonzero;
 #[test]
 fn can_change_parameter_value() -> Result<()> {
     let (network, _rt) = NetworkBuilder::new()
-        .with_genesis_instruction(SetParameter(Parameter::Block(
+        .with_genesis_instruction(SetParameter::new(Parameter::Block(
             BlockParameter::MaxTransactions(nonzero!(16u64)),
         )))
         .start_blocking()?;
     let test_client = network.client();

     let old_params: Parameters = test_client.query_single(FindParameters::new())?;
-    assert_eq!(old_params.block.max_transactions, nonzero!(16u64));
+    assert_eq!(old_params.block().max_transactions(), nonzero!(16u64));

     let new_value = nonzero!(32u64);
-    test_client.submit_blocking(SetParameter(Parameter::Block(
+    test_client.submit_blocking(SetParameter::new(Parameter::Block(
         BlockParameter::MaxTransactions(new_value),
     )))?;

     let params = test_client.query_single(FindParameters::new())?;
-    assert_eq!(params.block.max_transactions, new_value);
+    assert_eq!(params.block().max_transactions(), new_value);

     Ok(())
 }
diff --git a/crates/iroha/tests/sorting.rs b/crates/iroha/tests/sorting.rs
index 9921a42a218..a5983b99dc1 100644
--- a/crates/iroha/tests/sorting.rs
+++ b/crates/iroha/tests/sorting.rs
@@ -22,10 +22,7 @@ fn correct_pagination_assets_after_creating_new_one() {
     const N_ASSETS: usize = 12;
     // 0 < pagination.start < missing_idx < pagination.end < N_ASSETS
     let missing_indices = vec![N_ASSETS / 2];
-    let pagination = Pagination {
-        limit: Some(nonzero!(N_ASSETS as u64 / 3)),
-        offset: N_ASSETS as u64 / 3,
-    };
+    let pagination = Pagination::new(Some(nonzero!(N_ASSETS as u64 / 3)), N_ASSETS as u64 / 3);
     let xor_filter =
         AssetPredicateBox::build(|asset| asset.id.definition_id.name.starts_with("xor"));
diff --git a/crates/iroha/tests/triggers/by_call_trigger.rs b/crates/iroha/tests/triggers/by_call_trigger.rs
index 8c537be33ae..81dd9adcfee 100644
--- a/crates/iroha/tests/triggers/by_call_trigger.rs
+++ b/crates/iroha/tests/triggers/by_call_trigger.rs
@@ -341,7 +341,7 @@ fn only_account_with_permission_can_register_trigger() -> Result<()> {
         .filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
         .execute_single()?;

-    assert_eq!(found_trigger.id, trigger_id);
+    assert_eq!(*found_trigger.id(), trigger_id);

     Ok(())
 }
@@ -374,17 +374,17 @@ fn unregister_trigger() -> Result<()> {
             .query(FindTriggers::new())
             .filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
             .execute_single()?;
-        let found_action = found_trigger.action;
-        let Executable::Instructions(found_instructions) = found_action.executable else {
+        let found_action = found_trigger.action();
+        let Executable::Instructions(found_instructions) = found_action.executable() else {
             panic!("Expected instructions");
         };
         let found_trigger = Trigger::new(
-            found_trigger.id,
+            found_trigger.id().clone(),
             Action::new(
-                Executable::Instructions(found_instructions),
-                found_action.repeats,
-                found_action.authority,
-                found_action.filter,
+                Executable::Instructions(found_instructions.to_owned()),
+                found_action.repeats(),
+                found_action.authority().clone(),
+                found_action.filter().clone(),
             ),
         );
         assert_eq!(found_trigger, trigger);
diff --git a/crates/iroha/tests/triggers/orphans.rs b/crates/iroha/tests/triggers/orphans.rs
index 725007dbc64..8f8e018391c 100644
--- a/crates/iroha/tests/triggers/orphans.rs
+++ b/crates/iroha/tests/triggers/orphans.rs
@@ -5,13 +5,12 @@ use iroha::{
 use iroha_test_network::*;
 use iroha_test_samples::gen_account_in;

-fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<TriggerId> {
+fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<Trigger> {
     iroha
         .query(FindTriggers::new())
         .filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
         .execute_single()
         .ok()
-        .map(|trigger| trigger.id)
 }

 fn set_up_trigger(iroha: &Client) -> eyre::Result<(DomainId, AccountId, TriggerId)> {
@@ -45,9 +44,10 @@ fn trigger_must_be_removed_on_action_authority_account_removal() -> eyre::Result<()> {
     let (network, _rt) = NetworkBuilder::new().start_blocking()?;
     let iroha = network.client();
     let (_, the_one_who_fails, fail_on_account_events) = set_up_trigger(&iroha)?;
+    let trigger = find_trigger(&iroha, &fail_on_account_events);
     assert_eq!(
-        find_trigger(&iroha, &fail_on_account_events),
-        Some(fail_on_account_events.clone())
+        trigger.as_ref().map(Identifiable::id),
+        Some(&fail_on_account_events.clone())
     );
     iroha.submit_blocking(Unregister::account(the_one_who_fails.clone()))?;
     assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
@@ -59,9 +59,10 @@ fn trigger_must_be_removed_on_action_authority_domain_removal() -> eyre::Result<()> {
     let (network, _rt) = NetworkBuilder::new().start_blocking()?;
     let iroha = network.client();
     let (failand, _, fail_on_account_events) = set_up_trigger(&iroha)?;
+    let trigger = find_trigger(&iroha, &fail_on_account_events);
     assert_eq!(
-        find_trigger(&iroha, &fail_on_account_events),
-        Some(fail_on_account_events.clone())
+        trigger.as_ref().map(Identifiable::id),
+        Some(&fail_on_account_events.clone())
     );
     iroha.submit_blocking(Unregister::domain(failand.clone()))?;
     assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
diff --git a/crates/iroha/tests/triggers/time_trigger.rs b/crates/iroha/tests/triggers/time_trigger.rs
index 2d09fe72e24..03c7418b740 100644
--- a/crates/iroha/tests/triggers/time_trigger.rs
+++ b/crates/iroha/tests/triggers/time_trigger.rs
@@ -40,9 +40,7 @@ fn mint_asset_after_3_sec() -> Result<()> {
     let account_id = ALICE_ID.clone();
     let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone());

-    let init_quantity = test_client.query_single(FindAssetQuantityById {
-        id: asset_id.clone(),
-    })?;
+    let init_quantity = test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;

     let start_time = curr_time();
     assert!(
@@ -64,18 +62,16 @@ fn mint_asset_after_3_sec() -> Result<()> {
     // Schedule start is in the future so trigger isn't executed after creating a new block
     test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;
-    let after_registration_quantity = test_client.query_single(FindAssetQuantityById {
-        id: asset_id.clone(),
-    })?;
+    let after_registration_quantity =
+        test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
     assert_eq!(init_quantity, after_registration_quantity);

     // Sleep long enough that trigger start is in the past
     std::thread::sleep(network.pipeline_time());
     test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;
-    let after_wait_quantity = test_client.query_single(FindAssetQuantityById {
-        id: asset_id.clone(),
-    })?;
+    let after_wait_quantity =
+        test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
     // Schedule is in the past now so trigger is executed
     assert_eq!(
         init_quantity.checked_add(1u32.into()).unwrap(),
@@ -168,7 +164,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> {
     let start_time = curr_time() + offset;
     let schedule = TimeSchedule::starting_at(start_time).with_period(TRIGGER_PERIOD);
-    let filter = TimeEventFilter(ExecutionTime::Schedule(schedule));
+    let filter = TimeEventFilter::new(ExecutionTime::Schedule(schedule));
     let register_trigger = Register::trigger(Trigger::new(
         "mint_nft_for_all".parse()?,
         Action::new(
diff --git a/crates/iroha/tests/tx_history.rs b/crates/iroha/tests/tx_history.rs
index e9333f35be6..850e519c0fd 100644
--- a/crates/iroha/tests/tx_history.rs
+++ b/crates/iroha/tests/tx_history.rs
@@ -41,10 +41,7 @@ fn client_has_rejected_and_accepted_txs_should_return_tx_history() -> Result<()> {
     let transactions = client
         .query(FindTransactions::new())
         .filter_with(|tx| tx.value.authority.eq(account_id.clone()))
-        .with_pagination(Pagination {
-            limit: Some(nonzero!(50_u64)),
-            offset: 1,
-        })
+        .with_pagination(Pagination::new(Some(nonzero!(50_u64)), 1))
         .execute_all()?;
     assert_eq!(transactions.len(), 50);
diff --git a/crates/iroha/tests/upgrade.rs b/crates/iroha/tests/upgrade.rs
index 4e456510dec..c6f2c04f995 100644
--- a/crates/iroha/tests/upgrade.rs
+++ b/crates/iroha/tests/upgrade.rs
@@ -146,10 +146,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
         .query(FindRoles::new())
         .execute_all()?
         .into_iter()
-        .find(|role| role.id == test_role_id)
+        .find(|role| *role.id() == test_role_id)
         .expect("Failed to find Role")
-        .permissions
-        .iter()
+        .permissions()
         .any(|permission| {
             CanUnregisterDomain::try_from(permission)
                 .is_ok_and(|permission| permission == can_unregister_domain)
@@ -179,10 +178,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
         .query(FindRoles::new())
         .execute_all()?
         .into_iter()
-        .find(|role| role.id == test_role_id)
+        .find(|role| *role.id() == test_role_id)
         .expect("Failed to find Role")
-        .permissions
-        .iter()
+        .permissions()
         .any(|permission| {
             CanUnregisterDomain::try_from(permission)
                 .is_ok_and(|permission| permission == can_unregister_domain)
@@ -343,11 +341,10 @@ fn migration_should_cause_upgrade_event() {
         .await
         .unwrap();
     while let Some(event) = stream.try_next().await.unwrap() {
-        if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(ExecutorUpgrade {
-            new_data_model,
-        }))) = event
+        if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(executor_upgrade))) =
+            event
         {
-            assert!(!new_data_model.permissions.is_empty());
+            assert!(!executor_upgrade.new_data_model().permissions().is_empty());
             break;
         }
     }
diff --git a/crates/iroha_cli/Cargo.toml b/crates/iroha_cli/Cargo.toml
index 8e253e4b697..68727ce2d6d 100644
--- a/crates/iroha_cli/Cargo.toml
+++ b/crates/iroha_cli/Cargo.toml
@@ -29,7 +29,6 @@ path = "src/main.rs"
 [dependencies]
 iroha = { workspace = true }
 iroha_primitives = { workspace = true }
-iroha_config_base = { workspace = true }
 thiserror = { workspace = true }
 error-stack = { workspace = true, features = ["eyre"] }
diff --git a/crates/iroha_cli/src/main.rs b/crates/iroha_cli/src/main.rs
index 5aba6adc54d..3552960a5d7 100644
--- a/crates/iroha_cli/src/main.rs
+++ b/crates/iroha_cli/src/main.rs
@@ -17,8 +17,6 @@ use thiserror::Error;
 /// Re-usable clap `--metadata <PATH>` (`-m`) argument.
 /// Should be combined with `#[command(flatten)]` attr.
 #[derive(clap::Args, Debug, Clone)]
-// FIXME: `pub` is needed because Rust complains about "leaking private types"
-// when this type is used inside of modules. I don't know how to fix it.
 pub struct MetadataArgs {
     /// The JSON/JSON5 file with key-value metadata pairs
     #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))]
diff --git a/crates/iroha_codec/Cargo.toml b/crates/iroha_codec/Cargo.toml
index fad92c53da8..fd1bac9b464 100644
--- a/crates/iroha_codec/Cargo.toml
+++ b/crates/iroha_codec/Cargo.toml
@@ -11,13 +11,8 @@ license.workspace = true
 workspace = true

 [dependencies]
-iroha_data_model = { workspace = true, features = ["http"] }
-iroha_executor_data_model = { workspace = true }
-iroha_primitives = { workspace = true }
 iroha_schema = { workspace = true }
 iroha_schema_gen = { workspace = true }
-iroha_crypto = { workspace = true }
-iroha_version = { workspace = true }
 iroha_genesis = { workspace = true }

 clap = { workspace = true, features = ["derive", "cargo", "env", "string"] }
@@ -28,6 +23,9 @@ serde_json = { workspace = true, features = ["std"]}
 serde = { workspace = true }
 supports-color = { workspace = true }

+[dev-dependencies]
+iroha_data_model = { workspace = true }
+
 [build-dependencies]
 iroha_data_model = { workspace = true }
diff --git a/crates/iroha_config_base/src/toml.rs b/crates/iroha_config_base/src/toml.rs
index 2b7ab4adb1a..bbfd7547229 100644
--- a/crates/iroha_config_base/src/toml.rs
+++ b/crates/iroha_config_base/src/toml.rs
@@ -289,6 +289,7 @@ impl<'a> From<&'a mut Table> for Writer<'a> {
 /// Extension trait to implement writing with [`Writer`] directly into [`Table`] in a chained manner.
 pub trait WriteExt: Sized {
     /// See [`Writer::write`].
+    #[must_use]
     fn write(self, path: P, value: T) -> Self;
 }
diff --git a/crates/iroha_data_model/src/asset.rs b/crates/iroha_data_model/src/asset.rs
index f0edc5dc68e..89dd37fbebb 100644
--- a/crates/iroha_data_model/src/asset.rs
+++ b/crates/iroha_data_model/src/asset.rs
@@ -130,7 +130,7 @@ mod model {
         /// The total amount of this asset in existence.
         ///
         /// For numeric assets - it is the sum of all asset values. For store assets - it is the count of all assets.
-        #[getset(get = "pub")]
+        #[getset(get_copy = "pub")]
         pub total_quantity: Numeric,
     }
diff --git a/crates/iroha_data_model/src/isi.rs b/crates/iroha_data_model/src/isi.rs
index 45f5a3b318b..5f0e6b5ae1f 100644
--- a/crates/iroha_data_model/src/isi.rs
+++ b/crates/iroha_data_model/src/isi.rs
@@ -1232,6 +1232,7 @@ pub mod error {
     #[model]
     mod model {
+        use getset::Getters;
         use serde::{Deserialize, Serialize};

         use super::*;
@@ -1469,6 +1470,7 @@
             Eq,
             PartialOrd,
             Ord,
+            Getters,
             Deserialize,
             Serialize,
             Decode,
@@ -1479,12 +1481,27 @@
         #[ffi_type]
         pub struct RepetitionError {
             /// Instruction type
+            #[getset(get = "pub")]
             pub instruction: InstructionType,
             /// Id of the object being repeated
             pub id: IdBox,
         }
     }

+    impl<T> Mismatch<T> {
+        /// The value that is needed for normal execution
+        pub fn expected(&self) -> &T {
+            &self.expected
+        }
+    }
+
+    impl<T> Mismatch<T> {
+        /// The value that caused the error
+        pub fn actual(&self) -> &T {
+            &self.actual
+        }
+    }
+
     impl From<TypeError> for InstructionExecutionError {
         fn from(err: TypeError) -> Self {
             Self::Evaluate(InstructionEvaluationError::Type(err))
diff --git a/crates/iroha_data_model/src/transaction.rs b/crates/iroha_data_model/src/transaction.rs
index 1df809d685e..01ba60744c2 100644
--- a/crates/iroha_data_model/src/transaction.rs
+++ b/crates/iroha_data_model/src/transaction.rs
@@ -9,8 +9,6 @@ use core::{
 };

 use derive_more::{DebugCustom, Display};
-#[cfg(feature = "http")]
-pub use http::*;
 use iroha_crypto::{Signature, SignatureOf};
 use iroha_data_model_derive::model;
 use iroha_macro::FromVariant;
@@ -151,6 +149,15 @@ mod model {
         /// Payload of the transaction.
         pub(super) payload: TransactionPayload,
     }
+
+    /// Structure that represents the initial state of a transaction before the transaction receives any signatures.
+    #[derive(Debug, Clone)]
+    #[repr(transparent)]
+    #[must_use]
+    pub struct TransactionBuilder {
+        /// [`Transaction`] payload.
+        pub(super) payload: TransactionPayload,
+    }
 }

 impl FromIterator<InstructionBox> for Executable {
@@ -289,6 +296,132 @@ impl TransactionSignature {
     }
 }

+impl TransactionBuilder {
+    #[cfg(feature = "std")]
+    fn new_with_time(chain: ChainId, authority: AccountId, creation_time_ms: u64) -> Self {
+        Self {
+            payload: TransactionPayload {
+                chain,
+                authority,
+                creation_time_ms,
+                nonce: None,
+                time_to_live_ms: None,
+                instructions: Vec::<InstructionBox>::new().into(),
+                metadata: Metadata::default(),
+            },
+        }
+    }
+
+    /// Construct [`Self`], using the time from [`TimeSource`]
+    // we don't want to expose this to non-tests
+    #[inline]
+    #[cfg(all(feature = "std", feature = "transparent_api"))]
+    pub fn new_with_time_source(
+        chain_id: ChainId,
+        authority: AccountId,
+        time_source: &iroha_primitives::time::TimeSource,
+    ) -> Self {
+        let creation_time_ms = time_source
+            .get_unix_time()
+            .as_millis()
+            .try_into()
+            .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
+
+        Self::new_with_time(chain_id, authority, creation_time_ms)
+    }
+
+    /// Construct [`Self`].
+    #[inline]
+    #[cfg(feature = "std")]
+    pub fn new(chain_id: ChainId, authority: AccountId) -> Self {
+        use std::time::SystemTime;
+
+        // can't delegate to `new_with_time_source`, because it's gated behind "transparent_api"
+        let creation_time_ms = SystemTime::now()
+            .duration_since(SystemTime::UNIX_EPOCH)
+            .unwrap()
+            .as_millis()
+            .try_into()
+            .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
+        Self::new_with_time(chain_id, authority, creation_time_ms)
+    }
+}
+
+impl TransactionBuilder {
+    /// Set instructions for this transaction
+    pub fn with_instructions(
+        mut self,
+        instructions: impl IntoIterator,
+    ) -> Self {
+        self.payload.instructions = instructions
+            .into_iter()
+            .map(Into::into)
+            .collect::<Vec<InstructionBox>>()
+            .into();
+        self
+    }
+
+    /// Add wasm to this transaction
+    pub fn with_wasm(mut self, wasm: WasmSmartContract) -> Self {
+        self.payload.instructions = wasm.into();
+        self
+    }
+
+    /// Set executable for this transaction
+    pub fn with_executable(mut self, executable: Executable) -> Self {
+        self.payload.instructions = executable;
+        self
+    }
+
+    /// Adds metadata to the `Transaction`
+    pub fn with_metadata(mut self, metadata: Metadata) -> Self {
+        self.payload.metadata = metadata;
+        self
+    }
+
+    /// Set nonce for [`Transaction`]
+    pub fn set_nonce(&mut self, nonce: NonZeroU32) -> &mut Self {
+        self.payload.nonce = Some(nonce);
+        self
+    }
+
+    /// Set time-to-live for [`Transaction`]
+    pub fn set_ttl(&mut self, time_to_live: Duration) -> &mut Self {
+        let ttl: u64 = time_to_live
+            .as_millis()
+            .try_into()
+            .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
+
+        self.payload.time_to_live_ms = if ttl == 0 {
+            // TODO: This is not correct, 0 is not the same as None
+            None
+        } else {
+            Some(NonZeroU64::new(ttl).expect("Can't be 0"))
+        };
+
+        self
+    }
+
+    /// Set creation time of transaction
+    pub fn set_creation_time(&mut self, value: Duration) -> &mut Self {
+        self.payload.creation_time_ms = u64::try_from(value.as_millis())
+            .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
+        self
+    }
+
+    /// Sign transaction with provided key pair.
+    #[must_use]
+    pub fn sign(self, private_key: &iroha_crypto::PrivateKey) -> SignedTransaction {
+        let signature = TransactionSignature(SignatureOf::new(private_key, &self.payload));
+
+        SignedTransactionV1 {
+            signature,
+            payload: self.payload,
+        }
+        .into()
+    }
+}
+
 mod candidate {
     use parity_scale_codec::Input;
@@ -565,157 +698,11 @@ pub mod error {
     }
 }

-#[cfg(feature = "http")]
-mod http {
-    pub use self::model::*;
-    use super::*;
-
-    #[model]
-    mod model {
-        use super::*;
-
-        /// Structure that represents the initial state of a transaction before the transaction receives any signatures.
-        #[derive(Debug, Clone)]
-        #[repr(transparent)]
-        #[must_use]
-        pub struct TransactionBuilder {
-            /// [`Transaction`] payload.
-            pub(super) payload: TransactionPayload,
-        }
-    }
-
-    impl TransactionBuilder {
-        #[cfg(feature = "std")]
-        fn new_with_time(chain: ChainId, authority: AccountId, creation_time_ms: u64) -> Self {
-            Self {
-                payload: TransactionPayload {
-                    chain,
-                    authority,
-                    creation_time_ms,
-                    nonce: None,
-                    time_to_live_ms: None,
-                    instructions: Vec::<InstructionBox>::new().into(),
-                    metadata: Metadata::default(),
-                },
-            }
-        }
-
-        /// Construct [`Self`], using the time from [`TimeSource`]
-        // we don't want to expose this to non-tests
-        #[inline]
-        #[cfg(all(feature = "std", feature = "transparent_api"))]
-        pub fn new_with_time_source(
-            chain_id: ChainId,
-            authority: AccountId,
-            time_source: &iroha_primitives::time::TimeSource,
-        ) -> Self {
-            let creation_time_ms = time_source
-                .get_unix_time()
-                .as_millis()
-                .try_into()
-                .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
-
-            Self::new_with_time(chain_id, authority, creation_time_ms)
-        }
-
-        /// Construct [`Self`].
-        #[inline]
-        #[cfg(feature = "std")]
-        pub fn new(chain_id: ChainId, authority: AccountId) -> Self {
-            use std::time::SystemTime;
-
-            // can't delegate to `new_with_time_source`, because it's gated behind "transparent_api"
-            let creation_time_ms = SystemTime::now()
-                .duration_since(SystemTime::UNIX_EPOCH)
-                .unwrap()
-                .as_millis()
-                .try_into()
-                .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
-            Self::new_with_time(chain_id, authority, creation_time_ms)
-        }
-    }
-
-    impl TransactionBuilder {
-        /// Set instructions for this transaction
-        pub fn with_instructions(
-            mut self,
-            instructions: impl IntoIterator,
-        ) -> Self {
-            self.payload.instructions = instructions
-                .into_iter()
-                .map(Into::into)
-                .collect::<Vec<InstructionBox>>()
-                .into();
-            self
-        }
-
-        /// Add wasm to this transaction
-        pub fn with_wasm(mut self, wasm: WasmSmartContract) -> Self {
-            self.payload.instructions = wasm.into();
-            self
-        }
-
-        /// Set executable for this transaction
-        pub fn with_executable(mut self, executable: Executable) -> Self {
-            self.payload.instructions = executable;
-            self
-        }
-
-        /// Adds metadata to the `Transaction`
-        pub fn with_metadata(mut self, metadata: Metadata) -> Self {
-            self.payload.metadata = metadata;
-            self
-        }
-
-        /// Set nonce for [`Transaction`]
-        pub fn set_nonce(&mut self, nonce: NonZeroU32) -> &mut Self {
-            self.payload.nonce = Some(nonce);
-            self
-        }
-
-        /// Set time-to-live for [`Transaction`]
-        pub fn set_ttl(&mut self, time_to_live: Duration) -> &mut Self {
-            let ttl: u64 = time_to_live
-                .as_millis()
-                .try_into()
-                .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
-
-            self.payload.time_to_live_ms = if ttl == 0 {
-                // TODO: This is not correct, 0 is not the same as None
-                None
-            } else {
-                Some(NonZeroU64::new(ttl).expect("Can't be 0"))
-            };
-
-            self
-        }
-
-        /// Set creation time of transaction
-        pub fn set_creation_time(&mut self, value: Duration) -> &mut Self {
-            self.payload.creation_time_ms = u64::try_from(value.as_millis())
-                .expect("INTERNAL BUG: Unix timestamp exceedes u64::MAX");
-            self
-        }
-
-        /// Sign transaction with provided key pair.
-        #[must_use]
-        pub fn sign(self, private_key: &iroha_crypto::PrivateKey) -> SignedTransaction {
-            let signature = TransactionSignature(SignatureOf::new(private_key, &self.payload));
-
-            SignedTransactionV1 {
-                signature,
-                payload: self.payload,
-            }
-            .into()
-        }
-    }
-}
-
 /// The prelude re-exports most commonly used traits, structs and macros from this module.
 pub mod prelude {
-    #[cfg(feature = "http")]
-    pub use super::http::TransactionBuilder;
-    pub use super::{error::prelude::*, Executable, SignedTransaction, WasmSmartContract};
+    pub use super::{
+        error::prelude::*, Executable, SignedTransaction, TransactionBuilder, WasmSmartContract,
+    };
 }

 #[cfg(test)]
diff --git a/crates/iroha_data_model/src/trigger.rs b/crates/iroha_data_model/src/trigger.rs
index 34b6825fc01..a14e7ae85f4 100644
--- a/crates/iroha_data_model/src/trigger.rs
+++ b/crates/iroha_data_model/src/trigger.rs
@@ -160,8 +160,8 @@ pub mod action {
         /// The repeating scheme of the action. It's kept as part of the
         /// action and not inside the [`Trigger`] type, so that further
         /// sanity checking can be done.
-        pub fn repeats(&self) -> &Repeats {
-            &self.repeats
+        pub fn repeats(&self) -> Repeats {
+            self.repeats
         }

         /// Account executing this action
         pub fn authority(&self) -> &AccountId {
diff --git a/crates/iroha_data_model_derive/Cargo.toml b/crates/iroha_data_model_derive/Cargo.toml
index be1b79f628e..5b51fd9a2bc 100644
--- a/crates/iroha_data_model_derive/Cargo.toml
+++ b/crates/iroha_data_model_derive/Cargo.toml
@@ -22,7 +22,7 @@ manyhow = { workspace = true }
 iroha_macro_utils = { workspace = true }

 [dev-dependencies]
-iroha_data_model = { workspace = true, features = ["http"] }
+iroha_data_model = { workspace = true }
 iroha_schema = { workspace = true }
 parity-scale-codec = { workspace = true }
 derive_more = { workspace = true }
diff --git a/crates/iroha_genesis/Cargo.toml b/crates/iroha_genesis/Cargo.toml
index 16a01ddbe2f..0e2965edaa3 100644
--- a/crates/iroha_genesis/Cargo.toml
+++ b/crates/iroha_genesis/Cargo.toml
@@ -13,7 +13,7 @@ workspace = true
 [dependencies]
 iroha_crypto = { workspace = true }
 iroha_schema = { workspace = true }
-iroha_data_model = { workspace = true, features = ["http"] }
+iroha_data_model = { workspace = true, features = ["std"] }
 iroha_executor_data_model = { workspace = true }
 derive_more = { workspace = true, features = ["deref"] }
diff --git a/crates/iroha_p2p/Cargo.toml b/crates/iroha_p2p/Cargo.toml
index 6b80621f569..f5090b0df5f 100644
--- a/crates/iroha_p2p/Cargo.toml
+++ b/crates/iroha_p2p/Cargo.toml
@@ -14,9 +14,7 @@ workspace = true
 [dependencies]
 iroha_logger = { workspace = true }
 iroha_crypto = { workspace = true, default-features = true }
-iroha_data_model = { workspace = true, default-features = true, features = [
-    "transparent_api",
-] }
+iroha_data_model = { workspace = true, default-features = true }
 iroha_primitives = { workspace = true }
 iroha_config = { workspace = true }
 iroha_futures = { workspace = true }
@@ -39,4 +37,3 @@ bytes = { workspace = true }

 [dev-dependencies]
 iroha_config_base = { workspace = true }
-iroha_test_network = { workspace = true }
diff --git a/crates/iroha_p2p/src/network.rs b/crates/iroha_p2p/src/network.rs
index cf29f4e1c1c..548e9d225d9 100644
--- a/crates/iroha_p2p/src/network.rs
+++ b/crates/iroha_p2p/src/network.rs
@@ -332,7 +332,7 @@ impl NetworkBase {
             .iter()
             // Peer is not connected but should
             .filter_map(|(peer, is_active)| {
-                (!self.peers.contains_key(&peer.public_key)
+                (!self.peers.contains_key(peer.public_key())
                     && !self
                         .connecting_peers
                         .values()
@@ -362,7 +362,7 @@ impl NetworkBase {
     fn connect_peer(&mut self, peer: &PeerId) {
         iroha_logger::trace!(
-            listen_addr = %self.listen_addr, peer.id.address = %peer.address,
+            listen_addr = %self.listen_addr, peer.id.address = %peer.address(),
             "Creating new peer actor",
         );
@@ -372,7 +372,7 @@ impl NetworkBase {
         let service_message_sender = self.service_message_sender.clone();
         connecting::(
             // NOTE: we intentionally use peer's address and our public key, it's used during handshake
-            peer.address.clone(),
+            peer.address().clone(),
             self.key_pair.clone(),
             conn_id,
             service_message_sender,
@@ -410,7 +410,7 @@ impl NetworkBase {
         }

         // Insert peer if peer not in peers yet or replace peer if it's disambiguator value is smaller than new one (simultaneous connections resolution rule)
-        match self.peers.get(&peer_id.public_key) {
+        match self.peers.get(peer_id.public_key()) {
             Some(peer) if peer.disambiguator > disambiguator => {
                 iroha_logger::debug!(
                     "Peer is disconnected due to simultaneous connection resolution policy"
@@ -428,7 +428,7 @@ impl NetworkBase {
         let ref_peer = RefPeer {
             handle: ready_peer_handle,
             conn_id: connection_id,
-            p2p_addr: peer_id.address.clone(),
+            p2p_addr: peer_id.address().clone(),
             disambiguator,
         };
         let _ = peer_message_sender.send(self.peer_message_sender.clone());
@@ -439,10 +439,10 @@ impl NetworkBase {
     fn peer_terminated(&mut self, Terminated { peer_id, conn_id }: Terminated) {
         self.connecting_peers.remove(&conn_id);
         if let Some(peer_id) = peer_id {
-            if let Some(peer) = self.peers.get(&peer_id.public_key) {
+            if let Some(peer) = self.peers.get(peer_id.public_key()) {
                 if peer.conn_id == conn_id {
                     iroha_logger::debug!(conn_id, peer=%peer_id, "Peer terminated");
-                    self.peers.remove(&peer_id.public_key);
+                    self.peers.remove(peer_id.public_key());
                     Self::remove_online_peer(&self.online_peers_sender, &peer_id);
                 }
             }
@@ -451,11 +451,11 @@ impl NetworkBase {
     fn post(&mut self, Post { data, peer_id }: Post) {
         iroha_logger::trace!(peer=%peer_id, "Post message");
-        match self.peers.get(&peer_id.public_key) {
+        match self.peers.get(peer_id.public_key()) {
             Some(peer) => {
                 if peer.handle.post(data).is_err() {
                     iroha_logger::error!(peer=%peer_id, "Failed to send message to peer");
-                    self.peers.remove(&peer_id.public_key);
+                    self.peers.remove(peer_id.public_key());
                     Self::remove_online_peer(&self.online_peers_sender, &peer_id);
                 }
             }
diff --git a/crates/iroha_p2p/tests/integration/p2p.rs b/crates/iroha_p2p/tests/integration/p2p.rs
index cd33e3843ea..c5911f92f27 100644
--- a/crates/iroha_p2p/tests/integration/p2p.rs
+++ b/crates/iroha_p2p/tests/integration/p2p.rs
@@ -301,12 +301,12 @@ async fn start_network(
     barrier: Arc,
     shutdown_signal: ShutdownSignal,
 ) -> (PeerId, NetworkHandle) {
-    info!(peer_addr = %peer.address, "Starting network");
+    info!(peer_addr = %peer.address(), "Starting network");

     // This actor will get the messages from other peers and increment the counter
     let actor = TestActor::start(messages);

-    let PeerId { address, .. } = peer.clone();
+    let address = peer.address().clone();
     let idle_timeout = Duration::from_secs(60);
     let config = Config {
         address: WithOrigin::inline(address),
@@ -329,14 +329,14 @@ async fn start_network(
     tokio::time::timeout(Duration::from_millis(10_000), async {
         let mut connections = network.wait_online_peers_update(HashSet::len).await;
         while conn_count != connections {
-            info!(peer_addr = %peer.address, %connections);
+            info!(peer_addr = %peer.address(), %connections);
             connections = network.wait_online_peers_update(HashSet::len).await;
         }
     })
     .await
     .expect("Failed to get all connections");
-    info!(peer_addr = %peer.address, %conn_count, "Got all connections!");
+    info!(peer_addr = %peer.address(), %conn_count, "Got all connections!");

     (peer, network)
 }
diff --git a/crates/iroha_schema_derive/src/trait_bounds.rs b/crates/iroha_schema_derive/src/trait_bounds.rs
index eebc2fe31ab..fd6d40d2bcb 100644
--- a/crates/iroha_schema_derive/src/trait_bounds.rs
+++ b/crates/iroha_schema_derive/src/trait_bounds.rs
@@ -130,7 +130,7 @@ pub fn add(
     // = match custom_trait_bound {
     //     Some(CustomTraitBound::SpecifiedBounds { bounds, .. }) => {
     //         generics.make_where_clause().predicates.extend(bounds);
-    //         return Ok(());
+    //         return;
     //     }
     //     Some(CustomTraitBound::SkipTypeParams { type_names, .. }) => {
     //         type_names.into_iter().collect::>()
diff --git a/crates/iroha_schema_gen/Cargo.toml b/crates/iroha_schema_gen/Cargo.toml
index 0c8a7511dc6..47822214c03 100644
--- a/crates/iroha_schema_gen/Cargo.toml
+++ b/crates/iroha_schema_gen/Cargo.toml
@@ -11,6 +11,7 @@ license.workspace = true
 workspace = true

 [dependencies]
+# TODO: `transparent_api` feature shouldn't be activated/required here
 iroha_data_model = { workspace = true, features = ["http", "transparent_api"] }
 iroha_executor_data_model = { workspace = true }
 iroha_multisig_data_model = { workspace = true }
diff --git a/crates/iroha_swarm/Cargo.toml b/crates/iroha_swarm/Cargo.toml
index 92f47d97e0b..8831c10bd73 100644
--- a/crates/iroha_swarm/Cargo.toml
+++ b/crates/iroha_swarm/Cargo.toml
@@ -15,14 +15,14 @@ iroha_data_model.workspace = true
 iroha_primitives.workspace = true
 color-eyre.workspace = true
 path-absolutize.workspace = true
-pathdiff.workspace = true
 owo-colors = { workspace = true, features = ["supports-colors"] }
 serde = { workspace = true, features = ["derive"] }
 clap = { workspace = true, features = ["derive"] }
-serde_yaml.workspace = true
 serde_with = { workspace = true, features = ["json", "macros", "hex"] }
-inquire.workspace = true
 displaydoc.workspace = true
+serde_yaml = "0.9.34"
+pathdiff = "0.2.1"
+inquire = "0.6.2"

 [dev-dependencies]
 serde_json.workspace = true
diff --git a/crates/iroha_telemetry_derive/Cargo.toml b/crates/iroha_telemetry_derive/Cargo.toml
index be453c38274..83973e6c86b 100644
--- a/crates/iroha_telemetry_derive/Cargo.toml
+++ b/crates/iroha_telemetry_derive/Cargo.toml
@@ -30,6 +30,4 @@ manyhow = { workspace = true }
 iroha_macro_utils = { workspace = true }

 [dev-dependencies]
-iroha_core = { workspace = true }
-
 trybuild = { workspace = true }
diff --git a/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.rs b/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.rs
index 3cbfe765c0e..29057ec507e 100644
--- a/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.rs
+++ b/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.rs
@@ -1,6 +1,7 @@
-use iroha_core::state::StateTransaction;
 use iroha_telemetry_derive::metrics;

+struct StateTransaction;
+
 #[metrics(+"test_query", "another_test_query_without_timing")]
 fn exequte(_state_transaction: &StateTransaction) -> Result<(), ()> {
     Ok(())
diff --git a/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.stderr b/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.stderr
index 0a80929d8ad..598a97db919 100644
--- a/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.stderr
+++ b/crates/iroha_telemetry_derive/tests/ui_fail/not_execute.stderr
@@ -1,5 +1,5 @@
 error: Function should be an `impl execute`
- --> tests/ui_fail/not_execute.rs:5:4
+ --> tests/ui_fail/not_execute.rs:6:4
   |
-5 | fn exequte(_state_transaction: &StateTransaction) -> Result<(), ()> {
+6 | fn exequte(_state_transaction: &StateTransaction) -> Result<(), ()> {
   |    ^^^^^^^
diff --git a/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.rs b/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.rs
index a970bd8aadf..790462027ee 100644
--- a/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.rs
+++ b/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.rs
@@ -1,8 +1,9 @@
 use iroha_telemetry_derive::metrics;
-use iroha_core::state::StateTransaction;

 type MyNotResult = Option;

+struct StateTransaction;
+
 #[metrics(+"test_query", "another_test_query_without_timing")]
 fn execute(_state_transaction: &StateTransaction) -> MyNotResult {
     None
diff --git a/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.stderr b/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.stderr
index ec4fed2ce5e..4eaf1b890ac 100644
--- a/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.stderr
+++ b/crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.stderr
@@ -1,5 +1,5 @@
 error: Should return `Result`. Found MyNotResult
- --> tests/ui_fail/not_return_result.rs:7:54
+ --> tests/ui_fail/not_return_result.rs:8:54
   |
-7 | fn execute(_state_transaction: &StateTransaction) -> MyNotResult {
+8 | fn execute(_state_transaction: &StateTransaction) -> MyNotResult {
   |                                                       ^^^^^^^^^^^
diff --git a/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.rs b/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.rs
index f60bdd6bc7c..071dd9d7f7d 100644
--- a/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.rs
+++ b/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.rs
@@ -1,6 +1,7 @@
-use iroha_core::state::StateTransaction;
 use iroha_telemetry_derive::metrics;

+struct StateTransaction;
+
 #[metrics(+"test_query", "another_test_query_without_timing")]
 fn execute(_state_transaction: &StateTransaction) {
     Ok::<(), ()>(());
diff --git a/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.stderr b/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.stderr
index f92d23ee205..df94191a7b0 100644
--- a/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.stderr
+++ b/crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.stderr
@@ -1,7 +1,7 @@
 error: `Fn` must return `Result`. Returns nothing instead.
- --> tests/ui_fail/return_nothing.rs:4:1
+ --> tests/ui_fail/return_nothing.rs:5:1
   |
-4 | #[metrics(+"test_query", "another_test_query_without_timing")]
+5 | #[metrics(+"test_query", "another_test_query_without_timing")]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   |
   = note: this error originates in the attribute macro `metrics` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/crates/iroha_test_network/Cargo.toml b/crates/iroha_test_network/Cargo.toml
index 9510da71959..98fd4f67099 100644
--- a/crates/iroha_test_network/Cargo.toml
+++ b/crates/iroha_test_network/Cargo.toml
@@ -8,7 +8,6 @@ authors.workspace = true
 license.workspace = true

 [dependencies]
-iroha_core.workspace = true
 iroha.workspace = true
 iroha_executor_data_model.workspace = true
diff --git a/crates/iroha_test_network/src/lib.rs b/crates/iroha_test_network/src/lib.rs
index 70190e9f660..a03e70561b9 100644
--- a/crates/iroha_test_network/src/lib.rs
+++ b/crates/iroha_test_network/src/lib.rs
@@ -23,7 +23,6 @@ use iroha_config::base::{
     read::ConfigReader,
     toml::{TomlSource, WriteExt as _, Writer as TomlWriter},
 };
-pub use iroha_core::state::StateReadOnly;
 use iroha_crypto::{ExposedPrivateKey, KeyPair, PrivateKey};
 use iroha_data_model::{
     events::pipeline::BlockEventFilter,
@@ -328,10 +327,10 @@ impl NetworkBuilder {
                 block_time = duration / 3;
                 commit_time = duration / 2;
                 extra_isi.extend([
-                    InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                    InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                         SumeragiParameter::BlockTimeMs(block_time.as_millis() as u64),
                     ))),
-                    InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                    InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                         SumeragiParameter::CommitTimeMs(commit_time.as_millis() as u64),
                     ))),
                 ]);
@@ -342,10 +341,10 @@ impl NetworkBuilder {
         let genesis = config::genesis(
             [
-                InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                     SumeragiParameter::BlockTimeMs(block_time.as_millis() as u64),
                 ))),
-                InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                     SumeragiParameter::CommitTimeMs(commit_time.as_millis() as u64),
                 ))),
             ]
diff --git a/crates/irohad/Cargo.toml b/crates/irohad/Cargo.toml
index 9e76ed838d7..ee75417b35d 100644
--- a/crates/irohad/Cargo.toml
+++ b/crates/irohad/Cargo.toml
@@ -40,7 +40,7 @@ maintenance = { status = "actively-developed" }
 iroha_core = { workspace = true }
 iroha_logger = { workspace = true }
 iroha_futures = { workspace = true }
-iroha_data_model = { workspace = true, features = ["http"] }
+iroha_data_model = { workspace = true }
 iroha_primitives = { workspace = true }
 iroha_telemetry = { workspace = true, optional = true }
 iroha_config = { workspace = true }
diff --git a/wasm/samples/executor_custom_instructions_complex/src/lib.rs b/wasm/samples/executor_custom_instructions_complex/src/lib.rs
index 58dc40b82ef..549c7e3c162 100644
--- a/wasm/samples/executor_custom_instructions_complex/src/lib.rs
+++ b/wasm/samples/executor_custom_instructions_complex/src/lib.rs
@@ -87,7 +87,7 @@ impl executor_custom_data_model::complex_isi::Context for Context<'_> {
             _ => unreachable!(),
         })?;

-        Ok(*asset_definition.total_quantity())
+        Ok(asset_definition.total_quantity())
     }
 };