diff --git a/Cargo.lock b/Cargo.lock index 027e85a9751..921bd1d5b87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1599,6 +1599,7 @@ name = "executor_custom_data_model" version = "2.0.0-pre-rc.21" dependencies = [ "iroha_data_model", + "iroha_executor", "iroha_schema", "serde", "serde_json", diff --git a/cli/src/lib.rs b/cli/src/lib.rs index 0f03eda9c36..ded6b11fbe0 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -294,8 +294,7 @@ impl Iroha { None } }.unwrap_or_else(|| { - State::from_config( - config.chain_wide, + State::new( world, Arc::clone(&kura), live_query_store_handle.clone(), diff --git a/cli/src/samples.rs b/cli/src/samples.rs index 5aaa54701a5..b03d96279bd 100644 --- a/cli/src/samples.rs +++ b/cli/src/samples.rs @@ -67,9 +67,8 @@ pub fn get_config_toml( .write(["sumeragi", "trusted_peers"], peers) .write(["network", "address"], DEFAULT_P2P_ADDR) .write(["network", "block_gossip_period_ms"], 500) - .write(["network", "block_gossip_max_size"], 1) + .write(["network", "block_gossip_size"], 1) .write(["torii", "address"], DEFAULT_TORII_ADDR) - .write(["chain_wide", "max_transactions_in_block"], 2) .write(["genesis", "public_key"], genesis_public_key) .write( ["genesis", "signed_file"], diff --git a/client/benches/tps/utils.rs b/client/benches/tps/utils.rs index 6fe35e80d3f..cca409724ae 100644 --- a/client/benches/tps/utils.rs +++ b/client/benches/tps/utils.rs @@ -6,7 +6,7 @@ use iroha::{ crypto::KeyPair, data_model::{ events::pipeline::{BlockEventFilter, BlockStatus}, - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, + parameter::BlockParameter, prelude::*, }, }; @@ -22,7 +22,7 @@ pub struct Config { pub peers: u32, /// Interval in microseconds between transactions to reduce load pub interval_us_per_tx: u64, - pub max_txs_per_block: u32, + pub block_limits: BlockParameter, pub blocks: u32, pub sample_size: u32, pub genesis_max_retries: u32, @@ -33,11 +33,7 @@ impl fmt::Display for Config { write!( f, "{}peers-{}interval_µs-{}max_txs-{}blocks-{}samples", - self.peers, - self.interval_us_per_tx, - self.max_txs_per_block, - self.blocks, - self.sample_size, + self.peers, self.interval_us_per_tx, self.block_limits, self.blocks, self.sample_size, ) } } @@ -55,11 +51,7 @@ impl Config { let clients = network.clients(); wait_for_genesis_committed_with_max_retries(&clients, 0, self.genesis_max_retries); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, self.max_txs_per_block)? - .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block(self.block_limits)))?; let unit_names = (UnitName::MIN..).take(self.peers as usize); let units = clients diff --git a/client/examples/register_1000_triggers.rs b/client/examples/register_1000_triggers.rs index 567dd9d3317..a7f31bd2962 100644 --- a/client/examples/register_1000_triggers.rs +++ b/client/examples/register_1000_triggers.rs @@ -1,10 +1,14 @@ //! Example of registering multiple triggers //! 
Used to show Iroha's trigger deduplication capabilities
 
+use std::num::NonZeroU64;
+
 use iroha::{
     client::Client,
+    crypto::KeyPair,
     data_model::{prelude::*, trigger::TriggerId},
 };
+use iroha_data_model::parameter::{Parameter, SmartContractParameter};
 use iroha_genesis::{GenesisBlock, GenesisBuilder};
 use iroha_primitives::unique_vec;
 use irohad::samples::{construct_executor, get_config};
@@ -18,17 +22,24 @@ use tokio::runtime::Runtime;
 fn generate_genesis(
     num_triggers: u32,
     chain_id: ChainId,
-    genesis_key_pair: &iroha_crypto::KeyPair,
+    genesis_key_pair: &KeyPair,
     topology: Vec<PeerId>,
 ) -> Result<GenesisBlock, Box<dyn std::error::Error>> {
-    let builder = GenesisBuilder::default();
+    let builder = GenesisBuilder::default()
+        .append_instruction(SetParameter::new(Parameter::Executor(
+            SmartContractParameter::Fuel(NonZeroU64::MAX),
+        )))
+        .append_instruction(SetParameter::new(Parameter::Executor(
+            SmartContractParameter::Memory(NonZeroU64::MAX),
+        )));
 
-    let wasm =
-        iroha_wasm_builder::Builder::new("tests/integration/smartcontracts/mint_rose_trigger")
-            .show_output()
-            .build()?
-            .optimize()?
-            .into_bytes()?;
+    let wasm = iroha_wasm_builder::Builder::new(
+        "client/tests/integration/smartcontracts/mint_rose_trigger",
+    )
+    .show_output()
+    .build()?
+    .optimize()?
+    .into_bytes()?;
 
     let wasm = WasmSmartContract::from_compiled(wasm);
     let (account_id, _account_keypair) = gen_account_in("wonderland");
@@ -54,7 +65,7 @@ fn generate_genesis(
         })
         .fold(builder, GenesisBuilder::append_instruction);
 
-    let executor = construct_executor("../default_executor").expect("Failed to construct executor");
+    let executor = construct_executor("default_executor").expect("Failed to construct executor");
 
     Ok(builder.build_and_sign(executor, chain_id, genesis_key_pair, topology))
 }
@@ -64,17 +75,13 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
     let chain_id = get_chain_id();
     let genesis_key_pair = get_key_pair(test_network::Signatory::Genesis);
     let topology = vec![peer.id.clone()];
-    let mut configuration = get_config(
+    let configuration = get_config(
        unique_vec![peer.id.clone()],
         chain_id.clone(),
         get_key_pair(test_network::Signatory::Peer),
         genesis_key_pair.public_key(),
     );
 
-    // Increase executor limits for large genesis
-    configuration.chain_wide.executor_runtime.fuel_limit = u64::MAX;
-    configuration.chain_wide.executor_runtime.max_memory = u32::MAX.into();
-
     let genesis = generate_genesis(1_000_u32, chain_id, &genesis_key_pair, topology)?;
 
     let builder = PeerBuilder::new()
diff --git a/client/examples/tutorial.rs b/client/examples/tutorial.rs
index 1589b8d78ad..4718137ab0c 100644
--- a/client/examples/tutorial.rs
+++ b/client/examples/tutorial.rs
@@ -34,7 +34,7 @@ fn domain_registration_test(config: Config) -> Result<(), Error> {
     use iroha::{
         client::Client,
         data_model::{
-            metadata::UnlimitedMetadata,
+            metadata::Metadata,
             prelude::{Domain, DomainId, InstructionBox, Register},
         },
     };
@@ -57,7 +57,7 @@ fn domain_registration_test(config: Config) -> Result<(), Error> {
 
     // #region domain_register_example_prepare_tx
     // Prepare a transaction
-    let metadata = UnlimitedMetadata::default();
+    let metadata = Metadata::default();
     let instructions: Vec<InstructionBox> = vec![create_looking_glass.into()];
     let tx = iroha.build_transaction(instructions, metadata);
     // #endregion domain_register_example_prepare_tx
@@ -101,7 +101,7 @@ fn account_registration_test(config: Config) -> Result<(), Error> {
         client::Client,
         crypto::KeyPair,
         data_model::{
-            metadata::UnlimitedMetadata,
+            metadata::Metadata,
             prelude::{Account, AccountId, InstructionBox, Register},
         },
     };
@@ -127,7 +127,7 @@ fn 
account_registration_test(config: Config) -> Result<(), Error> { // #region register_account_prepare_tx // Prepare a transaction using the // Account's RegisterBox - let metadata = UnlimitedMetadata::new(); + let metadata = Metadata::default(); let instructions: Vec = vec![create_account.into()]; let tx = iroha.build_transaction(instructions, metadata); // #endregion register_account_prepare_tx diff --git a/client/src/client.rs b/client/src/client.rs index cb88d6d0d11..624f454d82c 100644 --- a/client/src/client.rs +++ b/client/src/client.rs @@ -332,6 +332,7 @@ impl_query_output! { crate::data_model::executor::ExecutorDataModel, crate::data_model::trigger::Trigger, crate::data_model::prelude::Numeric, + crate::data_model::parameter::Parameters, } /// Iroha client @@ -453,7 +454,7 @@ impl Client { pub fn build_transaction( &self, instructions: impl Into, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> SignedTransaction { let tx_builder = TransactionBuilder::new(self.chain.clone(), self.account.clone()); @@ -510,7 +511,7 @@ impl Client { &self, instructions: impl IntoIterator, ) -> Result> { - self.submit_all_with_metadata(instructions, UnlimitedMetadata::new()) + self.submit_all_with_metadata(instructions, Metadata::default()) } /// Instructions API entry point. Submits one Iroha Special Instruction to `Iroha` peers. @@ -522,7 +523,7 @@ impl Client { pub fn submit_with_metadata( &self, instruction: impl Instruction, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> Result> { self.submit_all_with_metadata([instruction], metadata) } @@ -536,7 +537,7 @@ impl Client { pub fn submit_all_with_metadata( &self, instructions: impl IntoIterator, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> Result> { self.submit_transaction(&self.build_transaction(instructions, metadata)) } @@ -719,7 +720,7 @@ impl Client { &self, instructions: impl IntoIterator, ) -> Result> { - self.submit_all_blocking_with_metadata(instructions, UnlimitedMetadata::new()) + self.submit_all_blocking_with_metadata(instructions, Metadata::default()) } /// Submits and waits until the transaction is either rejected or committed. 
@@ -731,7 +732,7 @@ impl Client {
     pub fn submit_blocking_with_metadata(
         &self,
         instruction: impl Instruction,
-        metadata: UnlimitedMetadata,
+        metadata: Metadata,
     ) -> Result<HashOf<SignedTransaction>> {
         self.submit_all_blocking_with_metadata(vec![instruction.into()], metadata)
     }
@@ -745,7 +746,7 @@ impl Client {
     pub fn submit_all_blocking_with_metadata(
         &self,
         instructions: impl IntoIterator,
-        metadata: UnlimitedMetadata,
+        metadata: Metadata,
     ) -> Result<HashOf<SignedTransaction>> {
         let transaction = self.build_transaction(instructions, metadata);
         self.submit_transaction_blocking(&transaction)
@@ -1621,7 +1622,7 @@ mod tests {
         });
 
         let build_transaction =
-            || client.build_transaction(Vec::<InstructionBox>::new(), UnlimitedMetadata::new());
+            || client.build_transaction(Vec::<InstructionBox>::new(), Metadata::default());
         let tx1 = build_transaction();
         let tx2 = build_transaction();
         assert_ne!(tx1.hash(), tx2.hash());
diff --git a/client/src/config/user.rs b/client/src/config/user.rs
index 000ab2a2dd8..71bf826d4d3 100644
--- a/client/src/config/user.rs
+++ b/client/src/config/user.rs
@@ -6,11 +6,13 @@ use iroha_config_base::{
     util::{DurationMs, Emitter, EmitterResultExt},
     ReadConfig, WithOrigin,
 };
-use iroha_crypto::{KeyPair, PrivateKey, PublicKey};
-use iroha_data_model::prelude::{AccountId, ChainId, DomainId};
 use url::Url;
 
-use crate::config::BasicAuth;
+use crate::{
+    config::BasicAuth,
+    crypto::{KeyPair, PrivateKey, PublicKey},
+    data_model::prelude::{AccountId, ChainId, DomainId},
+};
 
 /// Root of the user configuration
 #[derive(Clone, Debug, ReadConfig)]
diff --git a/client/tests/integration/add_domain.rs b/client/tests/integration/add_domain.rs
deleted file mode 100644
index 514e18b85d6..00000000000
--- a/client/tests/integration/add_domain.rs
+++ /dev/null
@@ -1,37 +0,0 @@
-use std::thread;
-
-use eyre::Result;
-use iroha::{client, data_model::prelude::*};
-use iroha_config::parameters::actual::Root as Config;
-use test_network::*;
-
-#[test]
-// This test suite is also covered at the UI level in the iroha_cli tests
-// in test_register_domains.py
-fn client_add_domain_with_name_length_more_than_limit_should_not_commit_transaction() -> Result<()>
-{
-    let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_500).start_with_runtime();
-    wait_for_genesis_committed(&vec![test_client.clone()], 0);
-    let pipeline_time = Config::pipeline_time();
-
-    // Given
-
-    let normal_domain_id: DomainId = "sora".parse()?;
-    let create_domain = Register::domain(Domain::new(normal_domain_id.clone()));
-    test_client.submit(create_domain)?;
-
-    let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(14)).parse()?;
-    let create_domain = Register::domain(Domain::new(too_long_domain_name.clone()));
-    test_client.submit(create_domain)?;
-
-    thread::sleep(pipeline_time * 2);
-
-    assert!(test_client
-        .request(client::domain::by_id(normal_domain_id))
-        .is_ok());
-    assert!(test_client
-        .request(client::domain::by_id(too_long_domain_name))
-        .is_err());
-
-    Ok(())
-}
diff --git a/client/tests/integration/asset.rs b/client/tests/integration/asset.rs
index 85aecc3e6df..a1f8a9d2115 100644
--- a/client/tests/integration/asset.rs
+++ b/client/tests/integration/asset.rs
@@ -106,7 +106,7 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() ->
     let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid");
     let create_asset =
         Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone()));
-    let metadata = iroha::data_model::metadata::UnlimitedMetadata::default();
+    let metadata = 
iroha::data_model::metadata::Metadata::default(); //When let quantity = numeric!(200); let mint = Mint::asset_numeric( @@ -137,7 +137,7 @@ fn client_add_big_asset_quantity_to_existing_asset_should_increase_asset_amount( let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); - let metadata = iroha::data_model::metadata::UnlimitedMetadata::default(); + let metadata = iroha::data_model::metadata::Metadata::default(); //When let quantity = Numeric::new(2_u128.pow(65), 0); let mint = Mint::asset_numeric( @@ -168,7 +168,7 @@ fn client_add_asset_with_decimal_should_increase_asset_amount() -> Result<()> { let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()); let create_asset = Register::asset_definition(asset_definition); - let metadata = iroha::data_model::metadata::UnlimitedMetadata::default(); + let metadata = iroha::data_model::metadata::Metadata::default(); //When let quantity = numeric!(123.456); diff --git a/client/tests/integration/asset_propagation.rs b/client/tests/integration/asset_propagation.rs index bcf99a5ca9c..8e6984ac0ca 100644 --- a/client/tests/integration/asset_propagation.rs +++ b/client/tests/integration/asset_propagation.rs @@ -3,12 +3,10 @@ use std::{str::FromStr as _, thread}; use eyre::Result; use iroha::{ client::{self, QueryResult}, - data_model::{ - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, - prelude::*, - }, + data_model::{parameter::BlockParameter, prelude::*}, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use test_network::*; use test_samples::gen_account_in; @@ -22,11 +20,9 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount_on_a wait_for_genesis_committed(&network.clients(), 0); let pipeline_time = Config::pipeline_time(); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
- .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )))?; let create_domain: InstructionBox = Register::domain(Domain::new(DomainId::from_str("domain")?)).into(); diff --git a/client/tests/integration/events/data.rs b/client/tests/integration/events/data.rs index 8fea7736954..d623bef784d 100644 --- a/client/tests/integration/events/data.rs +++ b/client/tests/integration/events/data.rs @@ -151,7 +151,7 @@ fn transaction_execution_should_produce_events( // submit transaction to produce events init_receiver.recv()?; - let transaction = client.build_transaction(executable, UnlimitedMetadata::new()); + let transaction = client.build_transaction(executable, Metadata::default()); client.submit_transaction_blocking(&transaction)?; // assertion diff --git a/client/tests/integration/events/pipeline.rs b/client/tests/integration/events/pipeline.rs index f9b0817c252..f57d4382563 100644 --- a/client/tests/integration/events/pipeline.rs +++ b/client/tests/integration/events/pipeline.rs @@ -1,7 +1,4 @@ -use std::{ - num::NonZeroUsize, - thread::{self, JoinHandle}, -}; +use std::thread::{self, JoinHandle}; use eyre::Result; use iroha::{ @@ -11,7 +8,7 @@ use iroha::{ BlockEvent, BlockEventFilter, BlockStatus, TransactionEventFilter, TransactionStatus, }, isi::error::InstructionExecutionError, - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, + parameter::BlockParameter, prelude::*, transaction::error::TransactionRejectionReason, ValidationFail, @@ -19,6 +16,7 @@ use iroha::{ }; use iroha_config::parameters::actual::Root as Config; use iroha_data_model::query::error::FindError; +use nonzero_ext::nonzero; use test_network::*; // Needed to re-enable ignored tests. @@ -59,15 +57,13 @@ fn test_with_instruction_and_status_and_port( wait_for_genesis_committed(&clients, 0); let pipeline_time = Config::pipeline_time(); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
- .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )))?; // Given let submitter = client; - let transaction = submitter.build_transaction(instruction, UnlimitedMetadata::new()); + let transaction = submitter.build_transaction(instruction, Metadata::default()); let hash = transaction.hash(); let mut handles = Vec::new(); for listener in clients { @@ -133,8 +129,6 @@ fn applied_block_must_be_available_in_kura() { .as_ref() .expect("Must be some") .kura() - .get_block_by_height( - NonZeroUsize::new(event.header().height().try_into().unwrap()).unwrap(), - ) + .get_block_by_height(event.header().height().try_into().unwrap()) .expect("Block applied event was received earlier"); } diff --git a/client/tests/integration/extra_functional/genesis.rs b/client/tests/integration/extra_functional/genesis.rs index f0c0507e497..eb2da99b843 100644 --- a/client/tests/integration/extra_functional/genesis.rs +++ b/client/tests/integration/extra_functional/genesis.rs @@ -1,4 +1,4 @@ -use iroha_data_model::{ +use iroha::data_model::{ domain::{Domain, DomainId}, isi::Register, }; diff --git a/client/tests/integration/extra_functional/multiple_blocks_created.rs b/client/tests/integration/extra_functional/multiple_blocks_created.rs index 458af606d10..f48fbf521f5 100644 --- a/client/tests/integration/extra_functional/multiple_blocks_created.rs +++ b/client/tests/integration/extra_functional/multiple_blocks_created.rs @@ -3,12 +3,10 @@ use std::thread; use eyre::Result; use iroha::{ client::{self, Client, QueryResult}, - data_model::{ - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, - prelude::*, - }, + data_model::{parameter::BlockParameter, prelude::*}, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use test_network::*; use test_samples::gen_account_in; @@ -22,11 +20,9 @@ fn long_multiple_blocks_created() -> Result<()> { wait_for_genesis_committed(&network.clients(), 0); let pipeline_time = Config::pipeline_time(); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
- .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )))?; let create_domain: InstructionBox = Register::domain(Domain::new("domain".parse()?)).into(); let (account_id, _account_keypair) = gen_account_in("domain"); diff --git a/client/tests/integration/extra_functional/normal.rs b/client/tests/integration/extra_functional/normal.rs index c4a2c930ee4..401d3b22626 100644 --- a/client/tests/integration/extra_functional/normal.rs +++ b/client/tests/integration/extra_functional/normal.rs @@ -1,18 +1,19 @@ -use std::num::NonZeroU32; - -use iroha::client; -use iroha_config::parameters::actual::Root as Config; -use iroha_data_model::{asset::AssetDefinitionId, prelude::*}; +use iroha::{ + client, + data_model::{asset::AssetDefinitionId, parameter::BlockParameter, prelude::*}, +}; +use nonzero_ext::nonzero; use test_network::*; #[test] fn tranasctions_should_be_applied() { - let mut configuration = Config::test(); - configuration.chain_wide.max_transactions_in_block = NonZeroU32::new(1).unwrap(); - let (_rt, network, iroha) = NetworkBuilder::new(4, Some(11_300)) - .with_config(configuration) - .create_with_runtime(); + let (_rt, network, iroha) = NetworkBuilder::new(4, Some(11_300)).create_with_runtime(); wait_for_genesis_committed(&network.clients(), 0); + iroha + .submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + ))) + .unwrap(); let domain_id = "and".parse::().unwrap(); let account_id = "ed01201F803CB23B1AAFB958368DF2F67CB78A2D1DFB47FFFC3133718F165F54DFF677@and" diff --git a/client/tests/integration/extra_functional/unregister_peer.rs b/client/tests/integration/extra_functional/unregister_peer.rs index ade2324d525..5fa97a5f231 100644 --- a/client/tests/integration/extra_functional/unregister_peer.rs +++ b/client/tests/integration/extra_functional/unregister_peer.rs @@ -3,12 +3,10 @@ use std::thread; use eyre::Result; use iroha::{ client::{self, QueryResult}, - data_model::{ - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, - prelude::*, - }, + data_model::{parameter::BlockParameter, prelude::*}, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use test_network::*; use test_samples::gen_account_in; @@ -117,20 +115,23 @@ fn init() -> Result<( let (rt, network, client) = Network::start_test_with_runtime(4, Some(10_925)); let pipeline_time = Config::pipeline_time(); iroha_logger::info!("Started"); - let parameters = ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
- .into_set_parameters(); + + let set_max_txns_in_block = SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )); + let create_domain = Register::domain(Domain::new("domain".parse()?)); let (account_id, _account_keypair) = gen_account_in("domain"); let create_account = Register::account(Account::new(account_id.clone())); let asset_definition_id: AssetDefinitionId = "xor#domain".parse()?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); - let instructions = parameters.into_iter().chain([ + let instructions: [InstructionBox; 4] = [ + set_max_txns_in_block.into(), create_domain.into(), create_account.into(), create_asset.into(), - ]); + ]; client.submit_all_blocking(instructions)?; iroha_logger::info!("Init"); Ok(( diff --git a/client/tests/integration/extra_functional/unstable_network.rs b/client/tests/integration/extra_functional/unstable_network.rs index a1c3d46328e..c917b85580d 100644 --- a/client/tests/integration/extra_functional/unstable_network.rs +++ b/client/tests/integration/extra_functional/unstable_network.rs @@ -2,15 +2,17 @@ use std::thread; use iroha::{ client::{self, QueryResult}, - data_model::prelude::*, + data_model::{ + parameter::{BlockParameter, Parameter}, + prelude::*, + }, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use rand::seq::SliceRandom; use test_network::*; use test_samples::ALICE_ID; -const MAX_TRANSACTIONS_IN_BLOCK: u32 = 5; - #[test] fn unstable_network_5_peers_1_fault() { let n_peers = 4; @@ -51,8 +53,6 @@ fn unstable_network( // Given let mut configuration = Config::test(); - configuration.chain_wide.max_transactions_in_block = - MAX_TRANSACTIONS_IN_BLOCK.try_into().unwrap(); #[cfg(debug_assertions)] { configuration.sumeragi.debug_force_soft_fork = force_soft_fork; @@ -63,6 +63,11 @@ fn unstable_network( .with_offline_peers(0) .create_with_runtime(); wait_for_genesis_committed(&network.clients(), n_offline_peers); + iroha + .submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(5_u64)), + ))) + .unwrap(); let pipeline_time = Config::pipeline_time(); diff --git a/client/tests/integration/mod.rs b/client/tests/integration/mod.rs index 37299969665..13b8bd2528c 100644 --- a/client/tests/integration/mod.rs +++ b/client/tests/integration/mod.rs @@ -1,4 +1,3 @@ -mod add_domain; mod asset; mod asset_propagation; mod domain_owner_permissions; diff --git a/client/tests/integration/non_mintable.rs b/client/tests/integration/non_mintable.rs index 4f65579a9be..d976fce1eb9 100644 --- a/client/tests/integration/non_mintable.rs +++ b/client/tests/integration/non_mintable.rs @@ -3,7 +3,7 @@ use std::str::FromStr as _; use eyre::Result; use iroha::{ client::{self, QueryResult}, - data_model::{isi::InstructionBox, metadata::UnlimitedMetadata, prelude::*}, + data_model::{isi::InstructionBox, prelude::*}, }; use test_network::*; use test_samples::ALICE_ID; @@ -20,7 +20,7 @@ fn non_mintable_asset_can_be_minted_once_but_not_twice() -> Result<()> { AssetDefinition::numeric(asset_definition_id.clone()).mintable_once(), ); - let metadata = UnlimitedMetadata::default(); + let metadata = Metadata::default(); let mint = Mint::asset_numeric( 200_u32, diff --git a/client/tests/integration/queries/smart_contract.rs b/client/tests/integration/queries/smart_contract.rs index 551949f2bae..e41c4bd985a 100644 --- a/client/tests/integration/queries/smart_contract.rs +++ b/client/tests/integration/queries/smart_contract.rs 
@@ -20,10 +20,8 @@ fn live_query_is_dropped_after_smart_contract_end() -> Result<()> { .optimize()? .into_bytes()?; - let transaction = client.build_transaction( - WasmSmartContract::from_compiled(wasm), - UnlimitedMetadata::default(), - ); + let transaction = + client.build_transaction(WasmSmartContract::from_compiled(wasm), Metadata::default()); client.submit_transaction_blocking(&transaction)?; let metadata_value: JsonString = client.request(FindAccountKeyValueByIdAndKey::new( @@ -59,10 +57,8 @@ fn smart_contract_can_filter_queries() -> Result<()> { .optimize()? .into_bytes()?; - let transaction = client.build_transaction( - WasmSmartContract::from_compiled(wasm), - UnlimitedMetadata::default(), - ); + let transaction = + client.build_transaction(WasmSmartContract::from_compiled(wasm), Metadata::default()); client.submit_transaction_blocking(&transaction)?; Ok(()) diff --git a/client/tests/integration/set_parameter.rs b/client/tests/integration/set_parameter.rs index cf2fa11accb..ec376d3f817 100644 --- a/client/tests/integration/set_parameter.rs +++ b/client/tests/integration/set_parameter.rs @@ -1,9 +1,12 @@ -use std::str::FromStr; +use std::time::Duration; use eyre::Result; use iroha::{ - client::{self, QueryResult}, - data_model::prelude::*, + client, + data_model::{ + parameter::{Parameter, Parameters, SumeragiParameter, SumeragiParameters}, + prelude::*, + }, }; use test_network::*; @@ -12,51 +15,22 @@ fn can_change_parameter_value() -> Result<()> { let (_rt, _peer, test_client) = ::new().with_port(11_135).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); - let parameter = Parameter::from_str("?BlockTime=4000")?; - let parameter_id = ParameterId::from_str("BlockTime")?; - let param_box = SetParameter::new(parameter); + let old_params: Parameters = test_client.request(client::parameter::all())?; + assert_eq!( + old_params.sumeragi().block_time(), + SumeragiParameters::default().block_time() + ); - let old_params = test_client - .request(client::parameter::all())? - .collect::>>()?; - let param_val_old = old_params - .iter() - .find(|param| param.id() == ¶meter_id) - .expect("Parameter should exist") - .val(); + let block_time = 40_000; + let parameter = Parameter::Sumeragi(SumeragiParameter::BlockTimeMs(block_time)); + let set_param_isi = SetParameter::new(parameter); + test_client.submit_blocking(set_param_isi)?; - test_client.submit_blocking(param_box)?; + let sumeragi_params = test_client.request(client::parameter::all())?.sumeragi; + assert_eq!( + sumeragi_params.block_time(), + Duration::from_millis(block_time) + ); - let new_params = test_client - .request(client::parameter::all())? 
- .collect::>>()?; - let param_val_new = new_params - .iter() - .find(|param| param.id() == ¶meter_id) - .expect("Parameter should exist") - .val(); - - assert_ne!(param_val_old, param_val_new); - Ok(()) -} - -#[test] -fn parameter_propagated() -> Result<()> { - let (_rt, _peer, test_client) = ::new().with_port(10_985).start_with_runtime(); - wait_for_genesis_committed(&vec![test_client.clone()], 0); - - let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(8)).parse()?; - let create_domain = Register::domain(Domain::new(too_long_domain_name)); - let _ = test_client - .submit_blocking(create_domain.clone()) - .expect_err("Should fail before ident length limits update"); - - let parameter = Parameter::from_str("?WSVIdentLengthLimits=1,256_LL")?; - let param_box = SetParameter::new(parameter); - test_client.submit_blocking(param_box)?; - - test_client - .submit_blocking(create_domain) - .expect("Should work after ident length limits update"); Ok(()) } diff --git a/client/tests/integration/smartcontracts/Cargo.toml b/client/tests/integration/smartcontracts/Cargo.toml index 5004748a0c0..e6fb9bcaf40 100644 --- a/client/tests/integration/smartcontracts/Cargo.toml +++ b/client/tests/integration/smartcontracts/Cargo.toml @@ -13,6 +13,7 @@ members = [ "mint_rose_trigger", "executor_with_admin", "executor_with_custom_permission", + "executor_with_custom_parameter", "executor_remove_permission", "executor_with_migration_fail", "executor_custom_instructions_simple", diff --git a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs index 811e64fbbb8..83fb83970b0 100644 --- a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs +++ b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs @@ -21,8 +21,6 @@ fn main(_id: TriggerId, _owner: AccountId, _event: EventBox) { let accounts_cursor = FindAllAccounts.execute().dbg_unwrap(); - let limits = MetadataLimits::new(256, 256); - let bad_domain_ids: [DomainId; 2] = [ "genesis".parse().dbg_unwrap(), "garden_of_live_flowers".parse().dbg_unwrap(), @@ -35,7 +33,7 @@ fn main(_id: TriggerId, _owner: AccountId, _event: EventBox) { continue; } - let mut metadata = Metadata::new(); + let mut metadata = Metadata::default(); let name = format!( "nft_for_{}_in_{}", account.id().signatory(), @@ -43,14 +41,14 @@ fn main(_id: TriggerId, _owner: AccountId, _event: EventBox) { ) .parse() .dbg_unwrap(); - metadata.insert_with_limits(name, true, limits).dbg_unwrap(); + metadata.insert(name, true); let nft_id = generate_new_nft_id(account.id()); let nft_definition = AssetDefinition::store(nft_id.clone()) .mintable_once() .with_metadata(metadata); let account_nft_id = AssetId::new(nft_id, account.id().clone()); - let account_nft = Asset::new(account_nft_id, Metadata::new()); + let account_nft = Asset::new(account_nft_id, Metadata::default()); Register::asset_definition(nft_definition) .execute() diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml b/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml index 6ce6deb6833..be8ba50c7f6 100644 --- a/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml @@ -8,6 +8,9 @@ authors.workspace = true license.workspace = true [dependencies] +# TODO: Cargo complains if I take it from 
workspace +iroha_executor = { version = "=2.0.0-pre-rc.21", path = "../../../../../smart_contract/executor", features = ["debug"] } + iroha_data_model.workspace = true iroha_schema.workspace = true diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs b/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs index 08da8f82822..d245d6f5290 100644 --- a/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs @@ -5,4 +5,5 @@ extern crate alloc; pub mod complex_isi; +pub mod parameters; pub mod simple_isi; diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/src/parameters.rs b/client/tests/integration/smartcontracts/executor_custom_data_model/src/parameters.rs index 9c7bb31cfdd..621e646a963 100644 --- a/client/tests/integration/smartcontracts/executor_custom_data_model/src/parameters.rs +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/src/parameters.rs @@ -1,14 +1,12 @@ //! Module with custom parameters use alloc::{format, string::String, vec::Vec}; +use iroha_executor::prelude::*; use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; -use iroha_executor::prelude::*; - /// Parameter that controls domain limits -#[derive(PartialEq, Eq, Parameter, Decode, Encode, Serialize, Deserialize, IntoSchema)] +#[derive(PartialEq, Eq, Parameter, Serialize, Deserialize, IntoSchema)] pub struct DomainLimits { /// Length of domain id in bytes pub id_len: u32, diff --git a/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs b/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs index 7bd80e4e8d1..a9ae532107f 100644 --- a/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs @@ -77,9 +77,11 @@ impl executor_custom_data_model::complex_isi::Context for Context { } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { DataModelBuilder::with_default_permissions() - .with_custom_instruction::() + .add_instruction::() + .add_instruction::() + .add_instruction::() .build_and_set(); Ok(()) diff --git a/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs b/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs index 425f3a16f19..f1dfafe37d1 100644 --- a/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs @@ -55,9 +55,10 @@ fn execute_mint_asset_for_all_accounts(isi: MintAssetForAllAccounts) -> Result<( } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { DataModelBuilder::with_default_permissions() - .with_custom_instruction::() + .add_instruction::() + .add_instruction::() .build_and_set(); Ok(()) diff --git a/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs b/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs index a88a34fd123..83583d2cec0 100644 --- a/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs +++ 
b/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs @@ -23,7 +23,7 @@ struct Executor { } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { // Note that actually migration will reset token schema to default (minus `CanUnregisterDomain`) // So any added custom permission tokens will be also removed DataModelBuilder::with_default_permissions() diff --git a/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs index f34d4f2eb57..0f6b152a16b 100644 --- a/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs @@ -32,6 +32,6 @@ fn visit_instruction(executor: &mut Executor, authority: &AccountId, isi: &Instr } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { Ok(()) } diff --git a/client/tests/integration/smartcontracts/executor_with_custom_parameter/Cargo.toml b/client/tests/integration/smartcontracts/executor_with_custom_parameter/Cargo.toml new file mode 100644 index 00000000000..fe61791b90e --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_with_custom_parameter/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "executor_with_custom_parameter" + +edition.workspace = true +version.workspace = true +authors.workspace = true + +license.workspace = true + +[lib] +crate-type = ['cdylib'] + +[dependencies] +executor_custom_data_model.workspace = true +iroha_executor.workspace = true +iroha_schema.workspace = true + +parity-scale-codec.workspace = true +anyhow.workspace = true +serde_json.workspace = true +serde.workspace = true + +panic-halt.workspace = true +lol_alloc.workspace = true +getrandom.workspace = true diff --git a/client/tests/integration/smartcontracts/executor_with_custom_parameter/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_custom_parameter/src/lib.rs new file mode 100644 index 00000000000..401ce4a17cf --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_with_custom_parameter/src/lib.rs @@ -0,0 +1,51 @@ +//! 
Runtime Executor which allows domains whose id satisfies the length limit +#![no_std] + +extern crate alloc; +#[cfg(not(test))] +extern crate panic_halt; + +use alloc::format; + +use executor_custom_data_model::parameters::DomainLimits; +use iroha_executor::{prelude::*, DataModelBuilder}; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator = LockedAllocator::new(FreeListAllocator::new()); + +getrandom::register_custom_getrandom!(iroha_executor::stub_getrandom); + +#[derive(Constructor, ValidateEntrypoints, Validate, Visit)] +#[visit(custom(visit_register_domain))] +struct Executor { + verdict: Result, + block_height: u64, +} + +fn visit_register_domain(executor: &mut Executor, _authority: &AccountId, isi: &Register) { + let parameters = FindAllParameters.execute().unwrap().into_inner(); + + let domain_limits: DomainLimits = parameters + .custom() + .get(&DomainLimits::id()) + .unwrap() + .try_into() + .expect("INTERNAL BUG: Failed to deserialize json as `DomainLimits`"); + + iroha_executor::log::info!(&format!("Registering domain: {}", isi.object().id())); + if isi.object().id().name().as_ref().len() > domain_limits.id_len as usize { + deny!(executor, "Domain id exceeds the limit"); + } + + execute!(executor, isi); +} + +#[entrypoint] +fn migrate(_block_height: u64) -> MigrationResult { + DataModelBuilder::with_default_permissions() + .add_parameter(DomainLimits::default()) + .build_and_set(); + + Ok(()) +} diff --git a/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs index 0aaa7907707..98d517d084c 100644 --- a/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs @@ -24,7 +24,7 @@ struct Executor { } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { // Performing side-effects to check in the test that it won't be applied after failure // Registering a new domain (using ISI) diff --git a/client/tests/integration/sorting.rs b/client/tests/integration/sorting.rs index 7d4b5a03e4f..bae12a3ed30 100644 --- a/client/tests/integration/sorting.rs +++ b/client/tests/integration/sorting.rs @@ -52,14 +52,8 @@ fn correct_pagination_assets_after_creating_new_one() { let asset_definition_id = AssetDefinitionId::from_str(&format!("xor{i}#wonderland")).expect("Valid"); let asset_definition = AssetDefinition::store(asset_definition_id.clone()); - let mut asset_metadata = Metadata::new(); - asset_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - i as u32, - MetadataLimits::new(10, 23), - ) - .expect("Valid"); + let mut asset_metadata = Metadata::default(); + asset_metadata.insert(sort_by_metadata_key.clone(), i as u32); let asset = Asset::new( AssetId::new(asset_definition_id, account_id.clone()), AssetValue::Store(asset_metadata), @@ -147,14 +141,8 @@ fn correct_sorting_of_entities() { for i in 0..n { let asset_definition_id = AssetDefinitionId::from_str(&format!("xor_{i}#wonderland")).expect("Valid"); - let mut asset_metadata = Metadata::new(); - asset_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut asset_metadata = Metadata::default(); + asset_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let asset_definition = 
AssetDefinition::numeric(asset_definition_id.clone()) .with_metadata(asset_metadata.clone()); @@ -208,14 +196,8 @@ fn correct_sorting_of_entities() { public_keys.sort_unstable(); for i in 0..n { let account_id = AccountId::new(domain_id.clone(), public_keys[i as usize].clone()); - let mut account_metadata = Metadata::new(); - account_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut account_metadata = Metadata::default(); + account_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let account = Account::new(account_id.clone()).with_metadata(account_metadata.clone()); accounts.push(account_id); @@ -256,14 +238,8 @@ fn correct_sorting_of_entities() { let n = 10u32; for i in 0..n { let domain_id = DomainId::from_str(&format!("neverland{i}")).expect("Valid"); - let mut domain_metadata = Metadata::new(); - domain_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut domain_metadata = Metadata::default(); + domain_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); domains.push(domain_id); @@ -303,14 +279,8 @@ fn correct_sorting_of_entities() { let mut instructions = vec![]; for (idx, val) in input { let domain_id = DomainId::from_str(&format!("neverland_{idx}")).expect("Valid"); - let mut domain_metadata = Metadata::new(); - domain_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - val, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut domain_metadata = Metadata::default(); + domain_metadata.insert(sort_by_metadata_key.clone(), val); let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); domains.push(domain_id); @@ -377,14 +347,8 @@ fn sort_only_elements_which_have_sorting_key() -> Result<()> { accounts_b.push(account_id); account } else { - let mut account_metadata = Metadata::new(); - account_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut account_metadata = Metadata::default(); + account_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let account = Account::new(account_id.clone()).with_metadata(account_metadata); accounts_a.push(account_id); account diff --git a/client/tests/integration/triggers/time_trigger.rs b/client/tests/integration/triggers/time_trigger.rs index f26b2f2ff45..c77ca97eea9 100644 --- a/client/tests/integration/triggers/time_trigger.rs +++ b/client/tests/integration/triggers/time_trigger.rs @@ -6,16 +6,31 @@ use iroha::{ data_model::{ asset::AssetId, events::pipeline::{BlockEventFilter, BlockStatus}, + parameter::SumeragiParameters, prelude::*, transaction::WasmSmartContract, Level, }, }; -use iroha_config::parameters::defaults::chain_wide::CONSENSUS_ESTIMATION as DEFAULT_CONSENSUS_ESTIMATION; use iroha_logger::info; use test_network::*; use test_samples::{gen_account_in, ALICE_ID}; +/// Default estimation of consensus duration. +pub fn default_consensus_estimation() -> Duration { + let default_parameters = SumeragiParameters::default(); + + default_parameters + .block_time() + .checked_add( + default_parameters + .commit_time() + .checked_div(2) + .map_or_else(|| unreachable!(), |x| x), + ) + .map_or_else(|| unreachable!(), |x| x) +} + fn curr_time() -> core::time::Duration { use std::time::SystemTime; @@ -41,7 +56,7 @@ macro_rules! 
const_assert { fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result<()> { const PERIOD: Duration = Duration::from_millis(100); const ACCEPTABLE_ERROR_PERCENT: u8 = 15; - const_assert!(PERIOD.as_millis() < DEFAULT_CONSENSUS_ESTIMATION.as_millis()); + assert!(PERIOD.as_millis() < default_consensus_estimation().as_millis()); const_assert!(ACCEPTABLE_ERROR_PERCENT <= 100); let (_rt, _peer, mut test_client) = ::new().with_port(10_775).start_with_runtime(); @@ -77,7 +92,7 @@ fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result Duration::from_secs(1), 3, )?; - std::thread::sleep(DEFAULT_CONSENSUS_ESTIMATION); + std::thread::sleep(default_consensus_estimation()); let finish_time = curr_time(); let average_count = finish_time.saturating_sub(start_time).as_millis() / PERIOD.as_millis(); @@ -104,7 +119,7 @@ fn mint_asset_after_3_sec() -> Result<()> { let (_rt, _peer, test_client) = ::new().with_port(10_665).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); // Sleep to certainly bypass time interval analyzed by genesis - std::thread::sleep(DEFAULT_CONSENSUS_ESTIMATION); + std::thread::sleep(default_consensus_estimation()); let asset_definition_id = "rose#wonderland" .parse::() @@ -139,7 +154,7 @@ fn mint_asset_after_3_sec() -> Result<()> { assert_eq!(init_quantity, after_registration_quantity); // Sleep long enough that trigger start is in the past - std::thread::sleep(DEFAULT_CONSENSUS_ESTIMATION); + std::thread::sleep(default_consensus_estimation()); test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?; let after_wait_quantity = test_client.request(FindAssetQuantityById { @@ -201,9 +216,10 @@ fn pre_commit_trigger_should_be_executed() -> Result<()> { #[test] fn mint_nft_for_every_user_every_1_sec() -> Result<()> { - // Building trigger - info!("Building trigger"); + const TRIGGER_PERIOD: Duration = Duration::from_millis(1000); + const EXPECTED_COUNT: u64 = 4; + info!("Building trigger"); let wasm = iroha_wasm_builder::Builder::new( "tests/integration/smartcontracts/create_nft_for_every_user_trigger", ) @@ -214,9 +230,6 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { info!("WASM size is {} bytes", wasm.len()); - const TRIGGER_PERIOD: Duration = Duration::from_millis(1000); - const EXPECTED_COUNT: u64 = 4; - let (_rt, _peer, mut test_client) = ::new().with_port(10_780).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); diff --git a/client/tests/integration/tx_history.rs b/client/tests/integration/tx_history.rs index 77ee2fcfa2e..be415f90eae 100644 --- a/client/tests/integration/tx_history.rs +++ b/client/tests/integration/tx_history.rs @@ -46,7 +46,7 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> &mint_not_existed_asset }; let instructions: Vec = vec![mint_asset.clone().into()]; - let transaction = client.build_transaction(instructions, UnlimitedMetadata::new()); + let transaction = client.build_transaction(instructions, Metadata::default()); client.submit_transaction(&transaction)?; } thread::sleep(pipeline_time * 5); diff --git a/client/tests/integration/upgrade.rs b/client/tests/integration/upgrade.rs index 1aef328fdef..242bf039412 100644 --- a/client/tests/integration/upgrade.rs +++ b/client/tests/integration/upgrade.rs @@ -4,14 +4,16 @@ use eyre::Result; use futures_util::TryStreamExt as _; use iroha::{ client::{self, Client, QueryResult}, - data_model::prelude::*, + data_model::{ + 
parameter::{Parameter, SmartContractParameter},
+        prelude::*,
+    },
 };
-use iroha_data_model::parameter::{default::EXECUTOR_FUEL_LIMIT, ParametersBuilder};
 use iroha_logger::info;
+use nonzero_ext::nonzero;
 use serde_json::json;
 use test_network::*;
 use test_samples::{ALICE_ID, BOB_ID};
-use tokio::sync::mpsc;
 
 const ADMIN_PUBLIC_KEY_MULTIHASH: &str =
     "ed012076E5CA9698296AF9BE2CA45F525CB3BCFDEB7EE068BA56F973E9DD90564EF4FC";
@@ -240,25 +242,14 @@ fn executor_custom_instructions_complex() -> Result<()> {
     use executor_custom_data_model::complex_isi::{
         ConditionalExpr, CoreExpr, EvaluatesTo, Expression, Greater,
     };
-    use iroha_config::parameters::actual::Root as Config;
 
-    let mut config = Config::test();
-    // Note that this value will be overwritten by genesis block with NewParameter ISI
-    // But it will be needed after NewParameter removal in #4597
-    config.chain_wide.executor_runtime.fuel_limit = 1_000_000_000;
-
-    let (_rt, _peer, client) = PeerBuilder::new()
-        .with_port(11_275)
-        .with_config(config)
-        .start_with_runtime();
+    let (_rt, _peer, client) = PeerBuilder::new().with_port(11_275).start_with_runtime();
     wait_for_genesis_committed(&vec![client.clone()], 0);
 
-    // Remove this after #4597 - config value will be used (see above)
-    let parameters = ParametersBuilder::new()
-        .add_parameter(EXECUTOR_FUEL_LIMIT, Numeric::from(1_000_000_000_u32))?
-        .into_set_parameters();
-    client.submit_all_blocking(parameters)?;
-
+    let executor_fuel_limit = SetParameter::new(Parameter::Executor(SmartContractParameter::Fuel(
+        nonzero!(1_000_000_000_u64),
+    )));
+    client.submit_blocking(executor_fuel_limit)?;
     upgrade_executor(
         &client,
         "tests/integration/smartcontracts/executor_custom_instructions_complex",
@@ -344,10 +335,8 @@ fn migration_should_cause_upgrade_event() {
     let (rt, _peer, client) = <PeerBuilder>::new().with_port(10_996).start_with_runtime();
     wait_for_genesis_committed(&vec![client.clone()], 0);
 
-    let (sender, mut receiver) = mpsc::channel(1);
     let events_client = client.clone();
-
-    let _handle = rt.spawn(async move {
+    let task = rt.spawn(async move {
         let mut stream = events_client
             .listen_for_events_async([ExecutorEventFilter::new()])
             .await
@@ -357,7 +346,8 @@ fn migration_should_cause_upgrade_event() {
                 new_data_model,
             }))) = event
             {
-                let _ = sender.send(new_data_model).await;
+                assert!(!new_data_model.permissions.is_empty());
+                break;
             }
         }
     });
@@ -368,15 +358,43 @@ fn migration_should_cause_upgrade_event() {
     )
     .unwrap();
 
-    let data_model = rt
-        .block_on(async {
-            tokio::time::timeout(std::time::Duration::from_secs(60), receiver.recv()).await
-        })
-        .ok()
-        .flatten()
-        .expect("should receive upgraded event immediately after upgrade");
+    rt.block_on(async {
+        tokio::time::timeout(core::time::Duration::from_secs(60), task)
+            .await
+            .unwrap()
+    })
+    .expect("should receive upgraded event immediately after upgrade");
+}
+
+#[test]
+fn define_custom_parameter() -> Result<()> {
+    use executor_custom_data_model::parameters::DomainLimits;
+
+    let (_rt, _peer, client) = <PeerBuilder>::new().with_port(10_996).start_with_runtime();
+    wait_for_genesis_committed(&vec![client.clone()], 0);
 
-    assert!(!data_model.permissions.is_empty());
+    let long_domain_name = "0".repeat(2_usize.pow(5)).parse::<DomainId>()?;
+    let create_domain = Register::domain(Domain::new(long_domain_name));
+    client.submit_blocking(create_domain)?;
+
+    upgrade_executor(
+        &client,
+        "tests/integration/smartcontracts/executor_with_custom_parameter",
+    )
+    .unwrap();
+
+    let too_long_domain_name = "1".repeat(2_usize.pow(5)).parse::<DomainId>()?;
+    let create_domain = 
Register::domain(Domain::new(too_long_domain_name)); + let _err = client.submit_blocking(create_domain.clone()).unwrap_err(); + + let parameter = DomainLimits { + id_len: 2_u32.pow(6), + } + .into(); + let set_param_isi: InstructionBox = SetParameter::new(parameter).into(); + client.submit_all_blocking([set_param_isi, create_domain.into()])?; + + Ok(()) } fn upgrade_executor(client: &Client, executor: impl AsRef) -> Result<()> { diff --git a/client_cli/pytests/test/assets/test_register_asset_definitions.py b/client_cli/pytests/test/assets/test_register_asset_definitions.py index cb6308378dd..6ef4c4f9a6f 100644 --- a/client_cli/pytests/test/assets/test_register_asset_definitions.py +++ b/client_cli/pytests/test/assets/test_register_asset_definitions.py @@ -33,24 +33,6 @@ def test_register_asset_definition_with_numeric_type( ) -@allure.label("sdk_test_id", "register_asset_definition_with_too_long_name") -def test_register_asset_definition_with_too_long_name( - GIVEN_129_length_name, GIVEN_registered_domain, GIVEN_numeric_type -): - with allure.step( - f'WHEN client_cli registers the asset_definition "{GIVEN_129_length_name}" ' - f'with "{GIVEN_numeric_type}" value type' - f'in the "{GIVEN_registered_domain.name}" domain' - ): - client_cli.register().asset().definition( - asset=GIVEN_129_length_name, - domain=GIVEN_registered_domain.name, - type_=GIVEN_numeric_type, - ) - with allure.step(f'THEN Iroha should have the asset "{GIVEN_129_length_name}"'): - client_cli.should(have.error(Stderr.TOO_LONG.value)) - - @allure.label("sdk_test_id", "register_asset_definition_with_store_type") def test_register_asset_definition_with_store_type( GIVEN_fake_asset_name, GIVEN_registered_domain, GIVEN_store_type diff --git a/client_cli/pytests/test/domains/test_register_domains.py b/client_cli/pytests/test/domains/test_register_domains.py index 7f01073934e..4b9752c9b5d 100644 --- a/client_cli/pytests/test/domains/test_register_domains.py +++ b/client_cli/pytests/test/domains/test_register_domains.py @@ -66,30 +66,6 @@ def test_register_one_letter_domain(GIVEN_random_character): iroha.should(have.domain(GIVEN_random_character)) -@allure.label("sdk_test_id", "register_max_length_domain") -def test_register_max_length_domain(GIVEN_128_length_name): - with allure.step( - f'WHEN client_cli registers the longest domain "{GIVEN_128_length_name}"' - ): - client_cli.register().domain(GIVEN_128_length_name) - with allure.step( - f'THEN Iroha should have the longest domain "{GIVEN_128_length_name}"' - ): - iroha.should(have.domain(GIVEN_128_length_name)) - - -@allure.label("sdk_test_id", "register_domain_with_too_long_name") -def test_register_domain_with_too_long_name(GIVEN_129_length_name): - with allure.step( - f'WHEN client_cli registers the domain "{GIVEN_129_length_name}" with too long name' - ): - client_cli.register().domain(GIVEN_129_length_name) - with allure.step( - f'THEN client_cli should have the too long domain error: "{Stderr.TOO_LONG}"' - ): - client_cli.should(have.error(Stderr.TOO_LONG.value)) - - @allure.label("sdk_test_id", "register_domain_with_reserved_character") def test_register_domain_with_reserved_character(GIVEN_string_with_reserved_character): with allure.step( diff --git a/client_cli/src/main.rs b/client_cli/src/main.rs index 81dd1dd3b7b..3ccf2ddea90 100644 --- a/client_cli/src/main.rs +++ b/client_cli/src/main.rs @@ -30,20 +30,19 @@ pub struct MetadataArgs { } impl MetadataArgs { - fn load(self) -> Result { - let value: Option = self + fn load(self) -> Result { + let value: Option = self 
.metadata .map(|path| { let content = fs::read_to_string(&path).wrap_err_with(|| { eyre!("Failed to read the metadata file `{}`", path.display()) })?; - let metadata: UnlimitedMetadata = - json5::from_str(&content).wrap_err_with(|| { - eyre!( - "Failed to deserialize metadata from file `{}`", - path.display() - ) - })?; + let metadata: Metadata = json5::from_str(&content).wrap_err_with(|| { + eyre!( + "Failed to deserialize metadata from file `{}`", + path.display() + ) + })?; Ok::<_, eyre::Report>(metadata) }) .transpose()?; @@ -235,7 +234,7 @@ fn color_mode() -> ColorMode { #[allow(clippy::shadow_unrelated)] fn submit( instructions: impl Into, - metadata: UnlimitedMetadata, + metadata: Metadata, context: &mut dyn RunContext, ) -> Result<()> { let iroha = context.client_from_config(); @@ -488,7 +487,7 @@ mod domain { value: MetadataValueArg { value }, } = self; let set_key_value = SetKeyValue::domain(id, key, value); - submit([set_key_value], UnlimitedMetadata::new(), context) + submit([set_key_value], Metadata::default(), context) .wrap_err("Failed to submit Set instruction") } } @@ -508,7 +507,7 @@ mod domain { fn run(self, context: &mut dyn RunContext) -> Result<()> { let Self { id, key } = self; let remove_key_value = RemoveKeyValue::domain(id, key); - submit([remove_key_value], UnlimitedMetadata::new(), context) + submit([remove_key_value], Metadata::default(), context) .wrap_err("Failed to submit Remove instruction") } } @@ -885,7 +884,7 @@ mod asset { } = self; let set = iroha::data_model::isi::SetKeyValue::asset(asset_id, key, value); - submit([set], UnlimitedMetadata::default(), context)?; + submit([set], Metadata::default(), context)?; Ok(()) } } @@ -903,7 +902,7 @@ mod asset { fn run(self, context: &mut dyn RunContext) -> Result<()> { let Self { asset_id, key } = self; let remove = iroha::data_model::isi::RemoveKeyValue::asset(asset_id, key); - submit([remove], UnlimitedMetadata::default(), context)?; + submit([remove], Metadata::default(), context)?; Ok(()) } } @@ -1034,7 +1033,7 @@ mod wasm { submit( WasmSmartContract::from_compiled(raw_data), - UnlimitedMetadata::new(), + Metadata::default(), context, ) .wrap_err("Failed to submit a Wasm smart contract") @@ -1074,7 +1073,7 @@ mod json { match self.variant { Variant::Transaction => { let instructions: Vec = json5::from_str(&string_content)?; - submit(instructions, UnlimitedMetadata::new(), context) + submit(instructions, Metadata::default(), context) .wrap_err("Failed to submit parsed instructions") } Variant::Query => { diff --git a/config/src/parameters/actual.rs b/config/src/parameters/actual.rs index 0bea7dd1b13..8e51f814c44 100644 --- a/config/src/parameters/actual.rs +++ b/config/src/parameters/actual.rs @@ -10,12 +10,8 @@ use std::{ use error_stack::{Result, ResultExt}; use iroha_config_base::{read::ConfigReader, toml::TomlSource, util::Bytes, WithOrigin}; use iroha_crypto::{KeyPair, PublicKey}; -use iroha_data_model::{ - metadata::Limits as MetadataLimits, peer::PeerId, transaction::TransactionLimits, ChainId, - LengthLimits, -}; +use iroha_data_model::{peer::PeerId, ChainId}; use iroha_primitives::{addr::SocketAddr, unique_vec::UniqueVec}; -use serde::{Deserialize, Serialize}; use url::Url; pub use user::{DevTelemetry, Logger, Snapshot}; @@ -42,7 +38,6 @@ pub struct Root { pub snapshot: Snapshot, pub telemetry: Option, pub dev_telemetry: DevTelemetry, - pub chain_wide: ChainWide, } /// See [`Root::from_toml_source`] @@ -168,78 +163,14 @@ impl Default for LiveQueryStore { #[derive(Debug, Clone, Copy)] pub struct 
BlockSync { pub gossip_period: Duration, - pub gossip_max_size: NonZeroU32, + pub gossip_size: NonZeroU32, } #[derive(Debug, Clone, Copy)] #[allow(missing_docs)] pub struct TransactionGossiper { pub gossip_period: Duration, - pub gossip_max_size: NonZeroU32, -} - -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -#[allow(missing_docs)] -pub struct ChainWide { - pub max_transactions_in_block: NonZeroU32, - pub block_time: Duration, - pub commit_time: Duration, - pub transaction_limits: TransactionLimits, - pub domain_metadata_limits: MetadataLimits, - pub asset_definition_metadata_limits: MetadataLimits, - pub account_metadata_limits: MetadataLimits, - pub asset_metadata_limits: MetadataLimits, - pub trigger_metadata_limits: MetadataLimits, - pub ident_length_limits: LengthLimits, - pub executor_runtime: WasmRuntime, - pub wasm_runtime: WasmRuntime, -} - -impl ChainWide { - /// Calculate pipeline time based on the block time and commit time - pub fn pipeline_time(&self) -> Duration { - self.block_time + self.commit_time - } - - /// Estimates as `block_time + commit_time / 2` - pub fn consensus_estimation(&self) -> Duration { - self.block_time + (self.commit_time / 2) - } -} - -impl Default for ChainWide { - fn default() -> Self { - Self { - max_transactions_in_block: defaults::chain_wide::MAX_TXS, - block_time: defaults::chain_wide::BLOCK_TIME, - commit_time: defaults::chain_wide::COMMIT_TIME, - transaction_limits: defaults::chain_wide::TRANSACTION_LIMITS, - domain_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - account_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - asset_definition_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - asset_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - trigger_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - ident_length_limits: defaults::chain_wide::IDENT_LENGTH_LIMITS, - executor_runtime: WasmRuntime::default(), - wasm_runtime: WasmRuntime::default(), - } - } -} - -#[allow(missing_docs)] -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -pub struct WasmRuntime { - pub fuel_limit: u64, - pub max_memory: Bytes, -} - -impl Default for WasmRuntime { - fn default() -> Self { - Self { - fuel_limit: defaults::chain_wide::WASM_FUEL_LIMIT, - max_memory: defaults::chain_wide::WASM_MAX_MEMORY, - } - } + pub gossip_size: NonZeroU32, } #[derive(Debug, Clone)] diff --git a/config/src/parameters/defaults.rs b/config/src/parameters/defaults.rs index e68bfbf1c73..aa5e96ce6a9 100644 --- a/config/src/parameters/defaults.rs +++ b/config/src/parameters/defaults.rs @@ -8,7 +8,6 @@ use std::{ time::Duration, }; -use iroha_data_model::{prelude::MetadataLimits, transaction::TransactionLimits, LengthLimits}; use nonzero_ext::nonzero; pub mod queue { @@ -29,10 +28,10 @@ pub mod network { use super::*; pub const TRANSACTION_GOSSIP_PERIOD: Duration = Duration::from_secs(1); - pub const TRANSACTION_GOSSIP_MAX_SIZE: NonZeroU32 = nonzero!(500u32); + pub const TRANSACTION_GOSSIP_SIZE: NonZeroU32 = nonzero!(500u32); pub const BLOCK_GOSSIP_PERIOD: Duration = Duration::from_secs(10); - pub const BLOCK_GOSSIP_MAX_SIZE: NonZeroU32 = nonzero!(4u32); + pub const BLOCK_GOSSIP_SIZE: NonZeroU32 = nonzero!(4u32); pub const IDLE_TIMEOUT: Duration = Duration::from_secs(60); } @@ -45,41 +44,6 @@ pub mod snapshot { pub const CREATE_EVERY: Duration = Duration::from_secs(60); } -pub mod chain_wide { - use iroha_config_base::util::Bytes; - - use super::*; - - pub const MAX_TXS: NonZeroU32 = nonzero!(2_u32.pow(9)); - pub const BLOCK_TIME: Duration = 
Duration::from_secs(2); - pub const COMMIT_TIME: Duration = Duration::from_secs(4); - pub const WASM_FUEL_LIMIT: u64 = 55_000_000; - pub const WASM_MAX_MEMORY: Bytes = Bytes(500 * 2_u32.pow(20)); - - /// Default estimation of consensus duration. - pub const CONSENSUS_ESTIMATION: Duration = - match BLOCK_TIME.checked_add(match COMMIT_TIME.checked_div(2) { - Some(x) => x, - None => unreachable!(), - }) { - Some(x) => x, - None => unreachable!(), - }; - - /// Default limits for metadata - pub const METADATA_LIMITS: MetadataLimits = MetadataLimits::new(2_u32.pow(20), 2_u32.pow(12)); - /// Default limits for ident length - pub const IDENT_LENGTH_LIMITS: LengthLimits = LengthLimits::new(1, 2_u32.pow(7)); - /// Default maximum number of instructions and expressions per transaction - pub const MAX_INSTRUCTION_NUMBER: u64 = 2_u64.pow(12); - /// Default maximum number of instructions and expressions per transaction - pub const MAX_WASM_SIZE_BYTES: u64 = 4 * 2_u64.pow(20); - - /// Default transaction limits - pub const TRANSACTION_LIMITS: TransactionLimits = - TransactionLimits::new(MAX_INSTRUCTION_NUMBER, MAX_WASM_SIZE_BYTES); -} - pub mod torii { use std::time::Duration; diff --git a/config/src/parameters/user.rs b/config/src/parameters/user.rs index a619626e896..c3fd636ec8f 100644 --- a/config/src/parameters/user.rs +++ b/config/src/parameters/user.rs @@ -25,10 +25,7 @@ use iroha_config_base::{ ReadConfig, WithOrigin, }; use iroha_crypto::{PrivateKey, PublicKey}; -use iroha_data_model::{ - metadata::Limits as MetadataLimits, peer::PeerId, transaction::TransactionLimits, ChainId, - LengthLimits, Level, -}; +use iroha_data_model::{peer::PeerId, ChainId, Level}; use iroha_primitives::{addr::SocketAddr, unique_vec::UniqueVec}; use serde::Deserialize; use url::Url; @@ -81,8 +78,6 @@ pub struct Root { dev_telemetry: DevTelemetry, #[config(nested)] torii: Torii, - #[config(nested)] - chain_wide: ChainWide, } #[derive(thiserror::Error, Debug, Copy, Clone)] @@ -119,7 +114,6 @@ impl Root { let dev_telemetry = self.dev_telemetry; let (torii, live_query_store) = self.torii.parse(); let telemetry = self.telemetry.map(actual::Telemetry::from); - let chain_wide = self.chain_wide.parse(); let peer_id = key_pair.as_ref().map(|key_pair| { PeerId::new( @@ -156,7 +150,6 @@ impl Root { snapshot, telemetry, dev_telemetry, - chain_wide, }) } } @@ -272,12 +265,12 @@ pub struct Network { /// Peer-to-peer address #[config(env = "P2P_ADDRESS")] pub address: WithOrigin, - #[config(default = "defaults::network::BLOCK_GOSSIP_MAX_SIZE")] - pub block_gossip_max_size: NonZeroU32, + #[config(default = "defaults::network::BLOCK_GOSSIP_SIZE")] + pub block_gossip_size: NonZeroU32, #[config(default = "defaults::network::BLOCK_GOSSIP_PERIOD.into()")] pub block_gossip_period_ms: DurationMs, - #[config(default = "defaults::network::TRANSACTION_GOSSIP_MAX_SIZE")] - pub transaction_gossip_max_size: NonZeroU32, + #[config(default = "defaults::network::TRANSACTION_GOSSIP_SIZE")] + pub transaction_gossip_size: NonZeroU32, #[config(default = "defaults::network::TRANSACTION_GOSSIP_PERIOD.into()")] pub transaction_gossip_period_ms: DurationMs, /// Duration of time after which connection with peer is terminated if peer is idle @@ -295,9 +288,9 @@ impl Network { ) { let Self { address, - block_gossip_max_size, + block_gossip_size, block_gossip_period_ms: block_gossip_period, - transaction_gossip_max_size, + transaction_gossip_size, transaction_gossip_period_ms: transaction_gossip_period, idle_timeout_ms: idle_timeout, } = self; @@ -309,11 +302,11 
@@ impl Network { }, actual::BlockSync { gossip_period: block_gossip_period.get(), - gossip_max_size: block_gossip_max_size, + gossip_size: block_gossip_size, }, actual::TransactionGossiper { gossip_period: transaction_gossip_period.get(), - gossip_max_size: transaction_gossip_max_size, + gossip_size: transaction_gossip_size, }, ) } @@ -433,81 +426,6 @@ pub struct Snapshot { pub store_dir: WithOrigin, } -// TODO: make serde -#[derive(Debug, Copy, Clone, ReadConfig)] -pub struct ChainWide { - #[config(default = "defaults::chain_wide::MAX_TXS")] - pub max_transactions_in_block: NonZeroU32, - #[config(default = "defaults::chain_wide::BLOCK_TIME.into()")] - pub block_time_ms: DurationMs, - #[config(default = "defaults::chain_wide::COMMIT_TIME.into()")] - pub commit_time_ms: DurationMs, - #[config(default = "defaults::chain_wide::TRANSACTION_LIMITS")] - pub transaction_limits: TransactionLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub domain_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub asset_definition_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub account_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub asset_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub trigger_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::IDENT_LENGTH_LIMITS")] - pub ident_length_limits: LengthLimits, - #[config(default = "defaults::chain_wide::WASM_FUEL_LIMIT")] - pub executor_fuel_limit: u64, - #[config(default = "defaults::chain_wide::WASM_MAX_MEMORY")] - pub executor_max_memory: Bytes, - #[config(default = "defaults::chain_wide::WASM_FUEL_LIMIT")] - pub wasm_fuel_limit: u64, - #[config(default = "defaults::chain_wide::WASM_MAX_MEMORY")] - pub wasm_max_memory: Bytes, -} - -impl ChainWide { - fn parse(self) -> actual::ChainWide { - let Self { - max_transactions_in_block, - block_time_ms: DurationMs(block_time), - commit_time_ms: DurationMs(commit_time), - transaction_limits, - asset_metadata_limits, - trigger_metadata_limits, - asset_definition_metadata_limits, - account_metadata_limits, - domain_metadata_limits, - ident_length_limits, - executor_fuel_limit, - executor_max_memory, - wasm_fuel_limit, - wasm_max_memory, - } = self; - - actual::ChainWide { - max_transactions_in_block, - block_time, - commit_time, - transaction_limits, - asset_metadata_limits, - trigger_metadata_limits, - asset_definition_metadata_limits, - account_metadata_limits, - domain_metadata_limits, - ident_length_limits, - executor_runtime: actual::WasmRuntime { - fuel_limit: executor_fuel_limit, - max_memory: executor_max_memory, - }, - wasm_runtime: actual::WasmRuntime { - fuel_limit: wasm_fuel_limit, - max_memory: wasm_max_memory, - }, - } - } -} - #[derive(Debug, ReadConfig)] pub struct Torii { #[config(env = "API_ADDRESS")] diff --git a/config/tests/fixtures.rs b/config/tests/fixtures.rs index ae29cb5ba15..411b1460fd8 100644 --- a/config/tests/fixtures.rs +++ b/config/tests/fixtures.rs @@ -162,11 +162,11 @@ fn minimal_config_snapshot() { }, block_sync: BlockSync { gossip_period: 10s, - gossip_max_size: 4, + gossip_size: 4, }, transaction_gossiper: TransactionGossiper { gossip_period: 1s, - gossip_max_size: 500, + gossip_size: 500, }, live_query_store: LiveQueryStore { idle_time: 30s, @@ -197,51 +197,6 @@ fn minimal_config_snapshot() { dev_telemetry: DevTelemetry { 
out_file: None, }, - chain_wide: ChainWide { - max_transactions_in_block: 512, - block_time: 2s, - commit_time: 4s, - transaction_limits: TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 4194304, - }, - domain_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - asset_definition_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - account_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - asset_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - trigger_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - ident_length_limits: LengthLimits { - min: 1, - max: 128, - }, - executor_runtime: WasmRuntime { - fuel_limit: 55000000, - max_memory: Bytes( - 524288000, - ), - }, - wasm_runtime: WasmRuntime { - fuel_limit: 55000000, - max_memory: Bytes( - 524288000, - ), - }, - }, }"#]].assert_eq(&format!("{config:#?}")); } diff --git a/config/tests/fixtures/full.toml b/config/tests/fixtures/full.toml index 02404b6cf3d..22aa92459af 100644 --- a/config/tests/fixtures/full.toml +++ b/config/tests/fixtures/full.toml @@ -11,9 +11,9 @@ signed_file = "genesis.signed.scale" [network] address = "localhost:3840" block_gossip_period_ms = 10_000 -block_gossip_max_size = 4 +block_gossip_size = 4 transaction_gossip_period_ms = 1_000 -transaction_gossip_max_size = 500 +transaction_gossip_size = 500 idle_timeout_ms = 10_000 [torii] @@ -40,8 +40,8 @@ level = "TRACE" format = "compact" [queue] -capacity = 65536 -capacity_per_user = 65536 +capacity = 65_536 +capacity_per_user = 65_536 transaction_time_to_live_ms = 100 future_threshold_ms = 50 @@ -58,16 +58,3 @@ max_retry_delay_exponent = 4 [dev_telemetry] out_file = "./dev_telemetry.json" - -[chain_wide] -max_transactions_in_block = 512 -block_time_ms = 2_000 -commit_time_ms = 4_000 -transaction_limits = { max_instruction_number = 4096, max_wasm_size_bytes = 4194304 } -asset_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -asset_definition_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -account_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -domain_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -ident_length_limits = { min = 1, max = 128 } -wasm_fuel_limit = 55000000 -wasm_max_memory = 524288000 diff --git a/configs/peer.template.toml b/configs/peer.template.toml index 2c32102420d..0dcfb679abb 100644 --- a/configs/peer.template.toml +++ b/configs/peer.template.toml @@ -20,9 +20,9 @@ [network] # address = # block_gossip_period_ms = 10_000 -# block_gossip_max_size = 4 +# block_gossip_size = 4 # transaction_gossip_period_ms = 1_000 -# transaction_gossip_max_size = 500 +# transaction_gossip_size = 500 # idle_timeout_ms = 60_000 [torii] diff --git a/configs/swarm/executor.wasm b/configs/swarm/executor.wasm index 83ca0b9350b..8cf4a4e0d8c 100644 Binary files a/configs/swarm/executor.wasm and b/configs/swarm/executor.wasm differ diff --git a/configs/swarm/genesis.json b/configs/swarm/genesis.json index 432cb2ce7b2..c952e7dd08a 100644 --- a/configs/swarm/genesis.json +++ b/configs/swarm/genesis.json @@ -117,48 +117,6 @@ } } }, - { - "NewParameter": "?MaxTransactionsInBlock=512" - }, - { - "NewParameter": "?BlockTime=2000" - }, - { - "NewParameter": "?CommitTimeLimit=4000" - }, - { - "NewParameter": "?TransactionLimits=4096,4194304_TL" - }, - { - "NewParameter": "?WSVDomainMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": 
"?WSVAssetDefinitionMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVAccountMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVAssetMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVTriggerMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVIdentLengthLimits=1,128_LL" - }, - { - "NewParameter": "?ExecutorFuelLimit=55000000" - }, - { - "NewParameter": "?ExecutorMaxMemory=524288000" - }, - { - "NewParameter": "?WASMFuelLimit=55000000" - }, - { - "NewParameter": "?WASMMaxMemory=524288000" - }, { "Register": { "Role": { diff --git a/core/benches/blocks/common.rs b/core/benches/blocks/common.rs index ee3c08b88c8..6ec0bcf3055 100644 --- a/core/benches/blocks/common.rs +++ b/core/benches/blocks/common.rs @@ -1,4 +1,4 @@ -use std::str::FromStr as _; +use std::{num::NonZeroU64, str::FromStr as _}; use iroha_core::{ block::{BlockBuilder, CommittedBlock}, @@ -13,8 +13,8 @@ use iroha_data_model::{ asset::{AssetDefinition, AssetDefinitionId}, domain::Domain, isi::InstructionBox, + parameter::TransactionParameters, prelude::*, - transaction::TransactionLimits, ChainId, }; use iroha_primitives::{json::JsonString, unique_vec::UniqueVec}; @@ -34,7 +34,7 @@ pub fn create_block( let transaction = TransactionBuilder::new(chain_id.clone(), account_id) .with_instructions(instructions) .sign(account_private_key); - let limits = state.transaction_executor().transaction_limits; + let limits = state.transaction_executor().limits; let block = BlockBuilder::new( vec![AcceptedTransaction::accept(transaction, &chain_id, limits).unwrap()], @@ -197,9 +197,10 @@ pub fn build_state(rt: &tokio::runtime::Handle, account_id: &AccountId) -> State { let mut state_block = state.block(); - state_block.config.transaction_limits = TransactionLimits::new(u64::MAX, u64::MAX); - state_block.config.executor_runtime.fuel_limit = u64::MAX; - state_block.config.executor_runtime.max_memory = u32::MAX.into(); + state_block.world.parameters.transaction = + TransactionParameters::new(NonZeroU64::MAX, NonZeroU64::MAX); + state_block.world.parameters.executor.fuel = NonZeroU64::MAX; + state_block.world.parameters.executor.memory = NonZeroU64::MAX; let mut state_transaction = state_block.transaction(); let path_to_executor = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) diff --git a/core/benches/kura.rs b/core/benches/kura.rs index bc3b55a49e6..ad00f123533 100644 --- a/core/benches/kura.rs +++ b/core/benches/kura.rs @@ -14,7 +14,8 @@ use iroha_core::{ sumeragi::network_topology::Topology, }; use iroha_crypto::KeyPair; -use iroha_data_model::{prelude::*, transaction::TransactionLimits}; +use iroha_data_model::{parameter::TransactionParameters, prelude::*}; +use nonzero_ext::nonzero; use test_samples::gen_account_in; use tokio::{fs, runtime::Runtime}; @@ -29,11 +30,11 @@ async fn measure_block_size_for_n_executors(n_executors: u32) { let tx = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) .with_instructions([transfer]) .sign(alice_keypair.private_key()); - let transaction_limits = TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, + let txn_limits = TransactionParameters { + max_instructions: nonzero!(4096_u64), + smart_contract_size: nonzero!(1_u64), }; - let tx = AcceptedTransaction::accept(tx, &chain_id, transaction_limits) + let tx = AcceptedTransaction::accept(tx, &chain_id, txn_limits) .expect("Failed to accept Transaction."); let dir = tempfile::tempdir().expect("Could not create tempfile."); let cfg = Config { diff --git 
a/core/benches/validation.rs b/core/benches/validation.rs index 4814d0dd2ca..91feaa7a9da 100644 --- a/core/benches/validation.rs +++ b/core/benches/validation.rs @@ -11,12 +11,11 @@ use iroha_core::{ tx::TransactionExecutor, }; use iroha_data_model::{ - account::AccountId, - isi::InstructionBox, - prelude::*, - transaction::{TransactionBuilder, TransactionLimits}, + account::AccountId, isi::InstructionBox, parameter::TransactionParameters, prelude::*, + transaction::TransactionBuilder, }; use iroha_primitives::unique_vec::UniqueVec; +use nonzero_ext::nonzero; use once_cell::sync::Lazy; use test_samples::gen_account_in; @@ -25,10 +24,8 @@ static STARTER_KEYPAIR: Lazy = Lazy::new(KeyPair::random); static STARTER_ID: Lazy = Lazy::new(|| AccountId::new(STARTER_DOMAIN.clone(), STARTER_KEYPAIR.public_key().clone())); -const TRANSACTION_LIMITS: TransactionLimits = TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, -}; +const TRANSACTION_LIMITS: TransactionParameters = + TransactionParameters::new(nonzero!(4096_u64), nonzero!(1_u64)); fn build_test_transaction(chain_id: ChainId) -> TransactionBuilder { let domain_id: DomainId = "domain".parse().unwrap(); diff --git a/core/src/block.rs b/core/src/block.rs index 7efaba05a69..179b5d6024c 100644 --- a/core/src/block.rs +++ b/core/src/block.rs @@ -107,7 +107,10 @@ pub enum InvalidGenesisError { pub struct BlockBuilder(B); mod pending { - use std::time::{Duration, SystemTime}; + use std::{ + num::NonZeroUsize, + time::{Duration, SystemTime}, + }; use iroha_data_model::transaction::CommittedTransaction; @@ -156,11 +159,14 @@ mod pending { consensus_estimation: Duration, ) -> BlockHeader { BlockHeader { - height: prev_height - .checked_add(1) - .expect("INTERNAL BUG: Blockchain height exceeds usize::MAX") - .try_into() - .expect("INTERNAL BUG: Number of blocks exceeds u64::MAX"), + height: NonZeroUsize::new( + prev_height + .checked_add(1) + .expect("INTERNAL BUG: Blockchain height exceeds usize::MAX"), + ) + .expect("INTERNAL BUG: block height must not be 0") + .try_into() + .expect("INTERNAL BUG: Number of blocks exceeds u64::MAX"), prev_block_hash, transactions_hash: transactions .iter() @@ -228,7 +234,7 @@ mod pending { state.latest_block_hash(), view_change_index, &transactions, - state.config.consensus_estimation(), + state.world.parameters().sumeragi.consensus_estimation(), ), transactions, commit_topology: self.0.commit_topology.into_iter().collect(), @@ -414,10 +420,14 @@ mod valid { genesis_account: &AccountId, state_block: &mut StateBlock<'_>, ) -> WithEvents> { - let expected_block_height = state_block.height() + 1; + let expected_block_height = state_block + .height() + .checked_add(1) + .expect("INTERNAL BUG: Block height exceeds usize::MAX"); let actual_height = block .header() .height + .get() .try_into() .expect("INTERNAL BUG: Block height exceeds usize::MAX"); @@ -516,7 +526,7 @@ mod valid { AcceptedTransaction::accept( value, expected_chain_id, - transaction_executor.transaction_limits, + transaction_executor.limits, ) }?; @@ -638,9 +648,11 @@ mod valid { leader_private_key: &PrivateKey, f: impl FnOnce(&mut BlockPayload), ) -> Self { + use nonzero_ext::nonzero; + let mut payload = BlockPayload { header: BlockHeader { - height: 2, + height: nonzero!(2_u64), prev_block_hash: None, transactions_hash: HashOf::from_untyped_unchecked(Hash::prehashed( [1; Hash::LENGTH], @@ -997,7 +1009,7 @@ mod tests { Register::asset_definition(AssetDefinition::numeric(asset_definition_id)); // Making two transactions that have the 
same instruction - let transaction_limits = state_block.transaction_executor().transaction_limits; + let transaction_limits = state_block.transaction_executor().limits; let tx = TransactionBuilder::new(chain_id.clone(), alice_id) .with_instructions([create_asset_definition]) .sign(alice_keypair.private_key()); @@ -1053,7 +1065,7 @@ mod tests { Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); // Making two transactions that have the same instruction - let transaction_limits = state_block.transaction_executor().transaction_limits; + let transaction_limits = state_block.transaction_executor().limits; let tx = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) .with_instructions([create_asset_definition]) .sign(alice_keypair.private_key()); @@ -1120,7 +1132,7 @@ mod tests { let query_handle = LiveQueryStore::test().start(); let state = State::new(world, kura, query_handle); let mut state_block = state.block(); - let transaction_limits = state_block.transaction_executor().transaction_limits; + let transaction_limits = state_block.transaction_executor().limits; let domain_id = DomainId::from_str("domain").expect("Valid"); let create_domain = Register::domain(Domain::new(domain_id)); diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs index 85dd12ff289..99553729f92 100644 --- a/core/src/block_sync.rs +++ b/core/src/block_sync.rs @@ -41,7 +41,7 @@ pub struct BlockSynchronizer { kura: Arc, peer_id: PeerId, gossip_period: Duration, - gossip_max_size: NonZeroU32, + gossip_size: NonZeroU32, network: IrohaNetwork, state: Arc, } @@ -118,7 +118,7 @@ impl BlockSynchronizer { sumeragi, kura, gossip_period: config.gossip_period, - gossip_max_size: config.gossip_max_size, + gossip_size: config.gossip_size, network, state, } @@ -219,7 +219,7 @@ pub mod message { }; let blocks = (start_height.get()..) 
- .take(block_sync.gossip_max_size.get() as usize + 1) + .take(block_sync.gossip_size.get() as usize + 1) .map_while(|height| { NonZeroUsize::new(height) .and_then(|height| block_sync.kura.get_block_by_height(height)) diff --git a/core/src/executor.rs b/core/src/executor.rs index 28c2ff9f7a0..ab0bd0b2b12 100644 --- a/core/src/executor.rs +++ b/core/src/executor.rs @@ -18,6 +18,7 @@ use serde::{ use crate::{ smartcontracts::{wasm, Execute as _}, state::{deserialize::WasmSeed, StateReadOnly, StateTransaction}, + WorldReadOnly as _, }; impl From for ValidationFail { @@ -151,7 +152,7 @@ impl Executor { let runtime = wasm::RuntimeBuilder::::new() .with_engine(state_transaction.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_transaction.config.executor_runtime) + .with_config(state_transaction.world.parameters().executor) .build()?; runtime.execute_executor_validate_transaction( @@ -187,7 +188,7 @@ impl Executor { let runtime = wasm::RuntimeBuilder::::new() .with_engine(state_transaction.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_transaction.config.executor_runtime) + .with_config(state_transaction.world.parameters().executor) .build()?; runtime.execute_executor_validate_instruction( @@ -221,7 +222,7 @@ impl Executor { let runtime = wasm::RuntimeBuilder::>::new() .with_engine(state_ro.engine().clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_ro.config().executor_runtime) + .with_config(state_ro.world().parameters().executor) .build()?; runtime.execute_executor_validate_query( @@ -256,7 +257,7 @@ impl Executor { let runtime = wasm::RuntimeBuilder::::new() .with_engine(state_transaction.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_transaction.config.executor_runtime) + .with_config(state_transaction.world().parameters().executor) .build()?; runtime diff --git a/core/src/gossiper.rs b/core/src/gossiper.rs index 4f5018aa9f6..4a08606108e 100644 --- a/core/src/gossiper.rs +++ b/core/src/gossiper.rs @@ -8,7 +8,10 @@ use iroha_p2p::Broadcast; use parity_scale_codec::{Decode, Encode}; use tokio::sync::mpsc; -use crate::{queue::Queue, state::State, tx::AcceptedTransaction, IrohaNetwork, NetworkMessage}; +use crate::{ + queue::Queue, state::State, tx::AcceptedTransaction, IrohaNetwork, NetworkMessage, + StateReadOnly, WorldReadOnly, +}; /// [`Gossiper`] actor handle. #[derive(Clone)] @@ -26,21 +29,18 @@ impl TransactionGossiperHandle { } } -/// Actor to gossip transactions and receive transaction gossips +/// Actor which gossips transactions and receives transaction gossips pub struct TransactionGossiper { /// Unique id of the blockchain. Used for simple replay attack protection. chain_id: ChainId, - /// The size of batch that is being gossiped. Smaller size leads - /// to longer time to synchronise, useful if you have high packet loss. - gossip_max_size: NonZeroU32, - /// The time between gossiping. More frequent gossiping shortens + /// The time between gossip messages. More frequent gossiping shortens /// the time to sync, but can overload the network. gossip_period: Duration, - /// Address of queue - queue: Arc, - /// [`iroha_p2p::Network`] actor handle + /// Maximum size of a batch that is being gossiped. Smaller size leads + /// to longer time to synchronise, useful if you have high packet loss. 
+ gossip_size: NonZeroU32, network: IrohaNetwork, - /// [`WorldState`] + queue: Arc, state: Arc, } @@ -57,7 +57,7 @@ impl TransactionGossiper { chain_id: ChainId, Config { gossip_period, - gossip_max_size, + gossip_size, }: Config, network: IrohaNetwork, queue: Arc, @@ -65,10 +65,10 @@ impl TransactionGossiper { ) -> Self { Self { chain_id, - gossip_max_size, gossip_period, - queue, + gossip_size, network, + queue, state, } } @@ -93,7 +93,7 @@ impl TransactionGossiper { fn gossip_transactions(&self) { let txs = self .queue - .n_random_transactions(self.gossip_max_size.get(), &self.state.view()); + .n_random_transactions(self.gossip_size.get(), &self.state.view()); if txs.is_empty() { return; @@ -110,7 +110,7 @@ impl TransactionGossiper { let state_view = self.state.view(); for tx in txs { - let transaction_limits = state_view.config.transaction_limits; + let transaction_limits = state_view.world().parameters().transaction; match AcceptedTransaction::accept(tx, &self.chain_id, transaction_limits) { Ok(tx) => match self.queue.push(tx, &state_view) { diff --git a/core/src/lib.rs b/core/src/lib.rs index f1104d47163..0d25bf98ca7 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -18,7 +18,6 @@ pub mod tx; use core::time::Duration; use gossiper::TransactionGossip; -use indexmap::IndexSet; use iroha_data_model::{events::EventBox, prelude::*}; use iroha_primitives::unique_vec::UniqueVec; use parity_scale_codec::{Decode, Encode}; @@ -39,9 +38,6 @@ pub type IrohaNetwork = iroha_p2p::NetworkHandle; /// Ids of peers. pub type PeersIds = UniqueVec; -/// Parameters set. -pub type Parameters = IndexSet; - /// Type of `Sender` which should be used for channels of `Event` messages. pub type EventsSender = broadcast::Sender; diff --git a/core/src/queue.rs b/core/src/queue.rs index ebea3f56634..d3ac864685b 100644 --- a/core/src/queue.rs +++ b/core/src/queue.rs @@ -284,9 +284,9 @@ impl Queue { fn collect_transactions_for_block( &self, state_view: &StateView, - max_txs_in_block: usize, + max_txs_in_block: NonZeroUsize, ) -> Vec { - let mut transactions = Vec::with_capacity(max_txs_in_block); + let mut transactions = Vec::with_capacity(max_txs_in_block.get()); self.get_transactions_for_block(state_view, max_txs_in_block, &mut transactions); transactions } @@ -297,10 +297,10 @@ impl Queue { pub fn get_transactions_for_block( &self, state_view: &StateView, - max_txs_in_block: usize, + max_txs_in_block: NonZeroUsize, transactions: &mut Vec, ) { - if transactions.len() >= max_txs_in_block { + if transactions.len() >= max_txs_in_block.get() { return; } @@ -315,7 +315,7 @@ impl Queue { transactions.iter().map(|tx| tx.as_ref().hash()).collect(); let txs = txs_from_queue .filter(|tx| !transactions_hashes.contains(&tx.as_ref().hash())) - .take(max_txs_in_block - transactions.len()); + .take(max_txs_in_block.get() - transactions.len()); transactions.extend(txs); seen_queue @@ -377,7 +377,7 @@ impl Queue { pub mod tests { use std::{str::FromStr, sync::Arc, thread, time::Duration}; - use iroha_data_model::{prelude::*, transaction::TransactionLimits}; + use iroha_data_model::{parameter::TransactionParameters, prelude::*}; use nonzero_ext::nonzero; use rand::Rng as _; use test_samples::gen_account_in; @@ -425,9 +425,9 @@ pub mod tests { TransactionBuilder::new_with_time_source(chain_id.clone(), account_id, time_source) .with_instructions(instructions) .sign(key_pair.private_key()); - let limits = TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, + let limits = TransactionParameters { + 
max_instructions: nonzero!(4096_u64), + smart_contract_size: nonzero!(1024_u64), }; AcceptedTransaction::accept(tx, &chain_id, limits).expect("Failed to accept Transaction.") } @@ -502,7 +502,7 @@ pub mod tests { #[test] async fn get_available_txs() { - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -525,7 +525,7 @@ pub mod tests { } let available = queue.collect_transactions_for_block(&state_view, max_txs_in_block); - assert_eq!(available.len(), max_txs_in_block); + assert_eq!(available.len(), max_txs_in_block.get()); } #[test] @@ -536,7 +536,9 @@ pub mod tests { let (_time_handle, time_source) = TimeSource::new_mock(Duration::default()); let tx = accepted_tx_by_someone(&time_source); let mut state_block = state.block(); - state_block.transactions.insert(tx.as_ref().hash(), 1); + state_block + .transactions + .insert(tx.as_ref().hash(), nonzero!(1_usize)); state_block.commit(); let state_view = state.view(); let queue = Queue::test(config_factory(), &time_source); @@ -552,7 +554,7 @@ pub mod tests { #[test] async fn get_tx_drop_if_in_blockchain() { - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = State::new(world_with_test_domains(), kura, query_handle); @@ -561,7 +563,9 @@ pub mod tests { let queue = Queue::test(config_factory(), &time_source); queue.push(tx.clone(), &state.view()).unwrap(); let mut state_block = state.block(); - state_block.transactions.insert(tx.as_ref().hash(), 1); + state_block + .transactions + .insert(tx.as_ref().hash(), nonzero!(1_usize)); state_block.commit(); assert_eq!( queue @@ -574,7 +578,7 @@ pub mod tests { #[test] async fn get_available_txs_with_timeout() { - let max_txs_in_block = 6; + let max_txs_in_block = nonzero!(6_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -589,7 +593,7 @@ pub mod tests { }, &time_source, ); - for _ in 0..(max_txs_in_block - 1) { + for _ in 0..(max_txs_in_block.get() - 1) { queue .push(accepted_tx_by_someone(&time_source), &state_view) .expect("Failed to push tx into queue"); @@ -623,7 +627,7 @@ pub mod tests { // Others should stay in the queue until that moment. 
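For orientation while reading these queue tests: the per-transaction limits are now the typed on-chain TransactionParameters, and the per-block cap is a NonZeroUsize. Below is a hedged sketch of how such limits can be expressed as SetParameter instructions in place of the removed NewParameter genesis entries; the import paths and the numeric payloads of the variants (for example NonZeroU64 inside BlockParameter::MaxTransactions) are assumptions inferred from how the variants are used elsewhere in this diff, not code taken from the PR.

// Hedged sketch; variant payload types and import paths are assumptions.
use std::num::NonZeroU64;

use iroha_data_model::{
    isi::SetParameter,
    parameter::{BlockParameter, Parameter, SmartContractParameter, TransactionParameter},
};
use nonzero_ext::nonzero;

fn example_set_parameters() -> Vec<SetParameter> {
    vec![
        // Stands in for the removed `?MaxTransactionsInBlock=512` genesis entry.
        SetParameter::new(Parameter::Block(BlockParameter::MaxTransactions(
            nonzero!(512_u64),
        ))),
        // Stands in for the instruction-count half of `?TransactionLimits=4096,4194304_TL`.
        SetParameter::new(Parameter::Transaction(
            TransactionParameter::MaxInstructions(nonzero!(4096_u64)),
        )),
        // Stands in for `?ExecutorFuelLimit=55000000`.
        SetParameter::new(Parameter::Executor(SmartContractParameter::Fuel(
            NonZeroU64::new(55_000_000).expect("value is non-zero"),
        ))),
    ]
}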
#[test] async fn transactions_available_after_pop() { - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -656,7 +660,7 @@ pub mod tests { let chain_id = ChainId::from("00000000-0000-0000-0000-000000000000"); - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let (alice_id, alice_keypair) = gen_account_in("wonderland"); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); @@ -674,9 +678,9 @@ pub mod tests { .with_instructions(instructions); tx.set_ttl(Duration::from_millis(TTL_MS)); let tx = tx.sign(alice_keypair.private_key()); - let limits = TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, + let limits = TransactionParameters { + max_instructions: nonzero!(4096_u64), + smart_contract_size: nonzero!(1024_u64), }; let tx_hash = tx.hash(); let tx = AcceptedTransaction::accept(tx, &chain_id, limits) @@ -715,7 +719,7 @@ pub mod tests { #[test] async fn concurrent_stress_test() { - let max_txs_in_block = 10; + let max_txs_in_block = nonzero!(10_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -763,7 +767,9 @@ pub mod tests { for tx in queue.collect_transactions_for_block(&state.view(), max_txs_in_block) { let mut state_block = state.block(); - state_block.transactions.insert(tx.as_ref().hash(), 1); + state_block + .transactions + .insert(tx.as_ref().hash(), nonzero!(1_usize)); state_block.commit(); } // Simulate random small delays @@ -881,18 +887,18 @@ pub mod tests { ) .expect("Failed to push tx into queue"); - let transactions = queue.collect_transactions_for_block(&state.view(), 10); + let transactions = queue.collect_transactions_for_block(&state.view(), nonzero!(10_usize)); assert_eq!(transactions.len(), 2); let mut state_block = state.block(); for transaction in transactions { // Put transaction hashes into state as if they were in the blockchain state_block .transactions - .insert(transaction.as_ref().hash(), 1); + .insert(transaction.as_ref().hash(), nonzero!(1_usize)); } state_block.commit(); // Cleanup transactions - let transactions = queue.collect_transactions_for_block(&state.view(), 10); + let transactions = queue.collect_transactions_for_block(&state.view(), nonzero!(10_usize)); assert!(transactions.is_empty()); // After cleanup Alice and Bob pushes should work fine diff --git a/core/src/smartcontracts/isi/account.rs b/core/src/smartcontracts/isi/account.rs index 46870b8254a..4278dd67229 100644 --- a/core/src/smartcontracts/isi/account.rs +++ b/core/src/smartcontracts/isi/account.rs @@ -174,21 +174,14 @@ pub mod isi { ) -> Result<(), Error> { let account_id = self.object; - let account_metadata_limits = state_transaction.config.account_metadata_limits; - state_transaction .world .account_mut(&account_id) .map_err(Error::from) - .and_then(|account| { + .map(|account| { account .metadata - .insert_with_limits( - self.key.clone(), - self.value.clone(), - account_metadata_limits, - ) - .map_err(Error::from) + .insert(self.key.clone(), self.value.clone()) })?; state_transaction diff --git a/core/src/smartcontracts/isi/asset.rs b/core/src/smartcontracts/isi/asset.rs index 18cdedfae47..d3221810474 100644 --- a/core/src/smartcontracts/isi/asset.rs +++ 
b/core/src/smartcontracts/isi/asset.rs @@ -62,21 +62,16 @@ pub mod isi { .increase_asset_total_amount(&asset_id.definition, Numeric::ONE)?; } - let asset_metadata_limits = state_transaction.config.asset_metadata_limits; let asset = state_transaction .world - .asset_or_insert(asset_id.clone(), Metadata::new())?; + .asset_or_insert(asset_id.clone(), Metadata::default())?; { let AssetValue::Store(store) = &mut asset.value else { return Err(Error::Conversion("Expected store asset type".to_owned())); }; - store.insert_with_limits( - self.key.clone(), - self.value.clone(), - asset_metadata_limits, - )?; + store.insert(self.key.clone(), self.value.clone()); } state_transaction diff --git a/core/src/smartcontracts/isi/domain.rs b/core/src/smartcontracts/isi/domain.rs index b1752581779..81d80232226 100644 --- a/core/src/smartcontracts/isi/domain.rs +++ b/core/src/smartcontracts/isi/domain.rs @@ -134,11 +134,6 @@ pub mod isi { state_transaction: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> { let asset_definition = self.object.build(authority); - asset_definition - .id() - .name - .validate_len(state_transaction.config.ident_length_limits) - .map_err(Error::from)?; let asset_definition_id = asset_definition.id().clone(); let domain = state_transaction @@ -237,16 +232,14 @@ pub mod isi { ) -> Result<(), Error> { let asset_definition_id = self.object; - let metadata_limits = state_transaction.config.asset_definition_metadata_limits; state_transaction .world .asset_definition_mut(&asset_definition_id) .map_err(Error::from) - .and_then(|asset_definition| { + .map(|asset_definition| { asset_definition .metadata - .insert_with_limits(self.key.clone(), self.value.clone(), metadata_limits) - .map_err(Error::from) + .insert(self.key.clone(), self.value.clone()) })?; state_transaction @@ -305,12 +298,8 @@ pub mod isi { ) -> Result<(), Error> { let domain_id = self.object; - let limits = state_transaction.config.domain_metadata_limits; - let domain = state_transaction.world.domain_mut(&domain_id)?; - domain - .metadata - .insert_with_limits(self.key.clone(), self.value.clone(), limits)?; + domain.metadata.insert(self.key.clone(), self.value.clone()); state_transaction .world diff --git a/core/src/smartcontracts/isi/mod.rs b/core/src/smartcontracts/isi/mod.rs index deb2c4d0657..156473d0a7d 100644 --- a/core/src/smartcontracts/isi/mod.rs +++ b/core/src/smartcontracts/isi/mod.rs @@ -54,7 +54,6 @@ impl Execute for InstructionBox { Self::Revoke(isi) => isi.execute(authority, state_transaction), Self::ExecuteTrigger(isi) => isi.execute(authority, state_transaction), Self::SetParameter(isi) => isi.execute(authority, state_transaction), - Self::NewParameter(isi) => isi.execute(authority, state_transaction), Self::Upgrade(isi) => isi.execute(authority, state_transaction), Self::Log(isi) => isi.execute(authority, state_transaction), Self::Custom(_) => { @@ -442,7 +441,7 @@ mod tests { let tx = TransactionBuilder::new(chain_id.clone(), SAMPLE_GENESIS_ACCOUNT_ID.clone()) .with_instructions(instructions) .sign(SAMPLE_GENESIS_ACCOUNT_KEYPAIR.private_key()); - let tx_limits = state_block.transaction_executor().transaction_limits; + let tx_limits = state_block.transaction_executor().limits; assert!(matches!( AcceptedTransaction::accept(tx, &chain_id, tx_limits), Err(AcceptTransactionFail::UnexpectedGenesisAccountSignature) diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index fcc0358ed8e..4702533ad8c 100644 --- a/core/src/smartcontracts/isi/query.rs +++ 
b/core/src/smartcontracts/isi/query.rs @@ -169,6 +169,7 @@ impl_lazy! { iroha_data_model::query::TransactionQueryOutput, iroha_data_model::executor::ExecutorDataModel, iroha_data_model::trigger::Trigger, + iroha_data_model::parameter::Parameters, } /// Query Request statefully validated on the Iroha node side. @@ -256,6 +257,7 @@ impl ValidQuery for QueryBox { FindAssetDefinitionKeyValueByIdAndKey, FindTriggerKeyValueByIdAndKey, FindExecutorDataModel, + FindAllParameters, } FindAllAccounts, @@ -281,7 +283,6 @@ impl ValidQuery for QueryBox { FindAllRoles, FindAllRoleIds, FindRolesByAccountId, - FindAllParameters, } } } @@ -291,8 +292,9 @@ mod tests { use std::str::FromStr as _; use iroha_crypto::{Hash, HashOf, KeyPair}; - use iroha_data_model::{query::error::FindError, transaction::TransactionLimits}; + use iroha_data_model::{parameter::TransactionParameters, query::error::FindError}; use iroha_primitives::json::JsonString; + use nonzero_ext::nonzero; use test_samples::{gen_account_in, ALICE_ID, ALICE_KEYPAIR}; use tokio::test; @@ -330,12 +332,11 @@ mod tests { ) .is_none()); - let mut store = Metadata::new(); + let mut store = Metadata::default(); store - .insert_with_limits( + .insert( Name::from_str("Bytes").expect("Valid"), vec![1_u32, 2_u32, 3_u32], - MetadataLimits::new(10, 100), ) .unwrap(); let asset_id = AssetId::new(asset_definition_id, account.id().clone()); @@ -346,12 +347,8 @@ mod tests { } fn world_with_test_account_with_metadata() -> Result { - let mut metadata = Metadata::new(); - metadata.insert_with_limits( - Name::from_str("Bytes")?, - vec![1_u32, 2_u32, 3_u32], - MetadataLimits::new(10, 100), - )?; + let mut metadata = Metadata::default(); + metadata.insert(Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32]); let mut domain = Domain::new(DomainId::from_str("wonderland")?).build(&ALICE_ID); let account = Account::new(ALICE_ID.clone()) @@ -376,16 +373,16 @@ mod tests { let state = State::new(world_with_test_domains(), kura.clone(), query_handle); { let mut state_block = state.block(); - let limits = TransactionLimits { - max_instruction_number: 1, - max_wasm_size_bytes: 0, + let limits = TransactionParameters { + max_instructions: nonzero!(1000_u64), + smart_contract_size: nonzero!(1024_u64), }; - let huge_limits = TransactionLimits { - max_instruction_number: 1000, - max_wasm_size_bytes: 0, + let huge_limits = TransactionParameters { + max_instructions: nonzero!(1000_u64), + smart_contract_size: nonzero!(1024_u64), }; - state_block.config.transaction_limits = limits; + state_block.world.parameters.transaction = limits; let valid_tx = { let instructions: [InstructionBox; 0] = []; @@ -554,7 +551,7 @@ mod tests { .with_instructions(instructions) .sign(ALICE_KEYPAIR.private_key()); - let tx_limits = state_block.transaction_executor().transaction_limits; + let tx_limits = state_block.transaction_executor().limits; let va_tx = AcceptedTransaction::accept(tx, &chain_id, tx_limits)?; let (peer_public_key, _) = KeyPair::random().into_parts(); @@ -599,12 +596,8 @@ mod tests { async fn domain_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let state = { - let mut metadata = Metadata::new(); - metadata.insert_with_limits( - Name::from_str("Bytes")?, - vec![1_u32, 2_u32, 3_u32], - MetadataLimits::new(10, 100), - )?; + let mut metadata = Metadata::default(); + metadata.insert(Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32]); let mut domain = Domain::new(DomainId::from_str("wonderland")?) 
.with_metadata(metadata) .build(&ALICE_ID); diff --git a/core/src/smartcontracts/isi/triggers/mod.rs b/core/src/smartcontracts/isi/triggers/mod.rs index 57e82ed0994..03e15e11d27 100644 --- a/core/src/smartcontracts/isi/triggers/mod.rs +++ b/core/src/smartcontracts/isi/triggers/mod.rs @@ -213,18 +213,15 @@ pub mod isi { ) -> Result<(), Error> { let trigger_id = self.object; - let trigger_metadata_limits = state_transaction.config.account_metadata_limits; state_transaction .world .triggers .inspect_by_id_mut(&trigger_id, |action| { - action.metadata_mut().insert_with_limits( - self.key.clone(), - self.value.clone(), - trigger_metadata_limits, - ) + action + .metadata_mut() + .insert(self.key.clone(), self.value.clone()) }) - .ok_or(FindError::Trigger(trigger_id.clone()))??; + .ok_or(FindError::Trigger(trigger_id.clone()))?; state_transaction .world diff --git a/core/src/smartcontracts/isi/triggers/specialized.rs b/core/src/smartcontracts/isi/triggers/specialized.rs index 5409cf8a070..a3deafa5d83 100644 --- a/core/src/smartcontracts/isi/triggers/specialized.rs +++ b/core/src/smartcontracts/isi/triggers/specialized.rs @@ -45,7 +45,7 @@ impl SpecializedAction { // TODO: At this point the authority is meaningless. authority, filter, - metadata: Metadata::new(), + metadata: Metadata::default(), } } } diff --git a/core/src/smartcontracts/isi/world.rs b/core/src/smartcontracts/isi/world.rs index 5e26fb4e2bf..ff65fdee954 100644 --- a/core/src/smartcontracts/isi/world.rs +++ b/core/src/smartcontracts/isi/world.rs @@ -20,11 +20,12 @@ pub mod isi { use eyre::Result; use iroha_data_model::{ isi::error::{InstructionExecutionError, InvalidParameterError, RepetitionError}, + parameter::{CustomParameter, Parameter}, prelude::*, query::error::FindError, Level, }; - use iroha_primitives::unique_vec::PushResult; + use iroha_primitives::{json::JsonString, unique_vec::PushResult}; use super::*; @@ -84,11 +85,6 @@ pub mod isi { let domain: Domain = self.object.build(authority); let domain_id = domain.id().clone(); - domain_id - .name - .validate_len(state_transaction.config.ident_length_limits) - .map_err(Error::from)?; - if domain_id == *iroha_genesis::GENESIS_DOMAIN_ID { return Err(InstructionExecutionError::InvariantViolation( "Not allowed to register genesis domain".to_owned(), @@ -105,7 +101,6 @@ pub mod isi { } world.domains.insert(domain_id, domain.clone()); - world.emit_events(Some(DomainEvent::Created(domain))); Ok(()) @@ -306,43 +301,70 @@ pub mod isi { _authority: &AccountId, state_transaction: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> { - let parameter = self.parameter; - let parameter_id = parameter.id.clone(); - - if !state_transaction.world.parameters.swap_remove(¶meter) { - return Err(FindError::Parameter(parameter_id).into()); + macro_rules! set_parameter { + ($($container:ident($param:ident.$field:ident) => $single:ident::$variant:ident),* $(,)?) 
=> { + match self.0 { $( + Parameter::$container(iroha_data_model::parameter::$single::$variant(next)) => { + let prev = core::mem::replace( + &mut state_transaction.world.parameters.$param.$field, + next, + ); + + state_transaction.world.emit_events( + Some(ConfigurationEvent::Changed(ParameterChanged { + old_value: Parameter::$container(iroha_data_model::parameter::$single::$variant( + prev, + )), + new_value: Parameter::$container(iroha_data_model::parameter::$single::$variant( + next, + )), + })) + ); + })* + Parameter::Custom(next) => { + let prev = state_transaction + .world + .parameters + .custom + .insert(next.id.clone(), next.clone()) + .unwrap_or_else(|| { + iroha_logger::error!( + "{}: Initial parameter value not set during executor migration", + next.id + ); + + CustomParameter { + id: next.id.clone(), + payload: JsonString::default(), + } + }); + + state_transaction + .world + .emit_events(Some(ConfigurationEvent::Changed(ParameterChanged { + old_value: Parameter::Custom(prev), + new_value: Parameter::Custom(next), + }))); + } + } + }; } - state_transaction.world.parameters.insert(parameter.clone()); - state_transaction - .world - .emit_events(Some(ConfigurationEvent::Changed(parameter_id))); - state_transaction.try_apply_core_parameter(parameter); - Ok(()) - } - } + set_parameter!( + Sumeragi(sumeragi.block_time_ms) => SumeragiParameter::BlockTimeMs, + Sumeragi(sumeragi.commit_time_ms) => SumeragiParameter::CommitTimeMs, - impl Execute for NewParameter { - #[metrics(+"new_parameter")] - fn execute( - self, - _authority: &AccountId, - state_transaction: &mut StateTransaction<'_, '_>, - ) -> Result<(), Error> { - let parameter = self.parameter; - let parameter_id = parameter.id.clone(); + Block(block.max_transactions) => BlockParameter::MaxTransactions, - if !state_transaction.world.parameters.insert(parameter.clone()) { - return Err(RepetitionError { - instruction: InstructionType::NewParameter, - id: IdBox::ParameterId(parameter_id), - } - .into()); - } - state_transaction - .world - .emit_events(Some(ConfigurationEvent::Created(parameter_id))); - state_transaction.try_apply_core_parameter(parameter); + Transaction(transaction.max_instructions) => TransactionParameter::MaxInstructions, + Transaction(transaction.smart_contract_size) => TransactionParameter::SmartContractSize, + + SmartContract(smart_contract.fuel) => SmartContractParameter::Fuel, + SmartContract(smart_contract.memory) => SmartContractParameter::Memory, + + Executor(executor.fuel) => SmartContractParameter::Fuel, + Executor(executor.memory) => SmartContractParameter::Memory, + ); Ok(()) } @@ -406,7 +428,7 @@ pub mod isi { pub mod query { use eyre::Result; use iroha_data_model::{ - parameter::Parameter, + parameter::Parameters, peer::Peer, prelude::*, query::error::{FindError, QueryExecutionFail as Error}, @@ -484,11 +506,8 @@ pub mod query { impl ValidQuery for FindAllParameters { #[metrics("find_all_parameters")] - fn execute<'state>( - &self, - state_ro: &'state impl StateReadOnly, - ) -> Result + 'state>, Error> { - Ok(Box::new(state_ro.world().parameters_iter().cloned())) + fn execute(&self, state_ro: &impl StateReadOnly) -> Result { + Ok(state_ro.world().parameters().clone()) } } } diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs index 64f385c038f..733deb3ccd2 100644 --- a/core/src/smartcontracts/wasm.rs +++ b/core/src/smartcontracts/wasm.rs @@ -2,15 +2,15 @@ //! `WebAssembly` VM Smartcontracts can be written in Rust, compiled //! 
to wasm format and submitted in a transaction -use std::borrow::Borrow; +use std::{borrow::Borrow, num::NonZeroU64}; use error::*; use import::traits::{ExecuteOperations as _, GetExecutorPayloads as _, SetDataModel as _}; -use iroha_config::parameters::actual::WasmRuntime as Config; use iroha_data_model::{ account::AccountId, executor::{self, ExecutorDataModel, MigrationResult}, isi::InstructionBox, + parameter::SmartContractParameters as Config, prelude::*, query::{cursor::QueryId, QueryBox, QueryOutputBox, QueryRequest, SmartContractQuery}, smart_contract::payloads::{self, Validate}, @@ -299,12 +299,12 @@ struct LimitsExecutor { /// Number of instructions in the smartcontract instruction_count: u64, /// Max allowed number of instructions in the smartcontract - max_instruction_count: u64, + max_instruction_count: NonZeroU64, } impl LimitsExecutor { /// Create new [`LimitsExecutor`] - pub fn new(max_instruction_count: u64) -> Self { + pub fn new(max_instruction_count: NonZeroU64) -> Self { Self { instruction_count: 0, max_instruction_count, @@ -320,7 +320,7 @@ impl LimitsExecutor { pub fn check_instruction_limits(&mut self) -> Result<(), ValidationFail> { self.instruction_count += 1; - if self.instruction_count > self.max_instruction_count { + if self.instruction_count > self.max_instruction_count.get() { return Err(ValidationFail::TooComplex); } @@ -344,8 +344,14 @@ pub mod state { /// Panics if failed to convert `u32` into `usize` which should not happen /// on any supported platform pub fn store_limits_from_config(config: &Config) -> StoreLimits { + let memory_size = config + .memory + .get() + .try_into() + .expect("`SmarContractParameters::memory` exceeds usize::MAX"); + StoreLimitsBuilder::new() - .memory_size(config.max_memory.get() as usize) + .memory_size(memory_size) .instances(1) .memories(1) .tables(1) @@ -738,7 +744,7 @@ impl Runtime> { store.limiter(|s| &mut s.store_limits); store - .set_fuel(self.config.fuel_limit) + .set_fuel(self.config.fuel.get()) .expect("Wasm Runtime config is malformed, this is a bug"); store @@ -899,7 +905,7 @@ impl<'wrld, 'block: 'wrld, 'state: 'block> Runtime, authority: AccountId, bytes: impl AsRef<[u8]>, - max_instruction_count: u64, + max_instruction_count: NonZeroU64, ) -> Result<()> { let span = wasm_log_span!("Smart contract validation", %authority); let state = state::SmartContract::new( @@ -1706,6 +1712,7 @@ impl GetExport for (&wasmtime::Instance, C) { #[cfg(test)] mod tests { use iroha_data_model::query::{predicate::PredicateBox, sorting::Sorting, Pagination}; + use nonzero_ext::nonzero; use parity_scale_codec::Encode; use test_samples::gen_account_in; use tokio::test; @@ -1893,7 +1900,12 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut state.block().transaction(), authority, wat, 1); + let res = runtime.validate( + &mut state.block().transaction(), + authority, + wat, + nonzero!(1_u64), + ); if let Error::ExportFnCall(ExportFnCallError::Other(report)) = res.expect_err("Execution should fail") @@ -1942,7 +1954,12 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut state.block().transaction(), authority, wat, 1); + let res = runtime.validate( + &mut state.block().transaction(), + authority, + wat, + nonzero!(1_u64), + ); if let Error::ExportFnCall(ExportFnCallError::HostExecution(report)) = res.expect_err("Execution should fail") @@ -1986,7 +2003,12 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = 
runtime.validate(&mut state.block().transaction(), authority, wat, 1); + let res = runtime.validate( + &mut state.block().transaction(), + authority, + wat, + nonzero!(1_u64), + ); if let Error::ExportFnCall(ExportFnCallError::HostExecution(report)) = res.expect_err("Execution should fail") diff --git a/core/src/state.rs b/core/src/state.rs index 42c43d98174..2c711280929 100644 --- a/core/src/state.rs +++ b/core/src/state.rs @@ -1,15 +1,9 @@ //! This module provides the [`State`] — an in-memory representation of the current blockchain state. use std::{ - borrow::Borrow, - collections::BTreeSet, - marker::PhantomData, - num::{NonZeroU32, NonZeroUsize}, - sync::Arc, - time::Duration, + collections::BTreeSet, marker::PhantomData, num::NonZeroUsize, sync::Arc, time::Duration, }; use eyre::Result; -use iroha_config::{base::util::Bytes, parameters::actual::ChainWide as Config}; use iroha_crypto::HashOf; use iroha_data_model::{ account::AccountId, @@ -22,7 +16,7 @@ use iroha_data_model::{ }, executor::ExecutorDataModel, isi::error::{InstructionExecutionError as Error, MathError}, - parameter::{Parameter, ParameterValueBox}, + parameter::Parameters, permission::Permissions, prelude::*, query::error::{FindError, QueryExecutionFail}, @@ -62,14 +56,14 @@ use crate::{ wasm, Execute, }, tx::TransactionExecutor, - Parameters, PeersIds, + PeersIds, }; /// The global entity consisting of `domains`, `triggers` and etc. /// For example registration of domain, will have this as an ISI target. #[derive(Default, Serialize)] pub struct World { - /// Iroha config parameters. + /// Iroha on-chain parameters. pub(crate) parameters: Cell, /// Identifications of discovered trusted peers. pub(crate) trusted_peers_ids: Cell, @@ -93,8 +87,8 @@ pub struct World { /// Struct for block's aggregated changes pub struct WorldBlock<'world> { - /// Iroha config parameters. - pub(crate) parameters: CellBlock<'world, Parameters>, + /// Iroha on-chain parameters. + pub parameters: CellBlock<'world, Parameters>, /// Identifications of discovered trusted peers. pub(crate) trusted_peers_ids: CellBlock<'world, PeersIds>, /// Registered domains. @@ -119,7 +113,7 @@ pub struct WorldBlock<'world> { /// Struct for single transaction's aggregated changes pub struct WorldTransaction<'block, 'world> { - /// Iroha config parameters. + /// Iroha on-chain parameters. pub(crate) parameters: CellTransaction<'block, 'world, Parameters>, /// Identifications of discovered trusted peers. pub(crate) trusted_peers_ids: CellTransaction<'block, 'world, PeersIds>, @@ -153,7 +147,7 @@ struct TransactionEventBuffer<'block> { /// Consistent point in time view of the [`World`] pub struct WorldView<'world> { - /// Iroha config parameters. + /// Iroha on-chain parameters. pub(crate) parameters: CellView<'world, Parameters>, /// Identifications of discovered trusted peers. pub(crate) trusted_peers_ids: CellView<'world, PeersIds>, @@ -180,13 +174,11 @@ pub struct WorldView<'world> { pub struct State { /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: World, - /// Configuration of World State View. - pub config: Cell, /// Blockchain. // TODO: Cell is redundant here since block_hashes is very easy to rollback by just popping the last element pub block_hashes: Cell>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: Storage, usize>, + pub transactions: Storage, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. 
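A note on these State changes: with the separate config cell gone, chain-wide values are read through the world's on-chain parameters. A minimal sketch, assuming the accessor names exactly as they appear in this diff (a fragment, not a standalone program):

// Fragment: `state` is an iroha_core State as modified in this diff.
let view = state.view();
let tx_limits = view.world().parameters().transaction;
let consensus_estimation = view.world().parameters().sumeragi.consensus_estimation();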
#[serde(skip)] pub engine: wasmtime::Engine, @@ -207,12 +199,10 @@ pub struct State { pub struct StateBlock<'state> { /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: WorldBlock<'state>, - /// Configuration of World State View. - pub config: CellBlock<'state, Config>, /// Blockchain. pub block_hashes: CellBlock<'state, Vec>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: StorageBlock<'state, HashOf, usize>, + pub transactions: StorageBlock<'state, HashOf, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. pub engine: &'state wasmtime::Engine, @@ -229,12 +219,10 @@ pub struct StateBlock<'state> { pub struct StateTransaction<'block, 'state> { /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: WorldTransaction<'block, 'state>, - /// Configuration of World State View. - pub config: CellTransaction<'block, 'state, Config>, /// Blockchain. pub block_hashes: CellTransaction<'block, 'state, Vec>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: StorageTransaction<'block, 'state, HashOf, usize>, + pub transactions: StorageTransaction<'block, 'state, HashOf, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. pub engine: &'state wasmtime::Engine, @@ -251,12 +239,10 @@ pub struct StateTransaction<'block, 'state> { pub struct StateView<'state> { /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: WorldView<'state>, - /// Configuration of World State View. - pub config: CellView<'state, Config>, /// Blockchain. pub block_hashes: CellView<'state, Vec>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: StorageView<'state, HashOf, usize>, + pub transactions: StorageView<'state, HashOf, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. pub engine: &'state wasmtime::Engine, @@ -289,11 +275,11 @@ impl World { .into_iter() .map(|account| (account.id().clone(), account)) .collect(); - World { + Self { trusted_peers_ids: Cell::new(trusted_peers_ids), domains, accounts, - ..World::new() + ..Self::new() } } @@ -331,7 +317,7 @@ impl World { } } - /// Create point in time view of the [`World`] + /// Create point in time view of the [`Self`] pub fn view(&self) -> WorldView { WorldView { parameters: self.parameters.view(), @@ -573,26 +559,6 @@ pub trait WorldReadOnly { self.trusted_peers_ids().iter() } - /// Get all `Parameter`s registered in the world. - fn parameters_iter(&self) -> impl Iterator { - self.parameters().iter() - } - - /// Query parameter and convert it to a proper type - fn query_param, P: core::hash::Hash + Eq + ?Sized>( - &self, - param: &P, - ) -> Option - where - Parameter: Borrow
<P>
, - { - Parameters::get(self.parameters(), param) - .as_ref() - .map(|param| ¶m.val) - .cloned() - .and_then(|param_val| param_val.try_into().ok()) - } - /// Returns reference for trusted peer ids #[inline] fn peers_ids(&self) -> &PeersIds { @@ -748,7 +714,7 @@ impl WorldTransaction<'_, '_> { /// Remove all [`Role`]s from the [`Account`] pub fn remove_account_roles(&mut self, account: &AccountId) { let roles_to_remove = self - .account_roles_iter(&account) + .account_roles_iter(account) .cloned() .map(|role| RoleIdWithOwner::new(account.clone(), role.clone())) .collect::>(); @@ -899,7 +865,34 @@ impl WorldTransaction<'_, '_> { /// Set executor data model. pub fn set_executor_data_model(&mut self, executor_data_model: ExecutorDataModel) { - *self.executor_data_model.get_mut() = executor_data_model; + let prev_executor_data_model = + core::mem::replace(self.executor_data_model.get_mut(), executor_data_model); + + self.update_parameters(&prev_executor_data_model); + } + + fn update_parameters(&mut self, prev_executor_data_model: &ExecutorDataModel) { + let removed_parameters = prev_executor_data_model + .parameters + .keys() + .filter(|param_id| !self.executor_data_model.parameters.contains_key(param_id)); + let new_parameters = self + .executor_data_model + .parameters + .iter() + .filter(|(param_id, _)| !prev_executor_data_model.parameters.contains_key(param_id)); + + for param in removed_parameters { + iroha_logger::info!("{}: parameter removed", param); + self.parameters.custom.remove(param); + } + + for (param_id, param) in new_parameters { + self.parameters + .custom + .insert(param_id.clone(), param.clone()); + iroha_logger::info!("{}: parameter created", param); + } } /// Execute trigger with `trigger_id` as id and `authority` as owner @@ -978,21 +971,8 @@ impl State { #[must_use] #[inline] pub fn new(world: World, kura: Arc, query_handle: LiveQueryStoreHandle) -> Self { - // Added to remain backward compatible with other code primary in tests - Self::from_config(Config::default(), world, kura, query_handle) - } - - /// Construct [`State`] with specific [`Configuration`]. 
- #[inline] - pub fn from_config( - config: Config, - world: World, - kura: Arc, - query_handle: LiveQueryStoreHandle, - ) -> Self { Self { world, - config: Cell::new(config), transactions: Storage::new(), block_hashes: Cell::new(Vec::new()), new_tx_amounts: Arc::new(Mutex::new(Vec::new())), @@ -1006,7 +986,6 @@ impl State { pub fn block(&self) -> StateBlock<'_> { StateBlock { world: self.world.block(), - config: self.config.block(), block_hashes: self.block_hashes.block(), transactions: self.transactions.block(), engine: &self.engine, @@ -1020,7 +999,6 @@ impl State { pub fn block_and_revert(&self) -> StateBlock<'_> { StateBlock { world: self.world.block_and_revert(), - config: self.config.block_and_revert(), block_hashes: self.block_hashes.block_and_revert(), transactions: self.transactions.block_and_revert(), engine: &self.engine, @@ -1034,7 +1012,6 @@ impl State { pub fn view(&self) -> StateView<'_> { StateView { world: self.world.view(), - config: self.config.view(), block_hashes: self.block_hashes.view(), transactions: self.transactions.view(), engine: &self.engine, @@ -1049,9 +1026,8 @@ impl State { #[allow(missing_docs)] pub trait StateReadOnly { fn world(&self) -> &impl WorldReadOnly; - fn config(&self) -> &Config; fn block_hashes(&self) -> &[HashOf]; - fn transactions(&self) -> &impl StorageReadOnly, usize>; + fn transactions(&self) -> &impl StorageReadOnly, NonZeroUsize>; fn engine(&self) -> &wasmtime::Engine; fn kura(&self) -> &Kura; fn query_handle(&self) -> &LiveQueryStoreHandle; @@ -1121,8 +1097,7 @@ pub trait StateReadOnly { fn block_with_tx(&self, hash: &HashOf) -> Option> { self.transactions() .get(hash) - .and_then(|&height| NonZeroUsize::new(height)) - .and_then(|height| self.kura().get_block_by_height(height)) + .and_then(|&height| self.kura().get_block_by_height(height)) } /// Returns [`Some`] milliseconds since the genesis block was @@ -1153,7 +1128,7 @@ pub trait StateReadOnly { /// Get transaction executor fn transaction_executor(&self) -> TransactionExecutor { - TransactionExecutor::new(self.config().transaction_limits) + TransactionExecutor::new(self.world().parameters().transaction) } } @@ -1163,13 +1138,10 @@ macro_rules! 
impl_state_ro { fn world(&self) -> &impl WorldReadOnly { &self.world } - fn config(&self) -> &Config { - &self.config - } fn block_hashes(&self) -> &[HashOf] { &self.block_hashes } - fn transactions(&self) -> &impl StorageReadOnly, usize> { + fn transactions(&self) -> &impl StorageReadOnly, NonZeroUsize> { &self.transactions } fn engine(&self) -> &wasmtime::Engine { @@ -1197,7 +1169,6 @@ impl<'state> StateBlock<'state> { pub fn transaction(&mut self) -> StateTransaction<'_, 'state> { StateTransaction { world: self.world.trasaction(), - config: self.config.transaction(), block_hashes: self.block_hashes.transaction(), transactions: self.transactions.transaction(), engine: self.engine, @@ -1211,7 +1182,6 @@ impl<'state> StateBlock<'state> { pub fn commit(self) { self.transactions.commit(); self.block_hashes.commit(); - self.config.commit(); self.world.commit(); } @@ -1387,101 +1357,9 @@ impl StateTransaction<'_, '_> { pub fn apply(self) { self.transactions.apply(); self.block_hashes.apply(); - self.config.apply(); self.world.apply(); } - /// If given [`Parameter`] represents some of the core chain-wide - /// parameters ([`Config`]), apply it - pub fn try_apply_core_parameter(&mut self, parameter: Parameter) { - use iroha_data_model::parameter::default::*; - - struct Reader(Option); - - impl Reader { - fn try_and_then>( - self, - id: &str, - fun: impl FnOnce(T), - ) -> Self { - if let Some(param) = self.0 { - if param.id().name().as_ref() == id { - if let Ok(value) = param.val.try_into() { - fun(value); - } - Self(None) - } else { - Self(Some(param)) - } - } else { - Self(None) - } - } - - fn try_and_write>( - self, - id: &str, - destination: &mut T, - ) -> Self { - self.try_and_then(id, |value| { - *destination = value; - }) - } - - fn try_and_write_duration(self, id: &str, destination: &mut Duration) -> Self { - self.try_and_then(id, |value| *destination = Duration::from_millis(value)) - } - - fn try_and_write_bytes(self, id: &str, destination: &mut Bytes) -> Self { - self.try_and_then(id, |value| *destination = Bytes(value)) - } - } - - Reader(Some(parameter)) - .try_and_then(MAX_TRANSACTIONS_IN_BLOCK, |value| { - if let Some(checked) = NonZeroU32::new(value) { - self.config.max_transactions_in_block = checked; - } - }) - .try_and_write_duration(BLOCK_TIME, &mut self.config.block_time) - .try_and_write_duration(COMMIT_TIME_LIMIT, &mut self.config.commit_time) - .try_and_write( - WSV_DOMAIN_METADATA_LIMITS, - &mut self.config.domain_metadata_limits, - ) - .try_and_write( - WSV_ASSET_DEFINITION_METADATA_LIMITS, - &mut self.config.asset_definition_metadata_limits, - ) - .try_and_write( - WSV_ACCOUNT_METADATA_LIMITS, - &mut self.config.account_metadata_limits, - ) - .try_and_write( - WSV_ASSET_METADATA_LIMITS, - &mut self.config.asset_metadata_limits, - ) - .try_and_write( - WSV_TRIGGER_METADATA_LIMITS, - &mut self.config.trigger_metadata_limits, - ) - .try_and_write( - WSV_IDENT_LENGTH_LIMITS, - &mut self.config.ident_length_limits, - ) - .try_and_write( - EXECUTOR_FUEL_LIMIT, - &mut self.config.executor_runtime.fuel_limit, - ) - .try_and_write_bytes( - EXECUTOR_MAX_MEMORY, - &mut self.config.executor_runtime.max_memory, - ) - .try_and_write(WASM_FUEL_LIMIT, &mut self.config.wasm_runtime.fuel_limit) - .try_and_write_bytes(WASM_MAX_MEMORY, &mut self.config.wasm_runtime.max_memory) - .try_and_write(TRANSACTION_LIMITS, &mut self.config.transaction_limits); - } - fn process_executable(&mut self, executable: &Executable, authority: AccountId) -> Result<()> { match executable { 
Executable::Instructions(instructions) => { @@ -1489,7 +1367,7 @@ impl StateTransaction<'_, '_> { } Executable::Wasm(bytes) => { let mut wasm_runtime = wasm::RuntimeBuilder::::new() - .with_config(self.config.wasm_runtime) + .with_config(self.world().parameters().smart_contract) .with_engine(self.engine.clone()) // Cloning engine is cheap .build()?; wasm_runtime @@ -1531,7 +1409,7 @@ impl StateTransaction<'_, '_> { .expect("INTERNAL BUG: contract is not present") .clone(); let mut wasm_runtime = wasm::RuntimeBuilder::::new() - .with_config(self.config.wasm_runtime) + .with_config(self.world().parameters().smart_contract) .with_engine(self.engine.clone()) // Cloning engine is cheap .build()?; wasm_runtime @@ -1877,7 +1755,6 @@ pub(crate) mod deserialize { M: MapAccess<'de>, { let mut world = None; - let mut config = None; let mut block_hashes = None; let mut transactions = None; @@ -1893,9 +1770,6 @@ pub(crate) mod deserialize { "world" => { world = Some(map.next_value_seed(wasm_seed.cast::())?); } - "config" => { - config = Some(map.next_value()?); - } "block_hashes" => { block_hashes = Some(map.next_value()?); } @@ -1908,7 +1782,6 @@ pub(crate) mod deserialize { Ok(State { world: world.ok_or_else(|| serde::de::Error::missing_field("world"))?, - config: config.ok_or_else(|| serde::de::Error::missing_field("config"))?, block_hashes: block_hashes .ok_or_else(|| serde::de::Error::missing_field("block_hashes"))?, transactions: transactions @@ -1923,7 +1796,7 @@ pub(crate) mod deserialize { deserializer.deserialize_struct( "WorldState", - &["world", "config", "block_hashes", "transactions"], + &["world", "block_hashes", "transactions"], StateVisitor { loader: self }, ) } @@ -1932,6 +1805,8 @@ pub(crate) mod deserialize { #[cfg(test)] mod tests { + use core::num::NonZeroU64; + use iroha_data_model::block::BlockPayload; use test_samples::gen_account_in; @@ -1965,7 +1840,7 @@ mod tests { let mut block_hashes = vec![]; for i in 1..=BLOCK_CNT { let block = new_dummy_block_with_payload(|payload| { - payload.header.height = i as u64; + payload.header.height = NonZeroU64::new(i as u64).unwrap(); payload.header.prev_block_hash = block_hashes.last().copied(); }); @@ -1990,7 +1865,7 @@ mod tests { for i in 1..=BLOCK_CNT { let block = new_dummy_block_with_payload(|payload| { - payload.header.height = i as u64; + payload.header.height = NonZeroU64::new(i as u64).unwrap(); }); let _events = state_block.apply(&block).unwrap(); @@ -2001,7 +1876,7 @@ mod tests { &state_block .all_blocks() .skip(7) - .map(|block| block.header().height()) + .map(|block| block.header().height().get()) .collect::>(), &[8, 9, 10] ); diff --git a/core/src/sumeragi/main_loop.rs b/core/src/sumeragi/main_loop.rs index 856b94e4d78..f8c2d8dd17b 100644 --- a/core/src/sumeragi/main_loop.rs +++ b/core/src/sumeragi/main_loop.rs @@ -21,14 +21,6 @@ pub struct Sumeragi { pub peer_id: PeerId, /// An actor that sends events pub events_sender: EventsSender, - /// Time by which a newly created block should be committed. Prevents malicious nodes - /// from stalling the network by not participating in consensus - pub commit_time: Duration, - /// Time by which a new block should be created regardless if there were enough transactions or not. - /// Used to force block commits when there is a small influx of new transactions. 
- pub block_time: Duration, - /// The maximum number of transactions in the block - pub max_txs_in_block: usize, /// Kura instance used for IO pub kura: Arc, /// [`iroha_p2p::Network`] actor address @@ -122,12 +114,6 @@ impl Sumeragi { self.network.update_topology(UpdateTopology(peers)); } - /// The maximum time a sumeragi round can take to produce a block when - /// there are no faulty peers in the a set. - fn pipeline_time(&self) -> Duration { - self.block_time + self.commit_time - } - fn send_event(&self, event: impl Into) { let _ = self.events_sender.send(event.into()); } @@ -347,8 +333,6 @@ impl Sumeragi { let state_events = state_block.apply_without_execution(&block); - // Parameters are updated before updating public copy of sumeragi - self.update_params(&state_block); self.cache_transaction(&state_block); self.topology @@ -385,12 +369,6 @@ impl Sumeragi { self.was_commit = true; } - fn update_params(&mut self, state_block: &StateBlock<'_>) { - self.block_time = state_block.config.block_time; - self.commit_time = state_block.config.commit_time; - self.max_txs_in_block = state_block.config.max_transactions_in_block.get() as usize; - } - fn cache_transaction(&mut self, state_block: &StateBlock<'_>) { self.transaction_cache.retain(|tx| { !state_block.has_transaction(tx.as_ref().hash()) && !self.queue.is_expired(tx) @@ -406,7 +384,7 @@ impl Sumeragi { ) -> Option> { let mut state_block = state.block(); - if state_block.height() == 1 && block.header().height == 1 { + if state_block.height() == 1 && block.header().height.get() == 1 { // Consider our peer has genesis, // and some other peer has genesis and broadcast it to our peer, // then we can ignore such genesis block because we already has genesis. @@ -818,7 +796,14 @@ impl Sumeragi { #[cfg(debug_assertions)] if is_genesis_peer && self.debug_force_soft_fork { - std::thread::sleep(self.pipeline_time() * 2); + let pipeline_time = voting_block + .state_block + .world + .parameters() + .sumeragi + .pipeline_time(); + + std::thread::sleep(pipeline_time * 2); } else { let msg = BlockCommitted::from(&committed_block); self.broadcast_packet(msg); @@ -846,8 +831,17 @@ impl Sumeragi { ) { assert_eq!(self.role(), Role::Leader); - let tx_cache_full = self.transaction_cache.len() >= self.max_txs_in_block; - let deadline_reached = self.round_start_time.elapsed() > self.block_time; + let max_transactions: NonZeroUsize = state + .world + .view() + .parameters + .block + .max_transactions + .try_into() + .expect("INTERNAL BUG: transactions in block exceed usize::MAX"); + let block_time = state.world.view().parameters.sumeragi.block_time(); + let tx_cache_full = self.transaction_cache.len() >= max_transactions.get(); + let deadline_reached = self.round_start_time.elapsed() > block_time; let tx_cache_non_empty = !self.transaction_cache.is_empty(); if tx_cache_full || (deadline_reached && tx_cache_non_empty) { @@ -864,7 +858,8 @@ impl Sumeragi { .unpack(|e| self.send_event(e)); let created_in = create_block_start_time.elapsed(); - if created_in > self.pipeline_time() / 2 { + let pipeline_time = state.world.view().parameters().sumeragi.pipeline_time(); + if created_in > pipeline_time / 2 { warn!( role=%self.role(), peer_id=%self.peer_id, @@ -1010,7 +1005,7 @@ pub(crate) fn run( let mut should_sleep = false; let mut view_change_proof_chain = ProofChain::default(); // Duration after which a view change is suggested - let mut view_change_time = sumeragi.pipeline_time(); + let mut view_change_time = state.world.view().parameters().sumeragi.pipeline_time(); // 
Instant when the previous view change or round happened. let mut last_view_change_time = Instant::now(); @@ -1040,7 +1035,14 @@ pub(crate) fn run( sumeragi.queue.get_transactions_for_block( &state_view, - sumeragi.max_txs_in_block, + state + .world + .view() + .parameters + .block + .max_transactions + .try_into() + .expect("INTERNAL BUG: transactions in block exceed usize::MAX"), &mut sumeragi.transaction_cache, ); @@ -1053,7 +1055,7 @@ pub(crate) fn run( reset_state( &sumeragi.peer_id, - sumeragi.pipeline_time(), + state.world.view().parameters().sumeragi.pipeline_time(), view_change_index, &mut sumeragi.was_commit, &mut sumeragi.topology, @@ -1138,12 +1140,12 @@ pub(crate) fn run( // NOTE: View change must be periodically suggested until it is accepted. // Must be initialized to pipeline time but can increase by chosen amount - view_change_time += sumeragi.pipeline_time(); + view_change_time += state.world.view().parameters().sumeragi.pipeline_time(); } reset_state( &sumeragi.peer_id, - sumeragi.pipeline_time(), + state.world.view().parameters().sumeragi.pipeline_time(), view_change_index, &mut sumeragi.was_commit, &mut sumeragi.topology, @@ -1235,7 +1237,7 @@ enum BlockSyncError { }, BlockNotProperHeight { peer_height: usize, - block_height: usize, + block_height: NonZeroUsize, }, } @@ -1246,18 +1248,18 @@ fn handle_block_sync<'state, F: Fn(PipelineEventBox)>( genesis_account: &AccountId, handle_events: &F, ) -> Result, (SignedBlock, BlockSyncError)> { - let block_height = block + let block_height: NonZeroUsize = block .header() .height .try_into() .expect("INTERNAL BUG: Block height exceeds usize::MAX"); let state_height = state.view().height(); - let (mut state_block, soft_fork) = if state_height + 1 == block_height { + let (mut state_block, soft_fork) = if state_height + 1 == block_height.get() { // NOTE: Normal branch for adding new block on top of current (state.block(), false) - } else if state_height == block_height && block_height > 1 { + } else if state_height == block_height.get() && block_height.get() > 1 { // NOTE: Soft fork branch for replacing current block with valid one let latest_block = state @@ -1330,6 +1332,7 @@ fn handle_block_sync<'state, F: Fn(PipelineEventBox)>( #[cfg(test)] mod tests { use iroha_genesis::GENESIS_DOMAIN_ID; + use nonzero_ext::nonzero; use test_samples::gen_account_in; use tokio::test; @@ -1376,12 +1379,9 @@ mod tests { let tx = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) .with_instructions([fail_isi]) .sign(alice_keypair.private_key()); - let tx = AcceptedTransaction::accept( - tx, - chain_id, - state_block.transaction_executor().transaction_limits, - ) - .expect("Valid"); + let tx = + AcceptedTransaction::accept(tx, chain_id, state_block.transaction_executor().limits) + .expect("Valid"); // Creating a block of two identical transactions and validating it let block = BlockBuilder::new(vec![tx.clone(), tx], topology.clone(), Vec::new()) @@ -1413,7 +1413,7 @@ mod tests { let tx1 = AcceptedTransaction::accept( tx1, chain_id, - state_block.transaction_executor().transaction_limits, + state_block.transaction_executor().limits, ) .map(Into::into) .expect("Valid"); @@ -1423,7 +1423,7 @@ mod tests { let tx2 = AcceptedTransaction::accept( tx2, chain_id, - state_block.transaction_executor().transaction_limits, + state_block.transaction_executor().limits, ) .map(Into::into) .expect("Valid"); @@ -1511,20 +1511,26 @@ mod tests { // Change block height let block = clone_and_modify_payload(&block, &leader_private_key, |payload| { - 
payload.header.height = 42; + payload.header.height = nonzero!(42_u64); }); let result = handle_block_sync(&chain_id, block, &state, &genesis_public_key, &|_| {}); + assert!(matches!( result, - Err(( - _, - BlockSyncError::BlockNotProperHeight { - peer_height: 1, - block_height: 42 - } - )) - )) + Err((_, BlockSyncError::BlockNotProperHeight { .. })) + )); + if let Err(( + _, + BlockSyncError::BlockNotProperHeight { + peer_height, + block_height, + }, + )) = result + { + assert_eq!(peer_height, 1); + assert_eq!(block_height, nonzero!(42_usize)); + } } #[test] @@ -1655,19 +1661,25 @@ mod tests { // Soft-fork on genesis block is not possible let block = clone_and_modify_payload(&block, &leader_private_key, |payload| { payload.header.view_change_index = 42; - payload.header.height = 1; + payload.header.height = nonzero!(1_u64); }); let result = handle_block_sync(&chain_id, block, &state, &genesis_public_key, &|_| {}); + assert!(matches!( result, - Err(( - _, - BlockSyncError::BlockNotProperHeight { - peer_height: 1, - block_height: 1, - } - )) - )) + Err((_, BlockSyncError::BlockNotProperHeight { .. })) + )); + if let Err(( + _, + BlockSyncError::BlockNotProperHeight { + peer_height, + block_height, + }, + )) = result + { + assert_eq!(peer_height, 1); + assert_eq!(block_height, nonzero!(1_usize)); + } } } diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs index 9763fe7324c..42a7921a617 100644 --- a/core/src/sumeragi/mod.rs +++ b/core/src/sumeragi/mod.rs @@ -211,9 +211,6 @@ impl SumeragiHandle { peer_id: peer_id.clone(), queue: Arc::clone(&queue), events_sender, - commit_time: state.view().config.commit_time, - block_time: state.view().config.block_time, - max_txs_in_block: state.view().config.max_transactions_in_block.get() as usize, kura: Arc::clone(&kura), network: network.clone(), control_message_receiver, diff --git a/core/src/tx.rs b/core/src/tx.rs index 6b9f04413ed..2b841f7bba2 100644 --- a/core/src/tx.rs +++ b/core/src/tx.rs @@ -14,7 +14,7 @@ pub use iroha_data_model::prelude::*; use iroha_data_model::{ isi::error::Mismatch, query::error::FindError, - transaction::{error::TransactionLimitError, TransactionLimits, TransactionPayload}, + transaction::{error::TransactionLimitError, TransactionPayload}, }; use iroha_logger::{debug, error}; use iroha_macro::FromVariant; @@ -95,7 +95,7 @@ impl AcceptedTransaction { pub fn accept( tx: SignedTransaction, expected_chain_id: &ChainId, - limits: TransactionLimits, + limits: TransactionParameters, ) -> Result { let actual_chain_id = tx.chain(); @@ -112,13 +112,19 @@ impl AcceptedTransaction { match &tx.instructions() { Executable::Instructions(instructions) => { - let instruction_count = instructions.len(); - if Self::len_u64(instruction_count) > limits.max_instruction_number { + let instruction_limit = limits + .max_instructions + .get() + .try_into() + .expect("INTERNAL BUG: max instructions exceeds usize::MAX"); + + if instructions.len() > instruction_limit { return Err(AcceptTransactionFail::TransactionLimit( TransactionLimitError { reason: format!( "Too many instructions in payload, max number is {}, but got {}", - limits.max_instruction_number, instruction_count + limits.max_instructions, + instructions.len() ), }, )); @@ -129,13 +135,21 @@ impl AcceptedTransaction { // // Should we allow infinite instructions in wasm? 
And deny only based on fuel and size Executable::Wasm(smart_contract) => { - let size_bytes = Self::len_u64(smart_contract.size_bytes()); - let max_wasm_size_bytes = limits.max_wasm_size_bytes; + let smart_contract_size_limit = limits + .smart_contract_size + .get() + .try_into() + .expect("INTERNAL BUG: smart contract size exceeds usize::MAX"); - if size_bytes > max_wasm_size_bytes { + if smart_contract.size_bytes() > smart_contract_size_limit { return Err(AcceptTransactionFail::TransactionLimit( TransactionLimitError { - reason: format!("Wasm binary too large, max size is {max_wasm_size_bytes}, but got {size_bytes}"), + reason: format!( + "WASM binary size is too large: max {}, got {} \ + (configured by \"Parameter::SmartContractLimits\")", + limits.smart_contract_size, + smart_contract.size_bytes() + ), }, )); } @@ -144,11 +158,6 @@ impl AcceptedTransaction { Ok(Self(tx)) } - - #[inline] - fn len_u64(instruction_count: usize) -> u64 { - u64::try_from(instruction_count).expect("`usize` should always fit into `u64`") - } } impl From for SignedTransaction { @@ -174,14 +183,16 @@ impl AsRef for AcceptedTransaction { /// Validation is skipped for genesis. #[derive(Clone, Copy)] pub struct TransactionExecutor { - /// [`TransactionLimits`] field - pub transaction_limits: TransactionLimits, + /// [`TransactionParameters`] field + pub limits: TransactionParameters, } impl TransactionExecutor { /// Construct [`TransactionExecutor`] - pub fn new(transaction_limits: TransactionLimits) -> Self { - Self { transaction_limits } + pub fn new(transaction_limits: TransactionParameters) -> Self { + Self { + limits: transaction_limits, + } } /// Move transaction lifecycle forward by checking if the @@ -244,7 +255,7 @@ impl TransactionExecutor { state_transaction, authority, wasm, - self.transaction_limits.max_instruction_number, + self.limits.max_instructions, ) }) .map_err(|error| WasmExecutionFail { diff --git a/core/test_network/src/lib.rs b/core/test_network/src/lib.rs index 61d30429024..54e4fc01dc3 100644 --- a/core/test_network/src/lib.rs +++ b/core/test_network/src/lib.rs @@ -665,13 +665,9 @@ impl PeerBuilder { /// Create and start a peer, create a client and connect it to the peer and return both. pub async fn start_with_client(self) -> (Peer, Client) { - let config = self.config.clone().unwrap_or_else(Config::test); - let peer = self.start().await; - let client = Client::test(&peer.api_address); - - time::sleep(config.chain_wide.pipeline_time()).await; + time::sleep(::pipeline_time()).await; (peer, client) } @@ -818,7 +814,8 @@ impl TestConfig for Config { } fn pipeline_time() -> Duration { - Self::test().chain_wide.pipeline_time() + let defaults = iroha_data_model::parameter::SumeragiParameters::default(); + defaults.block_time() + defaults.commit_time() } fn block_sync_gossip_time() -> Duration { diff --git a/data_model/derive/src/id.rs b/data_model/derive/src/id.rs index c9a64c64537..baaa45daa31 100644 --- a/data_model/derive/src/id.rs +++ b/data_model/derive/src/id.rs @@ -85,29 +85,29 @@ pub fn impl_id_eq_ord_hash(emitter: &mut Emitter, input: &syn::DeriveInput) -> T quote! 
{ #identifiable_derive - impl #impl_generics ::core::cmp::PartialOrd for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::cmp::PartialOrd for #name #ty_generics #where_clause where Self: crate::Identifiable { #[inline] fn partial_cmp(&self, other: &Self) -> Option<::core::cmp::Ordering> { Some(self.cmp(other)) } } - impl #impl_generics ::core::cmp::Ord for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::cmp::Ord for #name #ty_generics #where_clause where Self: crate::Identifiable { fn cmp(&self, other: &Self) -> ::core::cmp::Ordering { - self.id().cmp(other.id()) + ::id(self).cmp(::id(other)) } } - impl #impl_generics ::core::cmp::Eq for #name #ty_generics #where_clause where Self: Identifiable {} - impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::cmp::Eq for #name #ty_generics #where_clause where Self: crate::Identifiable {} + impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause where Self: crate::Identifiable { fn eq(&self, other: &Self) -> bool { - self.id() == other.id() + ::id(self) == ::id(other) } } - impl #impl_generics ::core::hash::Hash for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::hash::Hash for #name #ty_generics #where_clause where Self: crate::Identifiable { fn hash(&self, state: &mut H) { - self.id().hash(state); + ::id(self).hash(state) } } } @@ -119,7 +119,7 @@ fn derive_identifiable(emitter: &mut Emitter, input: &IdDeriveInput) -> TokenStr let (id_type, id_expr) = get_id_type(emitter, input); quote! { - impl #impl_generics Identifiable for #name #ty_generics #where_clause { + impl #impl_generics crate::Identifiable for #name #ty_generics #where_clause { type Id = #id_type; #[inline] @@ -142,8 +142,8 @@ fn get_id_type(emitter: &mut Emitter, input: &IdDeriveInput) -> (syn::Type, syn: } IdAttr::Transparent => { return ( - parse_quote! {<#ty as Identifiable>::Id}, - parse_quote! {Identifiable::id(&self.#field_name)}, + parse_quote! {<#ty as crate::Identifiable>::Id}, + parse_quote! 
{crate::Identifiable::id(&self.#field_name)}, ); } IdAttr::Missing => { diff --git a/data_model/derive/tests/has_origin_generics.rs b/data_model/derive/tests/has_origin_generics.rs index a1090a312cc..69724714bb1 100644 --- a/data_model/derive/tests/has_origin_generics.rs +++ b/data_model/derive/tests/has_origin_generics.rs @@ -16,12 +16,6 @@ struct Object { id: ObjectId, } -impl Object { - fn id(&self) -> &ObjectId { - &self.id - } -} - #[allow(clippy::enum_variant_names)] // it's a test, duh #[derive(Debug, HasOrigin)] #[has_origin(origin = Object)] diff --git a/data_model/src/account.rs b/data_model/src/account.rs index 6d75fc54358..c8daf5c8cb3 100644 --- a/data_model/src/account.rs +++ b/data_model/src/account.rs @@ -4,7 +4,6 @@ use alloc::{format, string::String, vec::Vec}; use core::str::FromStr; use derive_more::{Constructor, DebugCustom, Display}; -use getset::Getters; use iroha_data_model_derive::{model, IdEqOrdHash}; use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; @@ -16,11 +15,13 @@ use crate::{ asset::{Asset, AssetDefinitionId, AssetsMap}, domain::prelude::*, metadata::Metadata, - HasMetadata, Identifiable, ParseError, PublicKey, Registered, + HasMetadata, ParseError, PublicKey, Registered, }; #[model] mod model { + use getset::Getters; + use super::*; /// Identification of [`Account`] by the combination of the [`PublicKey`] as its sole signatory and the [`Domain`](crate::domain::Domain) it belongs to. @@ -66,16 +67,7 @@ mod model { /// Account entity is an authority which is used to execute `Iroha Special Instructions`. #[derive( - Debug, - Display, - Clone, - IdEqOrdHash, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, + Debug, Display, Clone, IdEqOrdHash, Decode, Encode, Deserialize, Serialize, IntoSchema, )] #[allow(clippy::multiple_inherent_impl)] #[display(fmt = "({id})")] // TODO: Add more? diff --git a/data_model/src/asset.rs b/data_model/src/asset.rs index 005911bf1f5..7e4e1c43f9b 100644 --- a/data_model/src/asset.rs +++ b/data_model/src/asset.rs @@ -7,7 +7,6 @@ use core::{fmt, str::FromStr}; use std::collections::btree_map; use derive_more::{Constructor, DebugCustom, Display}; -use getset::{CopyGetters, Getters}; use iroha_data_model_derive::{model, IdEqOrdHash}; use iroha_primitives::numeric::{Numeric, NumericSpec, NumericSpecParseError}; use iroha_schema::IntoSchema; @@ -17,8 +16,8 @@ use serde_with::{DeserializeFromStr, SerializeDisplay}; pub use self::model::*; use crate::{ - account::prelude::*, domain::prelude::*, ipfs::IpfsPath, metadata::Metadata, HasMetadata, - Identifiable, Name, ParseError, Registered, + account::prelude::*, domain::prelude::*, ipfs::IpfsPath, metadata::Metadata, HasMetadata, Name, + ParseError, Registered, }; /// API to work with collections of [`Id`] : [`Asset`] mappings. 
@@ -34,6 +33,7 @@ pub type AssetTotalQuantityMap = btree_map::BTreeMap #[model] mod model { + use getset::{CopyGetters, Getters}; use iroha_macro::FromVariant; use super::*; diff --git a/data_model/src/block.rs b/data_model/src/block.rs index 7a3f6781453..f355b462a4b 100644 --- a/data_model/src/block.rs +++ b/data_model/src/block.rs @@ -14,6 +14,7 @@ use iroha_data_model_derive::model; use iroha_macro::FromVariant; use iroha_schema::IntoSchema; use iroha_version::{declare_versioned, version_with_scale}; +use nonzero_ext::nonzero; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; @@ -22,6 +23,8 @@ use crate::{events::prelude::*, peer, peer::PeerId, transaction::prelude::*}; #[model] mod model { + use core::num::NonZeroU64; + use getset::{CopyGetters, Getters}; use super::*; @@ -52,7 +55,7 @@ mod model { pub struct BlockHeader { /// Number of blocks in the chain including this block. #[getset(get_copy = "pub")] - pub height: u64, + pub height: NonZeroU64, /// Hash of the previous block in the chain. #[getset(get_copy = "pub")] pub prev_block_hash: Option>, @@ -143,7 +146,7 @@ impl BlockHeader { #[inline] #[cfg(feature = "transparent_api")] pub const fn is_genesis(&self) -> bool { - self.height == 1 + self.height.get() == 1 } /// Creation timestamp @@ -294,7 +297,7 @@ impl SignedBlock { let creation_time_ms = u64::try_from(first_transaction.creation_time().as_millis()) .expect("Must fit since Duration was created from u64 in creation_time()"); let header = BlockHeader { - height: 1, + height: nonzero!(1_u64), prev_block_hash: None, transactions_hash, creation_time_ms, @@ -345,7 +348,7 @@ mod candidate { fn validate(self) -> Result { self.validate_signatures()?; self.validate_header()?; - if self.payload.header.height == 1 { + if self.payload.header.height.get() == 1 { self.validate_genesis()?; } @@ -390,7 +393,7 @@ mod candidate { } fn validate_signatures(&self) -> Result<(), &'static str> { - if self.signatures.is_empty() && self.payload.header.height != 1 { + if self.signatures.is_empty() && self.payload.header.height.get() != 1 { return Err("Block missing signatures"); } diff --git a/data_model/src/domain.rs b/data_model/src/domain.rs index db01cd686c1..fba99212f45 100644 --- a/data_model/src/domain.rs +++ b/data_model/src/domain.rs @@ -4,7 +4,6 @@ use alloc::{format, string::String, vec::Vec}; use derive_more::{Constructor, Display, FromStr}; -use getset::Getters; use iroha_data_model_derive::{model, IdEqOrdHash}; use iroha_primitives::numeric::Numeric; use iroha_schema::IntoSchema; @@ -23,6 +22,8 @@ use crate::{ #[model] mod model { + use getset::Getters; + use super::*; /// Identification of a [`Domain`]. 
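Reviewer note (illustration, not part of the diff): block heights move from plain u64 to NonZeroU64 in this change, so call sites either spell constant heights with the nonzero! macro from nonzero_ext (as the block.rs hunk above does for genesis) or run a checked conversion on runtime values. A minimal sketch of both patterns, assuming only core::num::NonZeroU64 and the nonzero_ext crate already used above:

use core::num::NonZeroU64;
use nonzero_ext::nonzero;

// Constant heights (e.g. genesis) use the macro, which rejects a zero literal at compile time.
fn genesis_height() -> NonZeroU64 {
    nonzero!(1_u64)
}

// Heights received at runtime need a checked conversion instead.
fn parse_height(raw: u64) -> Option<NonZeroU64> {
    NonZeroU64::new(raw)
}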
diff --git a/data_model/src/events/data/events.rs b/data_model/src/events/data/events.rs index 98c703f3fa9..92a841a0e61 100644 --- a/data_model/src/events/data/events.rs +++ b/data_model/src/events/data/events.rs @@ -128,16 +128,16 @@ mod asset { pub enum AssetDefinitionEvent { #[has_origin(asset_definition => asset_definition.id())] Created(AssetDefinition), - MintabilityChanged(AssetDefinitionId), - #[has_origin(ownership_changed => &ownership_changed.asset_definition)] - OwnerChanged(AssetDefinitionOwnerChanged), Deleted(AssetDefinitionId), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataInserted(AssetDefinitionMetadataChanged), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataRemoved(AssetDefinitionMetadataChanged), + MintabilityChanged(AssetDefinitionId), #[has_origin(total_quantity_changed => &total_quantity_changed.asset_definition)] TotalQuantityChanged(AssetDefinitionTotalQuantityChanged), + #[has_origin(ownership_changed => &ownership_changed.asset_definition)] + OwnerChanged(AssetDefinitionOwnerChanged), } } @@ -243,14 +243,12 @@ mod role { #[has_origin(role => role.id())] Created(Role), Deleted(RoleId), - /// [`Permission`]s with particular [`Permission`] - /// were removed from the role. - #[has_origin(permission_removed => &permission_removed.role)] - PermissionRemoved(RolePermissionChanged), - /// [`Permission`]s with particular [`Permission`] - /// were removed added to the role. + /// [`Permission`] were added to the role. #[has_origin(permission_added => &permission_added.role)] PermissionAdded(RolePermissionChanged), + /// [`Permission`] were removed from the role. + #[has_origin(permission_removed => &permission_removed.role)] + PermissionRemoved(RolePermissionChanged), } } @@ -297,21 +295,19 @@ mod account { data_event! { #[has_origin(origin = Account)] pub enum AccountEvent { - #[has_origin(asset_event => &asset_event.origin().account)] - Asset(AssetEvent), #[has_origin(account => account.id())] Created(Account), Deleted(AccountId), - AuthenticationAdded(AccountId), - AuthenticationRemoved(AccountId), + #[has_origin(asset_event => &asset_event.origin().account)] + Asset(AssetEvent), #[has_origin(permission_changed => &permission_changed.account)] PermissionAdded(AccountPermissionChanged), #[has_origin(permission_changed => &permission_changed.account)] PermissionRemoved(AccountPermissionChanged), #[has_origin(role_changed => &role_changed.account)] - RoleRevoked(AccountRoleChanged), - #[has_origin(role_changed => &role_changed.account)] RoleGranted(AccountRoleChanged), + #[has_origin(role_changed => &role_changed.account)] + RoleRevoked(AccountRoleChanged), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataInserted(AccountMetadataChanged), #[has_origin(metadata_changed => &metadata_changed.target)] @@ -389,13 +385,13 @@ mod domain { data_event! 
{ #[has_origin(origin = Domain)] pub enum DomainEvent { - #[has_origin(account_event => &account_event.origin().domain)] - Account(AccountEvent), - #[has_origin(asset_definition_event => &asset_definition_event.origin().domain)] - AssetDefinition(AssetDefinitionEvent), #[has_origin(domain => domain.id())] Created(Domain), Deleted(DomainId), + #[has_origin(asset_definition_event => &asset_definition_event.origin().domain)] + AssetDefinition(AssetDefinitionEvent), + #[has_origin(account_event => &account_event.origin().domain)] + Account(AccountEvent), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataInserted(DomainMetadataChanged), #[has_origin(metadata_changed => &metadata_changed.target)] @@ -488,14 +484,54 @@ mod trigger { } mod config { + pub use self::model::*; use super::*; + use crate::parameter::Parameter; - data_event! { - #[has_origin(origin = Parameter)] + #[model] + mod model { + use super::*; + + /// Changed parameter event + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type] + pub struct ParameterChanged { + /// Previous value for the parameter + pub old_value: Parameter, + /// Next value for the parameter + pub new_value: Parameter, + } + + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + EventSet, + FromVariant, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type] pub enum ConfigurationEvent { - Changed(ParameterId), - Created(ParameterId), - Deleted(ParameterId), + Changed(ParameterChanged), } } } @@ -630,7 +666,7 @@ pub mod prelude { AssetDefinitionOwnerChanged, AssetDefinitionTotalQuantityChanged, AssetEvent, AssetEventSet, }, - config::{ConfigurationEvent, ConfigurationEventSet}, + config::{ConfigurationEvent, ConfigurationEventSet, ParameterChanged}, domain::{DomainEvent, DomainEventSet, DomainOwnerChanged}, executor::{ExecutorEvent, ExecutorEventSet, ExecutorUpgrade}, peer::{PeerEvent, PeerEventSet}, diff --git a/data_model/src/events/data/filters.rs b/data_model/src/events/data/filters.rs index 86bb1a886f3..0bda7058181 100644 --- a/data_model/src/events/data/filters.rs +++ b/data_model/src/events/data/filters.rs @@ -223,8 +223,6 @@ mod model { IntoSchema, )] pub struct ConfigurationEventFilter { - /// If specified matches only events originating from this configuration - pub(super) id_matcher: Option, /// Matches only event from this set pub(super) event_set: ConfigurationEventSet, } @@ -598,18 +596,10 @@ impl ConfigurationEventFilter { /// Creates a new [`ConfigurationEventFilter`] accepting all [`ConfigurationEvent`]s. pub const fn new() -> Self { Self { - id_matcher: None, event_set: ConfigurationEventSet::all(), } } - /// Modifies a [`ConfigurationEventFilter`] to accept only [`ConfigurationEvent`]s originating from ids matching `id_matcher`. - #[must_use] - pub fn for_parameter(mut self, id_matcher: ParameterId) -> Self { - self.id_matcher = Some(id_matcher); - self - } - /// Modifies a [`ConfigurationEventFilter`] to accept only [`ConfigurationEvent`]s of types matching `event_set`. 
#[must_use] pub const fn for_events(mut self, event_set: ConfigurationEventSet) -> Self { @@ -629,12 +619,6 @@ impl super::EventFilter for ConfigurationEventFilter { type Event = super::ConfigurationEvent; fn matches(&self, event: &Self::Event) -> bool { - if let Some(id_matcher) = &self.id_matcher { - if id_matcher != event.origin() { - return false; - } - } - if !self.event_set.matches(event) { return false; } diff --git a/data_model/src/events/pipeline.rs b/data_model/src/events/pipeline.rs index 6d74eec41e1..bbecb5ac47f 100644 --- a/data_model/src/events/pipeline.rs +++ b/data_model/src/events/pipeline.rs @@ -2,6 +2,7 @@ #[cfg(not(feature = "std"))] use alloc::{boxed::Box, format, string::String, vec::Vec}; +use core::num::NonZeroU64; use iroha_crypto::HashOf; use iroha_data_model_derive::model; @@ -84,7 +85,7 @@ mod model { #[getset(get = "pub")] pub hash: HashOf, #[getset(get_copy = "pub")] - pub block_height: Option, + pub block_height: Option, #[getset(get = "pub")] pub status: TransactionStatus, } @@ -181,7 +182,7 @@ mod model { #[ffi_type] pub struct BlockEventFilter { #[getset(get_copy = "pub")] - pub height: Option, + pub height: Option, #[getset(get = "pub")] pub status: Option, } @@ -205,7 +206,7 @@ mod model { pub struct TransactionEventFilter { #[getset(get = "pub")] pub hash: Option>, - pub block_height: Option>, + pub block_height: Option>, #[getset(get = "pub")] pub status: Option, } @@ -223,7 +224,7 @@ impl BlockEventFilter { /// Match only block with the given height #[must_use] - pub fn for_height(mut self, height: u64) -> Self { + pub fn for_height(mut self, height: NonZeroU64) -> Self { self.height = Some(height); self } @@ -249,7 +250,7 @@ impl TransactionEventFilter { /// Match only transactions with the given block height #[must_use] - pub fn for_block_height(mut self, block_height: Option) -> Self { + pub fn for_block_height(mut self, block_height: Option) -> Self { self.block_height = Some(block_height); self } @@ -270,7 +271,7 @@ impl TransactionEventFilter { /// Block height // TODO: Derive with getset - pub fn block_height(&self) -> Option> { + pub fn block_height(&self) -> Option> { self.block_height } } @@ -345,12 +346,13 @@ mod tests { use alloc::{string::ToString as _, vec, vec::Vec}; use iroha_crypto::Hash; + use nonzero_ext::nonzero; use super::{super::EventFilter, *}; use crate::{transaction::error::TransactionRejectionReason::*, ValidationFail}; impl BlockHeader { - fn dummy(height: u64) -> Self { + fn dummy(height: NonZeroU64) -> Self { Self { height, prev_block_hash: None, @@ -375,7 +377,7 @@ mod tests { .into(), TransactionEvent { hash: HashOf::from_untyped_unchecked(Hash::prehashed([0_u8; Hash::LENGTH])), - block_height: Some(3), + block_height: Some(nonzero!(3_u64)), status: TransactionStatus::Rejected(Box::new(Validation( ValidationFail::TooComplex, ))), @@ -388,7 +390,7 @@ mod tests { } .into(), BlockEvent { - header: BlockHeader::dummy(7), + header: BlockHeader::dummy(nonzero!(7_u64)), hash: HashOf::from_untyped_unchecked(Hash::prehashed([7_u8; Hash::LENGTH])), status: BlockStatus::Committed, } @@ -418,7 +420,7 @@ mod tests { .into(), TransactionEvent { hash: HashOf::from_untyped_unchecked(Hash::prehashed([0_u8; Hash::LENGTH])), - block_height: Some(3), + block_height: Some(nonzero!(3_u64)), status: TransactionStatus::Rejected(Box::new(Validation( ValidationFail::TooComplex, ))), @@ -439,7 +441,7 @@ mod tests { vec![BlockEvent { status: BlockStatus::Committed, hash: HashOf::from_untyped_unchecked(Hash::prehashed([7_u8; Hash::LENGTH])), - 
header: BlockHeader::dummy(7), + header: BlockHeader::dummy(nonzero!(7_u64)), } .into()], ); diff --git a/data_model/src/executor.rs b/data_model/src/executor.rs index 964d9f20d8d..5309c6ed477 100644 --- a/data_model/src/executor.rs +++ b/data_model/src/executor.rs @@ -5,20 +5,22 @@ use alloc::{collections::BTreeSet, format, string::String, vec::Vec}; #[cfg(feature = "std")] use std::collections::BTreeSet; -use derive_more::{Constructor, Display}; -use getset::Getters; use iroha_data_model_derive::model; use iroha_primitives::json::JsonString; use iroha_schema::{Ident, IntoSchema}; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; pub use self::model::*; use crate::transaction::WasmSmartContract; #[model] mod model { + use derive_more::{Constructor, Display}; + use getset::Getters; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + use super::*; + use crate::parameter::CustomParameters; /// executor that checks if an operation satisfies some conditions. /// @@ -78,19 +80,15 @@ mod model { #[ffi_type] #[display(fmt = "{self:?}")] pub struct ExecutorDataModel { - /// Permission tokens supported by the executor. - /// - /// These IDs refer to the types in the schema. + /// Corresponds to the [`Parameter::Custom`]. + /// Holds the initial value of the parameter + pub parameters: CustomParameters, + /// Corresponds to the [`InstructionBox::Custom`]. + /// Any type that implements [`Instruction`] should be listed here. + pub instructions: BTreeSet, + /// Ids of permission tokens supported by the executor. pub permissions: BTreeSet, - /// Type id in the schema. - /// Corresponds to payload of `InstructionBox::Custom`. - /// - /// Note that technically it is not needed - /// (custom instructions can be used without specifying it), - /// however it is recommended to set it, - /// so clients could retrieve it through Iroha API. - pub custom_instruction: Option, - /// Data model JSON schema, typically produced by [`IntoSchema`]. + /// Schema of executor defined data types (instructions, parameters, permissions) pub schema: JsonString, } diff --git a/data_model/src/ipfs.rs b/data_model/src/ipfs.rs index 635900ba5c2..e6dbaca5c76 100644 --- a/data_model/src/ipfs.rs +++ b/data_model/src/ipfs.rs @@ -4,18 +4,19 @@ use alloc::{format, string::String, vec::Vec}; use core::str::FromStr; -use derive_more::Display; use iroha_data_model_derive::model; use iroha_primitives::conststr::ConstString; -use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode, Input}; -use serde_with::{DeserializeFromStr, SerializeDisplay}; pub use self::model::*; use crate::ParseError; #[model] mod model { + use derive_more::Display; + use iroha_schema::IntoSchema; + use serde_with::{DeserializeFromStr, SerializeDisplay}; + use super::*; /// Represents path in IPFS. Performs checks to ensure path validity. diff --git a/data_model/src/isi.rs b/data_model/src/isi.rs index 0ceeb0589ba..67ac4876d24 100644 --- a/data_model/src/isi.rs +++ b/data_model/src/isi.rs @@ -114,8 +114,6 @@ mod model { #[debug(fmt = "{_0:?}")] SetParameter(SetParameter), #[debug(fmt = "{_0:?}")] - NewParameter(NewParameter), - #[debug(fmt = "{_0:?}")] Upgrade(Upgrade), #[debug(fmt = "{_0:?}")] Log(Log), @@ -177,7 +175,6 @@ impl_instruction! { Revoke, Revoke, SetParameter, - NewParameter, Upgrade, ExecuteTrigger, Log, @@ -253,31 +250,16 @@ mod transparent { }; } - isi! { - /// Generic instruction for setting a chain-wide config parameter. 
- #[derive(Constructor, Display)] - #[display(fmt = "SET `{parameter}`")] - #[serde(transparent)] - #[repr(transparent)] - pub struct SetParameter { - /// The configuration parameter being changed. - #[serde(flatten)] - pub parameter: Parameter, - } - } - - isi! { - /// Sized structure for all possible on-chain configuration parameters when they are first created. + iroha_data_model_derive::model_single! { /// Generic instruction for setting a chain-wide config parameter. - #[derive(Constructor, Display)] - #[display(fmt = "SET `{parameter}`")] + #[derive(Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, Constructor)] + #[derive(parity_scale_codec::Decode, parity_scale_codec::Encode)] + #[derive(serde::Deserialize, serde::Serialize)] + #[derive(iroha_schema::IntoSchema)] + #[display(fmt = "SET `{_0}`")] #[serde(transparent)] #[repr(transparent)] - pub struct NewParameter { - /// Parameter to be changed. - #[serde(flatten)] - pub parameter: Parameter, - } + pub struct SetParameter(pub Parameter); } isi! { @@ -783,7 +765,7 @@ mod transparent { pub fn asset_store(asset_id: AssetId, to: AccountId) -> Self { Self { source: asset_id, - object: Metadata::new(), + object: Metadata::default(), destination: to, } } @@ -979,14 +961,16 @@ mod transparent { } isi! { - /// Custom instruction with arbitrary payload. - /// Should be handled in custom executor, where it will be translated to usual ISIs. + /// Blockchain specific instruction (defined in the executor). /// Can be used to extend instruction set or add expression system. - /// See `executor_custom_instructions_simple` and `executor_custom_instructions_complex` - /// examples in `client/tests/integration/smartcontracts`. /// - /// Note: If using custom instructions, it is recommended - /// to set `ExecutorDataModel::custom_instruction` in custom executor `migrate` entrypoint. 
+ /// Note: If using custom instructions remember to set (during the executor migration) + /// [`ExecutorDataModel::instructions`] + /// + /// # Examples + /// + /// Check `executor_custom_instructions_simple` and `executor_custom_instructions_complex` + /// integration tests #[derive(Display)] #[display(fmt = "CUSTOM({payload})")] pub struct CustomInstruction { @@ -1236,7 +1220,6 @@ pub mod error { use super::InstructionType; use crate::{ asset::AssetType, - metadata, query::error::{FindError, QueryExecutionFail}, IdBox, }; @@ -1286,8 +1269,6 @@ pub mod error { Mintability(#[cfg_attr(feature = "std", source)] MintabilityError), /// Illegal math operation Math(#[cfg_attr(feature = "std", source)] MathError), - /// Metadata error - Metadata(#[cfg_attr(feature = "std", source)] metadata::MetadataError), /// Invalid instruction parameter InvalidParameter(#[cfg_attr(feature = "std", source)] InvalidParameterError), /// Iroha invariant violation: {0} @@ -1513,7 +1494,7 @@ pub mod error { pub mod prelude { pub use super::{ AssetTransferBox, Burn, BurnBox, CustomInstruction, ExecuteTrigger, Grant, GrantBox, - InstructionBox, Log, Mint, MintBox, NewParameter, Register, RegisterBox, RemoveKeyValue, + InstructionBox, Log, Mint, MintBox, Register, RegisterBox, RemoveKeyValue, RemoveKeyValueBox, Revoke, RevokeBox, SetKeyValue, SetKeyValueBox, SetParameter, Transfer, TransferBox, Unregister, UnregisterBox, Upgrade, }; diff --git a/data_model/src/lib.rs b/data_model/src/lib.rs index 55cf4c4828e..5b7417b53fa 100644 --- a/data_model/src/lib.rs +++ b/data_model/src/lib.rs @@ -10,19 +10,16 @@ extern crate alloc; #[cfg(not(feature = "std"))] use alloc::{boxed::Box, format, string::String, vec::Vec}; -use core::{fmt, fmt::Debug, ops::RangeInclusive, str::FromStr}; -use derive_more::{Constructor, Display, From, FromStr}; -use getset::Getters; +use derive_more::{Constructor, Display}; use iroha_crypto::PublicKey; -use iroha_data_model_derive::{model, EnumRef, IdEqOrdHash}; +use iroha_data_model_derive::{model, EnumRef}; use iroha_macro::FromVariant; use iroha_schema::IntoSchema; use iroha_version::{declare_versioned, version_with_scale}; use parity_scale_codec::{Decode, Encode}; use prelude::Executable; use serde::{Deserialize, Serialize}; -use serde_with::{DeserializeFromStr, SerializeDisplay}; use strum::FromRepr; pub use self::model::*; @@ -38,6 +35,7 @@ pub mod ipfs; pub mod isi; pub mod metadata; pub mod name; +pub mod parameter; pub mod peer; pub mod permission; pub mod query; @@ -112,7 +110,6 @@ mod seal { Revoke, SetParameter, - NewParameter, Upgrade, ExecuteTrigger, Log, @@ -182,8 +179,8 @@ pub struct EnumTryAsError { } // Manual implementation because this allow annotation does not affect `Display` derive -impl fmt::Display for EnumTryAsError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> fmt::Result { +impl core::fmt::Display for EnumTryAsError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!( f, "Expected: {}\nGot: {:?}", @@ -204,414 +201,16 @@ impl EnumTryAsError { } #[cfg(feature = "std")] -impl std::error::Error for EnumTryAsError {} - -pub mod parameter { - //! Structures, traits and impls related to `Paramater`s. 
- - use core::borrow::Borrow; - - use iroha_primitives::numeric::Numeric; - - pub use self::model::*; - use super::*; - use crate::isi::InstructionBox; - - /// Set of parameter names currently used by Iroha - #[allow(missing_docs)] - pub mod default { - pub const MAX_TRANSACTIONS_IN_BLOCK: &str = "MaxTransactionsInBlock"; - pub const BLOCK_TIME: &str = "BlockTime"; - pub const COMMIT_TIME_LIMIT: &str = "CommitTimeLimit"; - pub const TRANSACTION_LIMITS: &str = "TransactionLimits"; - pub const WSV_DOMAIN_METADATA_LIMITS: &str = "WSVDomainMetadataLimits"; - pub const WSV_ASSET_DEFINITION_METADATA_LIMITS: &str = "WSVAssetDefinitionMetadataLimits"; - pub const WSV_ACCOUNT_METADATA_LIMITS: &str = "WSVAccountMetadataLimits"; - pub const WSV_ASSET_METADATA_LIMITS: &str = "WSVAssetMetadataLimits"; - pub const WSV_TRIGGER_METADATA_LIMITS: &str = "WSVTriggerMetadataLimits"; - pub const WSV_IDENT_LENGTH_LIMITS: &str = "WSVIdentLengthLimits"; - pub const EXECUTOR_FUEL_LIMIT: &str = "ExecutorFuelLimit"; - pub const EXECUTOR_MAX_MEMORY: &str = "ExecutorMaxMemory"; - pub const WASM_FUEL_LIMIT: &str = "WASMFuelLimit"; - pub const WASM_MAX_MEMORY: &str = "WASMMaxMemory"; - } - - #[model] - mod model { - use super::*; - - #[derive( - Debug, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - FromVariant, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type(local)] - pub enum ParameterValueBox { - TransactionLimits(transaction::TransactionLimits), - MetadataLimits(metadata::Limits), - LengthLimits(LengthLimits), - Numeric( - #[skip_from] - #[skip_try_from] - Numeric, - ), - } - - /// Identification of a [`Parameter`]. - #[derive( - Debug, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Hash, - Getters, - FromStr, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "{name}")] - #[getset(get = "pub")] - #[serde(transparent)] - #[repr(transparent)] - #[ffi_type(opaque)] - pub struct ParameterId { - /// [`Name`] unique to a [`Parameter`]. - pub name: Name, - } - - #[derive( - Debug, - Display, - Clone, - Constructor, - IdEqOrdHash, - Decode, - Encode, - DeserializeFromStr, - SerializeDisplay, - IntoSchema, - )] - #[display(fmt = "?{id}={val}")] - /// A chain-wide configuration parameter and its value. - #[ffi_type] - pub struct Parameter { - /// Unique [`Id`] of the [`Parameter`]. - pub id: ParameterId, - /// Current value of the [`Parameter`]. 
- pub val: ParameterValueBox, - } - } - - // TODO: Maybe derive - impl core::fmt::Display for ParameterValueBox { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::MetadataLimits(v) => core::fmt::Display::fmt(&v, f), - Self::TransactionLimits(v) => core::fmt::Display::fmt(&v, f), - Self::LengthLimits(v) => core::fmt::Display::fmt(&v, f), - Self::Numeric(v) => core::fmt::Display::fmt(&v, f), - } - } - } - - impl> From for ParameterValueBox { - fn from(value: T) -> Self { - Self::Numeric(value.into()) - } - } - - impl TryFrom for u32 { - type Error = iroha_macro::error::ErrorTryFromEnum; - - fn try_from(value: ParameterValueBox) -> Result { - use iroha_macro::error::ErrorTryFromEnum; - - let ParameterValueBox::Numeric(numeric) = value else { - return Err(ErrorTryFromEnum::default()); - }; - - numeric.try_into().map_err(|_| ErrorTryFromEnum::default()) - } - } - - impl TryFrom for u64 { - type Error = iroha_macro::error::ErrorTryFromEnum; - - fn try_from(value: ParameterValueBox) -> Result { - use iroha_macro::error::ErrorTryFromEnum; - - let ParameterValueBox::Numeric(numeric) = value else { - return Err(ErrorTryFromEnum::default()); - }; - - numeric.try_into().map_err(|_| ErrorTryFromEnum::default()) - } - } - - impl Parameter { - /// Current value of the [`Parameter`]. - pub fn val(&self) -> &ParameterValueBox { - &self.val - } - } - - impl Borrow for ParameterId { - fn borrow(&self) -> &str { - self.name.borrow() - } - } - - impl Borrow for Parameter { - fn borrow(&self) -> &str { - self.id.borrow() - } - } - - impl FromStr for Parameter { - type Err = ParseError; - - fn from_str(string: &str) -> Result { - if let Some((parameter_id_candidate, val_candidate)) = string.rsplit_once('=') { - if let Some(parameter_id_candidate) = parameter_id_candidate.strip_prefix('?') { - let param_id: ParameterId = - parameter_id_candidate.parse().map_err(|_| ParseError { - reason: "Failed to parse the `param_id` part of the `Parameter`.", - })?; - if let Some((val, ty)) = val_candidate.rsplit_once('_') { - let val = match ty { - // Shorthand for `LengthLimits` - "LL" => { - let (lower, upper) = val.rsplit_once(',').ok_or( ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `LengthLimits`. Two comma-separated values are expected.", - })?; - let lower = lower.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `LengthLimits`. Invalid lower `u32` bound.", - })?; - let upper = upper.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `LengthLimits`. Invalid upper `u32` bound.", - })?; - LengthLimits::new(lower, upper).into() - } - // Shorthand for `TransactionLimits` - "TL" => { - let (max_instr, max_wasm_size) = val.rsplit_once(',').ok_or( ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `TransactionLimits`. Two comma-separated values are expected.", - })?; - let max_instr = max_instr.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `TransactionLimits`. `max_instruction_number` field should be a valid `u64`.", - })?; - let max_wasm_size = max_wasm_size.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `TransactionLimits`. 
`max_wasm_size_bytes` field should be a valid `u64`.", - })?; - transaction::TransactionLimits::new( - max_instr, - max_wasm_size, - ).into() - } - // Shorthand for `MetadataLimits` - "ML" => { - let (lower, upper) = val.rsplit_once(',').ok_or( ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `MetadataLimits`. Two comma-separated values are expected.", - })?; - let lower = lower.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `MetadataLimits`. Invalid `u32` in `capacity` field.", - })?; - let upper = upper.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `MetadataLimits`. Invalid `u32` in `max_entry_len` field.", - })?; - metadata::Limits::new(lower, upper).into() - } - _ => return Err(ParseError { - reason: - "Unsupported type provided for the `val` part of the `Parameter`.", - }), - }; - Ok(Self::new(param_id, val)) - } else { - let val = val_candidate.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `Numeric`.", - })?; - - Ok(Self::new(param_id, val.into())) - } - } else { - Err(ParseError { - reason: "`param_id` part of `Parameter` must start with `?`", - }) - } - } else { - Err(ParseError { - reason: "The `Parameter` string did not contain the `=` character.", - }) - } - } - } - - /// Convenience tool for setting parameters - #[derive(Default)] - #[must_use] - pub struct ParametersBuilder { - parameters: Vec, - } - - /// Error associated with parameters builder - #[derive(From, Debug, Display, Copy, Clone)] - pub enum ParametersBuilderError { - /// Error emerged during parsing of parameter id - Parse(ParseError), - } - - #[cfg(feature = "std")] - impl std::error::Error for ParametersBuilderError {} - - impl ParametersBuilder { - /// Construct [`Self`] - pub fn new() -> Self { - Self::default() - } - - /// Add [`Parameter`] to self - /// - /// # Errors - /// - [`ParameterId`] parsing failed - pub fn add_parameter( - mut self, - parameter_id: &str, - val: impl Into, - ) -> Result { - let parameter = Parameter { - id: parameter_id.parse()?, - val: val.into(), - }; - self.parameters.push(parameter); - Ok(self) - } - - /// Create sequence isi for setting parameters - pub fn into_set_parameters(self) -> Vec { - self.parameters - .into_iter() - .map(isi::SetParameter::new) - .map(Into::into) - .collect() - } - - /// Create sequence isi for creating parameters - pub fn into_create_parameters(self) -> Vec { - self.parameters - .into_iter() - .map(isi::NewParameter::new) - .map(Into::into) - .collect() - } - } - - pub mod prelude { - //! Prelude: re-export of most commonly used traits, structs and macros in this crate. - - pub use super::{Parameter, ParameterId}; - } - - #[cfg(test)] - mod tests { - use super::*; - use crate::{ - prelude::{numeric, MetadataLimits}, - transaction::TransactionLimits, - }; - - const INVALID_PARAM: [&str; 4] = [ - "", - "Block?SyncGossipPeriod=20000", - "?BlockSyncGossipPeriod20000", - "?BlockSyncGossipPeriod=20000_u32", - ]; - - #[test] - fn test_invalid_parameter_str() { - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[0]), - Err(err) if err.reason == "The `Parameter` string did not contain the `=` character." 
- )); - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[1]), - Err(err) if err.reason == "`param_id` part of `Parameter` must start with `?`" - )); - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[2]), - Err(err) if err.to_string() == "The `Parameter` string did not contain the `=` character." - )); - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[3]), - Err(err) if err.to_string() == "Unsupported type provided for the `val` part of the `Parameter`." - )); - } - - #[test] - fn test_parameter_serialize_deserialize_consistent() { - let parameters = [ - Parameter::new( - ParameterId::from_str("TransactionLimits") - .expect("Failed to parse `ParameterId`"), - TransactionLimits::new(42, 24).into(), - ), - Parameter::new( - ParameterId::from_str("MetadataLimits").expect("Failed to parse `ParameterId`"), - MetadataLimits::new(42, 24).into(), - ), - Parameter::new( - ParameterId::from_str("LengthLimits").expect("Failed to parse `ParameterId`"), - LengthLimits::new(24, 42).into(), - ), - Parameter::new( - ParameterId::from_str("Int").expect("Failed to parse `ParameterId`"), - numeric!(42).into(), - ), - ]; - - for parameter in parameters { - assert_eq!( - parameter, - serde_json::to_string(¶meter) - .and_then(|parameter| serde_json::from_str(¶meter)) - .unwrap_or_else(|_| panic!( - "Failed to de/serialize parameter {:?}", - ¶meter - )) - ); - } - } - } +impl std::error::Error + for EnumTryAsError +{ } #[model] #[allow(clippy::redundant_pub_crate)] mod model { + use getset::Getters; + use super::*; /// Unique id of blockchain @@ -682,8 +281,8 @@ mod model { RoleId(role::RoleId), /// [`Permission`](`permission::Permission`) variant. Permission(permission::Permission), - /// [`ParameterId`](`parameter::ParameterId`) variant. - ParameterId(parameter::ParameterId), + /// [`CustomParameter`](`parameter::CustomParameter`) variant. + CustomParameterId(parameter::CustomParameterId), } /// Sized container for all possible entities. @@ -728,35 +327,8 @@ mod model { Trigger(trigger::Trigger), /// [`Role`](`role::Role`) variant. Role(role::Role), - /// [`Parameter`](`parameter::Parameter`) variant. - Parameter(parameter::Parameter), - } - - /// Limits of length of the identifiers (e.g. in [`domain::Domain`], [`account::Account`], [`asset::AssetDefinition`]) in number of chars - #[derive( - Debug, - Display, - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "{min},{max}_LL")] - #[getset(get = "pub")] - #[ffi_type] - pub struct LengthLimits { - /// Minimal length in number of chars (inclusive). - pub(super) min: u32, - /// Maximal length in number of chars (inclusive). - pub(super) max: u32, + /// [`CustomParameter`](`parameter::CustomParameter`) variant. + CustomParameter(parameter::CustomParameter), } /// Operation validation failed. @@ -906,7 +478,6 @@ impl_encode_as_id_box! { trigger::TriggerId, permission::Permission, role::RoleId, - parameter::ParameterId, } impl_encode_as_identifiable_box! { @@ -921,7 +492,6 @@ impl_encode_as_identifiable_box! 
{ asset::Asset, trigger::Trigger, role::Role, - parameter::Parameter, } impl Decode for ChainId { @@ -960,7 +530,7 @@ impl IdentifiableBox { IdentifiableBox::Asset(a) => a.id().clone().into(), IdentifiableBox::Trigger(a) => a.id().clone().into(), IdentifiableBox::Role(a) => a.id().clone().into(), - IdentifiableBox::Parameter(a) => a.id().clone().into(), + IdentifiableBox::CustomParameter(a) => a.id().clone().into(), } } } @@ -1011,20 +581,6 @@ pub trait Registered: Identifiable { type With; } -impl LengthLimits { - /// Constructor. - pub const fn new(min: u32, max: u32) -> Self { - Self { min, max } - } -} - -impl From for RangeInclusive { - #[inline] - fn from(limits: LengthLimits) -> Self { - RangeInclusive::new(limits.min, limits.max) - } -} - declare_versioned!( BatchedResponse serde::Deserialize<'de>> 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema @@ -1090,6 +646,6 @@ pub mod prelude { executor::prelude::*, isi::prelude::*, metadata::prelude::*, name::prelude::*, parameter::prelude::*, peer::prelude::*, permission::prelude::*, query::prelude::*, role::prelude::*, transaction::prelude::*, trigger::prelude::*, ChainId, EnumTryAsError, - HasMetadata, IdBox, Identifiable, IdentifiableBox, LengthLimits, ValidationFail, + HasMetadata, IdBox, Identifiable, IdentifiableBox, ValidationFail, }; } diff --git a/data_model/src/metadata.rs b/data_model/src/metadata.rs index 8c021e25fcc..8a5efe050aa 100644 --- a/data_model/src/metadata.rs +++ b/data_model/src/metadata.rs @@ -1,22 +1,13 @@ //! Metadata: key-value pairs that can be attached to accounts, transactions and assets. #[cfg(not(feature = "std"))] -use alloc::{ - collections::BTreeMap, - format, - string::{String, ToString}, - vec::Vec, -}; +use alloc::{collections::BTreeMap, format, string::String, vec::Vec}; use core::borrow::Borrow; #[cfg(feature = "std")] -use std::{collections::BTreeMap, string::ToString, vec::Vec}; +use std::{collections::BTreeMap, vec::Vec}; -use derive_more::Display; use iroha_data_model_derive::model; use iroha_primitives::json::JsonString; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; pub use self::model::*; use crate::prelude::Name; @@ -25,11 +16,13 @@ use crate::prelude::Name; pub type Path = [Name]; -/// Collection of parameters by their names. -pub type UnlimitedMetadata = BTreeMap; - #[model] mod model { + use derive_more::Display; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + use super::*; /// Collection of parameters by their names with checked insertion. @@ -54,114 +47,9 @@ mod model { #[display(fmt = "Metadata")] #[allow(clippy::multiple_inherent_impl)] pub struct Metadata(pub(super) BTreeMap); - - /// Limits for [`Metadata`]. - #[derive( - Debug, - Display, - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type] - #[display(fmt = "{capacity},{max_entry_len}_ML")] - pub struct Limits { - /// Maximum number of entries - pub capacity: u32, - /// Maximum length of entry - pub max_entry_len: u32, - } - - /// Metadata related errors. 
-    #[derive(
-        Debug,
-        displaydoc::Display,
-        Clone,
-        PartialEq,
-        Eq,
-        PartialOrd,
-        Ord,
-        Decode,
-        Encode,
-        Deserialize,
-        Serialize,
-        IntoSchema,
-    )]
-    #[ffi_type(local)]
-    #[cfg_attr(feature = "std", derive(thiserror::Error))]
-    pub enum MetadataError {
-        /// Path specification empty
-        EmptyPath,
-        /// Metadata entry is too big
-        EntryTooBig(#[cfg_attr(feature = "std", source)] SizeError),
-        /// Metadata exceeds overall length limit
-        MaxCapacity(#[cfg_attr(feature = "std", source)] SizeError),
-        /// `{0}`: path segment not found, i.e. nothing was found at that key
-        MissingSegment(Name),
-        /// `{0}`: path segment not an instance of metadata
-        InvalidSegment(Name),
-        /// Metadata has an Invalid Json
-        InvalidJson(String),
-    }
-
-    /// Size limits exhaustion error
-    #[derive(
-        Debug,
-        Display,
-        Copy,
-        Clone,
-        PartialEq,
-        Eq,
-        PartialOrd,
-        Ord,
-        Decode,
-        Encode,
-        Deserialize,
-        Serialize,
-        IntoSchema,
-    )]
-    #[ffi_type]
-    #[cfg_attr(feature = "std", derive(thiserror::Error))]
-    #[display(fmt = "Limits are {limits}, while the actual value is {actual}")]
-    pub struct SizeError {
-        /// The limits that were set for this entry
-        pub limits: Limits,
-        /// The actual *entry* size in bytes
-        pub actual: u64,
-    }
-}
-
-impl Limits {
-    /// Constructor.
-    pub const fn new(capacity: u32, max_entry_len: u32) -> Limits {
-        Limits {
-            capacity,
-            max_entry_len,
-        }
-    }
-}
-
-impl From<serde_json::Error> for MetadataError {
-    fn from(err: serde_json::Error) -> Self {
-        MetadataError::InvalidJson(err.to_string())
-    }
 }
 
 impl Metadata {
-    /// Constructor.
-    #[inline]
-    pub fn new() -> Self {
-        Self(UnlimitedMetadata::new())
-    }
-
     /// Check if the internal map contains the given key.
     pub fn contains(&self, key: &Name) -> bool {
         self.0.contains_key(key)
@@ -181,37 +69,10 @@ impl Metadata {
         self.0.get(key)
     }
 
-    fn len_u64(&self) -> u64 {
-        self.0
-            .len()
-            .try_into()
-            .expect("`usize` should always fit into `u64`")
-    }
-
     /// Insert [`Value`] under the given key. Returns `Some(value)`
     /// if the value was already present, `None` otherwise.
-    ///
-    /// # Errors
-    /// Fails if `max_entry_len` or `capacity` from `limits` are exceeded.
-    pub fn insert_with_limits(
-        &mut self,
-        key: Name,
-        value: impl TryInto<JsonString>,
-        limits: Limits,
-    ) -> Result<Option<JsonString>, MetadataError> {
-        let value = match value.try_into() {
-            Ok(value) => value,
-            _ => return Err(MetadataError::InvalidJson("Invalid Json value".to_string())),
-        };
-
-        if self.0.len() >= limits.capacity as usize && !self.0.contains_key(&key) {
-            return Err(MetadataError::MaxCapacity(SizeError {
-                limits,
-                actual: self.len_u64(),
-            }));
-        }
-        check_size_limits(&key, &value, limits)?;
-        Ok(self.0.insert(key, value))
+    pub fn insert(&mut self, key: Name, value: impl Into<JsonString>) -> Option<JsonString> {
+        self.0.insert(key, value.into())
     }
 }
 
@@ -229,73 +90,7 @@ impl Metadata {
     }
 }
 
-fn check_size_limits(key: &Name, value: &JsonString, limits: Limits) -> Result<(), MetadataError> {
-    let entry_bytes: Vec<u8> = (key, value).encode();
-    let byte_size = entry_bytes.len();
-    if byte_size > limits.max_entry_len as usize {
-        return Err(MetadataError::EntryTooBig(SizeError {
-            limits,
-            actual: byte_size
-                .try_into()
-                .expect("`usize` should always fit into `u64`"),
-        }));
-    }
-    Ok(())
-}
-
 pub mod prelude {
     //! Prelude: re-export most commonly used traits, structs and macros from this module.
 
-    pub use super::{Limits as MetadataLimits, Metadata, UnlimitedMetadata};
-}
-
-#[cfg(test)]
-mod tests {
-    #[cfg(not(feature = "std"))]
-    use alloc::{borrow::ToOwned as _, vec};
-    use core::str::FromStr as _;
-
-    use iroha_macro::FromVariant;
-
-    use super::*;
-    use crate::ParseError;
-
-    /// Error used in testing to make text more readable using the `?` operator.
-    #[derive(Debug, Display, Clone, FromVariant)]
-    pub enum TestError {
-        Parse(ParseError),
-        Metadata(MetadataError),
-    }
-
-    #[test]
-    fn insert_exceeds_entry_size() -> Result<(), TestError> {
-        let mut metadata = Metadata::new();
-        let limits = Limits::new(10, 5);
-        assert!(metadata
-            .insert_with_limits(Name::from_str("1")?, JsonString::new("2"), limits)
-            .is_ok());
-        assert!(metadata
-            .insert_with_limits(Name::from_str("1")?, JsonString::new("23456"), limits)
-            .is_err());
-        Ok(())
-    }
-
-    #[test]
-    // This test is a good candidate for both property-based and parameterised testing
-    fn insert_exceeds_len() -> Result<(), TestError> {
-        let mut metadata = Metadata::new();
-        let limits = Limits::new(2, 5);
-        assert!(metadata
-            .insert_with_limits(Name::from_str("1")?, 0_u32, limits)
-            .is_ok());
-        assert!(metadata
-            .insert_with_limits(Name::from_str("2")?, 0_u32, limits)
-            .is_ok());
-        assert!(metadata
-            .insert_with_limits(Name::from_str("2")?, 1_u32, limits)
-            .is_ok());
-        assert!(metadata
-            .insert_with_limits(Name::from_str("3")?, 0_u32, limits)
-            .is_err());
-        Ok(())
-    }
+    pub use super::Metadata;
 }
diff --git a/data_model/src/name.rs b/data_model/src/name.rs
index 6094cb7acf4..bf87b77a275 100644
--- a/data_model/src/name.rs
+++ b/data_model/src/name.rs
@@ -2,20 +2,21 @@
 //! and related implementations and trait implementations.
 #[cfg(not(feature = "std"))]
 use alloc::{format, string::String, vec::Vec};
-use core::{borrow::Borrow, ops::RangeInclusive, str::FromStr};
+use core::{borrow::Borrow, str::FromStr};
 
-use derive_more::{DebugCustom, Display};
 use iroha_data_model_derive::model;
 use iroha_primitives::conststr::ConstString;
-use iroha_schema::IntoSchema;
 use parity_scale_codec::{Decode, Encode, Input};
 use serde::{Deserialize, Serialize};
 
 pub use self::model::*;
-use crate::{isi::error::InvalidParameterError, ParseError};
+use crate::ParseError;
 
 #[model]
 mod model {
+    use derive_more::{DebugCustom, Display};
+    use iroha_schema::IntoSchema;
+
     use super::*;
 
     /// `Name` struct represents the type of Iroha Entities names, such as
@@ -41,27 +42,6 @@ mod model {
 }
 
 impl Name {
-    /// Check if `range` contains the number of chars in the inner `ConstString` of this [`Name`].
-    ///
-    /// # Errors
-    /// Fails if `range` does not
-    pub fn validate_len(
-        &self,
-        range: impl Into<RangeInclusive<u32>>,
-    ) -> Result<(), InvalidParameterError> {
-        let range = range.into();
-        let Ok(true) = &self
-            .0
-            .chars()
-            .count()
-            .try_into()
-            .map(|len| range.contains(&len))
-        else {
-            return Err(InvalidParameterError::NameLength);
-        };
-        Ok(())
-    }
-
     /// Check if `candidate` string would be valid [`Name`].
     ///
     /// # Errors
diff --git a/data_model/src/parameter.rs b/data_model/src/parameter.rs
new file mode 100644
index 00000000000..4ad59afa219
--- /dev/null
+++ b/data_model/src/parameter.rs
@@ -0,0 +1,680 @@
+//! Structures, traits and impls related to `Parameter`s.
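+//!
+//! A minimal usage sketch (illustration only, not part of this change's code or tests;
+//! it assumes the `SetParameter` instruction from the `isi` module, which elsewhere in
+//! this diff is a thin wrapper around a [`Parameter`]):
+//!
+//! ```ignore
+//! use core::num::NonZeroU64;
+//!
+//! use iroha_data_model::{
+//!     isi::SetParameter,
+//!     parameter::{BlockParameter, Parameter},
+//! };
+//!
+//! // Build the instruction that would raise the per-block transaction cap to 1024;
+//! // a client would submit this instruction to change the on-chain value.
+//! let isi = SetParameter::new(Parameter::Block(BlockParameter::MaxTransactions(
+//!     NonZeroU64::new(1024).expect("1024 is non-zero"),
+//! )));
+//! ```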
+#[cfg(not(feature = "std"))] +use alloc::{collections::btree_map, format, string::String, vec::Vec}; +use core::{num::NonZeroU64, time::Duration}; +#[cfg(feature = "std")] +use std::collections::btree_map; + +use iroha_data_model_derive::model; +use iroha_primitives::json::JsonString; +use nonzero_ext::nonzero; + +pub use self::model::*; +use crate::name::Name; + +/// Collection of [`CustomParameter`]s +pub(crate) type CustomParameters = btree_map::BTreeMap; + +#[model] +mod model { + use derive_more::{Constructor, Display, FromStr}; + use getset::{CopyGetters, Getters}; + use iroha_data_model_derive::IdEqOrdHash; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + use strum::EnumDiscriminants; + + use super::*; + + /// Id of a custom parameter + #[derive( + Debug, + Display, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + FromStr, + Constructor, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type] + pub struct CustomParameterId(pub Name); + + /// Limits that govern consensus operation + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[display(fmt = "{block_time_ms},{commit_time_ms}_SL")] + pub struct SumeragiParameters { + /// Maximal amount of time (in milliseconds) a peer will wait before forcing creation of a new block. + /// + /// A block is created if this limit or [`BlockParameters::max_transactions`] limit is reached, + /// whichever comes first. Regardless of the limits, an empty block is never created. + pub block_time_ms: u64, + /// Time (in milliseconds) a peer will wait for a block to be committed. + /// + /// If this period expires the block will request a view change + pub commit_time_ms: u64, + } + + /// Single Sumeragi parameter + /// + /// Check [`SumeragiParameters`] for more details + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Serialize, + Deserialize, + IntoSchema, + )] + pub enum SumeragiParameter { + BlockTimeMs(u64), + CommitTimeMs(u64), + } + + /// Limits that a block must obey to be accepted. + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + CopyGetters, + Encode, + Serialize, + IntoSchema, + )] + #[display(fmt = "{max_transactions}_BL")] + #[getset(get_copy = "pub")] + pub struct BlockParameters { + /// Maximal number of transactions in a block. + /// + /// A block is created if this limit is reached or [`SumeragiParameters::block_time_ms`] has expired, + /// whichever comes first. Regardless of the limits, an empty block is never created. + pub max_transactions: NonZeroU64, + } + + /// Single block parameter + /// + /// Check [`BlockParameters`] for more details + #[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Serialize, IntoSchema, + )] + pub enum BlockParameter { + MaxTransactions(NonZeroU64), + } + + /// Limits that a transaction must obey to be accepted. 
+ #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + CopyGetters, + Encode, + Serialize, + IntoSchema, + )] + #[display(fmt = "{max_instructions},{smart_contract_size}_TL")] + #[getset(get_copy = "pub")] + pub struct TransactionParameters { + /// Maximum number of instructions per transaction + pub max_instructions: NonZeroU64, + /// Maximum size of wasm binary in bytes + pub smart_contract_size: NonZeroU64, + } + + /// Single transaction parameter + /// + /// Check [`TransactionParameters`] for more details + #[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Serialize, IntoSchema, + )] + pub enum TransactionParameter { + MaxInstructions(NonZeroU64), + SmartContractSize(NonZeroU64), + } + + /// Limits that a smart contract must obey at runtime to considered valid. + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + CopyGetters, + Encode, + Serialize, + IntoSchema, + )] + #[display(fmt = "{fuel},{memory}_SCL")] + #[getset(get_copy = "pub")] + pub struct SmartContractParameters { + /// Maximum amount of fuel that a smart contract can consume + pub fuel: NonZeroU64, + /// Maximum amount of memory that a smart contract can use + pub memory: NonZeroU64, + } + + /// Single smart contract parameter + /// + /// Check [`SmartContractParameters`] for more details + #[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Serialize, IntoSchema, + )] + pub enum SmartContractParameter { + Fuel(NonZeroU64), + Memory(NonZeroU64), + } + + /// Blockchain specific parameter defined in the executor + #[derive( + Debug, Display, Clone, IdEqOrdHash, Decode, Encode, Deserialize, Serialize, IntoSchema, + )] + #[ffi_type] + #[display(fmt = "{id}({payload})")] + pub struct CustomParameter { + /// Unique id of the parameter. + pub id: CustomParameterId, + /// Payload containing actual value. + /// + /// It is JSON-encoded, and its structure must correspond to the structure of + /// the type defined in [`crate::executor::ExecutorDataModel`]. + pub payload: JsonString, + } + + /// Set of all current blockchain parameter values + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Getters, + CopyGetters, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + pub struct Parameters { + /// Sumeragi parameters + #[getset(get_copy = "pub")] + pub sumeragi: SumeragiParameters, + /// Block parameters + #[getset(get_copy = "pub")] + pub block: BlockParameters, + /// Transaction parameters + #[getset(get_copy = "pub")] + pub transaction: TransactionParameters, + /// Executor parameters + #[getset(get_copy = "pub")] + pub executor: SmartContractParameters, + /// Smart contract parameters + #[getset(get_copy = "pub")] + pub smart_contract: SmartContractParameters, + /// Collection of blockchain specific parameters + #[getset(get = "pub")] + pub custom: CustomParameters, + } + + /// Single blockchain parameter. 
+ /// + /// Check [`Parameters`] for more details + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + EnumDiscriminants, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type(opaque)] + pub enum Parameter { + Sumeragi(SumeragiParameter), + Block(BlockParameter), + Transaction(TransactionParameter), + SmartContract(SmartContractParameter), + Executor(SmartContractParameter), + Custom(CustomParameter), + } +} + +impl core::fmt::Display for Parameter { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Sumeragi(v) => core::fmt::Display::fmt(&v, f), + Self::Block(v) => core::fmt::Display::fmt(&v, f), + Self::Transaction(v) => core::fmt::Display::fmt(&v, f), + Self::SmartContract(v) | Self::Executor(v) => core::fmt::Display::fmt(&v, f), + Self::Custom(v) => write!(f, "{}({})", v.id, v.payload), + } + } +} + +impl SumeragiParameters { + /// Maximal amount of time (in milliseconds) a peer will wait before forcing creation of a new block. + /// + /// A block is created if this limit or [`BlockParameters::max_transactions`] limit is reached, + /// whichever comes first. Regardless of the limits, an empty block is never created. + pub fn block_time(&self) -> Duration { + Duration::from_millis(self.block_time_ms) + } + + /// Time (in milliseconds) a peer will wait for a block to be committed. + /// + /// If this period expires the block will request a view change + pub fn commit_time(&self) -> Duration { + Duration::from_millis(self.commit_time_ms) + } + + /// Maximal amount of time it takes to commit a block + #[cfg(feature = "transparent_api")] + pub fn pipeline_time(&self) -> Duration { + self.block_time() + self.commit_time() + } + + /// Estimation of consensus duration + #[cfg(feature = "transparent_api")] + pub fn consensus_estimation(&self) -> Duration { + self.block_time() + (self.commit_time() / 2) + } +} + +impl Default for SumeragiParameters { + fn default() -> Self { + pub const DEFAULT_BLOCK_TIME: u64 = 2_000; + pub const DEFAULT_COMMIT_TIME: u64 = 4_000; + + Self { + block_time_ms: DEFAULT_BLOCK_TIME, + commit_time_ms: DEFAULT_COMMIT_TIME, + } + } +} +impl Default for BlockParameters { + fn default() -> Self { + /// Default value for [`Parameters::MaxTransactionsInBlock`] + pub const DEFAULT_TRANSACTIONS_IN_BLOCK: NonZeroU64 = nonzero!(2_u64.pow(9)); + + Self::new(DEFAULT_TRANSACTIONS_IN_BLOCK) + } +} + +impl Default for TransactionParameters { + fn default() -> Self { + const DEFAULT_INSTRUCTION_NUMBER: NonZeroU64 = nonzero!(2_u64.pow(12)); + const DEFAULT_SMART_CONTRACT_SIZE: NonZeroU64 = nonzero!(4 * 2_u64.pow(20)); + + Self::new(DEFAULT_INSTRUCTION_NUMBER, DEFAULT_SMART_CONTRACT_SIZE) + } +} + +impl Default for SmartContractParameters { + fn default() -> Self { + const DEFAULT_FUEL: NonZeroU64 = nonzero!(55_000_000_u64); + const DEFAULT_MEMORY: NonZeroU64 = nonzero!(55_000_000_u64); + + Self { + fuel: DEFAULT_FUEL, + memory: DEFAULT_MEMORY, + } + } +} + +impl SumeragiParameters { + /// Construct [`Self`] + pub fn new(block_time: Duration, commit_time: Duration) -> Self { + Self { + block_time_ms: block_time + .as_millis() + .try_into() + .expect("INTERNAL BUG: Time should fit into u64"), + commit_time_ms: commit_time + .as_millis() + .try_into() + .expect("INTERNAL BUG: Time should fit into u64"), + } + } +} + +impl BlockParameters { + /// Construct [`Self`] + pub const fn new(max_transactions: NonZeroU64) -> Self { + Self { max_transactions } + } +} + +impl TransactionParameters { + /// 
Construct [`Self`] + pub const fn new(max_instructions: NonZeroU64, smart_contract_size: NonZeroU64) -> Self { + Self { + max_instructions, + smart_contract_size, + } + } +} + +impl CustomParameterId { + /// Getter for name + pub fn name(&self) -> &Name { + &self.0 + } +} + +impl CustomParameter { + /// Constructor + pub fn new(id: CustomParameterId, payload: impl Into) -> Self { + Self { + id, + payload: payload.into(), + } + } + + /// Getter + // TODO: derive with getset once FFI impl is fixed + pub fn payload(&self) -> &JsonString { + &self.payload + } +} + +mod candidate { + use core::num::NonZeroUsize; + + use parity_scale_codec::{Decode, Input}; + use serde::Deserialize; + + use super::*; + + #[derive(Decode, Deserialize)] + enum TransactionParameterCandidate { + MaxInstructions(NonZeroU64), + SmartContractSize(NonZeroU64), + } + + #[derive(Decode, Deserialize)] + struct TransactionParametersCandidate { + max_instructions: NonZeroU64, + smart_contract_size: NonZeroU64, + } + + #[derive(Decode, Deserialize)] + enum BlockParameterCandidate { + MaxTransactions(NonZeroU64), + } + + #[derive(Decode, Deserialize)] + struct BlockParametersCandidate { + max_transactions: NonZeroU64, + } + + #[derive(Decode, Deserialize)] + enum SmartContractParameterCandidate { + Fuel(NonZeroU64), + Memory(NonZeroU64), + } + + #[derive(Decode, Deserialize)] + struct SmartContractParametersCandidate { + fuel: NonZeroU64, + memory: NonZeroU64, + } + + impl BlockParameterCandidate { + fn validate(self) -> Result { + Ok(match self { + Self::MaxTransactions(max_transactions) => { + let _ = NonZeroUsize::try_from(max_transactions) + .map_err(|_| "BlockParameter::MaxTransactions exceeds usize::MAX")?; + + BlockParameter::MaxTransactions(max_transactions) + } + }) + } + } + + impl BlockParametersCandidate { + fn validate(self) -> Result { + let _ = NonZeroUsize::try_from(self.max_transactions) + .map_err(|_| "BlockParameters::max_transactions exceeds usize::MAX")?; + + Ok(BlockParameters { + max_transactions: self.max_transactions, + }) + } + } + + impl TransactionParameterCandidate { + fn validate(self) -> Result { + Ok(match self { + Self::MaxInstructions(max_instructions) => { + let _ = NonZeroUsize::try_from(max_instructions) + .map_err(|_| "TransactionParameter::MaxInstructions exceeds usize::MAX")?; + TransactionParameter::MaxInstructions(max_instructions) + } + Self::SmartContractSize(smart_contract_size) => { + let _ = NonZeroUsize::try_from(smart_contract_size).map_err(|_| { + "TransactionParameter::SmartContractSize exceeds usize::MAX" + })?; + TransactionParameter::SmartContractSize(smart_contract_size) + } + }) + } + } + + impl TransactionParametersCandidate { + fn validate(self) -> Result { + let _ = NonZeroUsize::try_from(self.max_instructions) + .map_err(|_| "TransactionParameters::max_instructions exceeds usize::MAX")?; + + let _ = NonZeroUsize::try_from(self.smart_contract_size) + .map_err(|_| "TransactionParameters::smart_contract_size exceeds usize::MAX")?; + + Ok(TransactionParameters { + max_instructions: self.max_instructions, + smart_contract_size: self.smart_contract_size, + }) + } + } + + impl SmartContractParameterCandidate { + fn validate(self) -> Result { + Ok(match self { + Self::Fuel(fuel) => SmartContractParameter::Fuel(fuel), + Self::Memory(memory) => { + NonZeroUsize::try_from(memory) + .map_err(|_| "SmartContractParameter::Memory exceeds usize::MAX")?; + SmartContractParameter::Memory(memory) + } + }) + } + } + + impl SmartContractParametersCandidate { + fn validate(self) -> Result 
{ + let _ = NonZeroUsize::try_from(self.memory) + .map_err(|_| "SmartContractParameters::memory exceeds usize::MAX")?; + + Ok(SmartContractParameters { + fuel: self.fuel, + memory: self.memory, + }) + } + } + + impl Decode for BlockParameter { + fn decode(input: &mut I) -> Result { + BlockParameterCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for BlockParameter { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + BlockParameterCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for BlockParameters { + fn decode(input: &mut I) -> Result { + BlockParametersCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for BlockParameters { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + BlockParametersCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for TransactionParameter { + fn decode(input: &mut I) -> Result { + TransactionParameterCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for TransactionParameter { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + TransactionParameterCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for TransactionParameters { + fn decode(input: &mut I) -> Result { + TransactionParametersCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for TransactionParameters { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + TransactionParametersCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for SmartContractParameter { + fn decode(input: &mut I) -> Result { + SmartContractParameterCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + impl<'de> Deserialize<'de> for SmartContractParameter { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + SmartContractParameterCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for SmartContractParameters { + fn decode(input: &mut I) -> Result { + SmartContractParametersCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + impl<'de> Deserialize<'de> for SmartContractParameters { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + SmartContractParametersCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } +} +pub mod prelude { + //! Prelude: re-export of most commonly used traits, structs and macros in this crate. 
+ + pub use super::{Parameter, Parameters, SmartContractParameters, TransactionParameters}; +} diff --git a/data_model/src/peer.rs b/data_model/src/peer.rs index ffacb39dd98..72ab6b9a8e8 100644 --- a/data_model/src/peer.rs +++ b/data_model/src/peer.rs @@ -9,18 +9,19 @@ use core::{ }; use derive_more::Display; -use iroha_data_model_derive::{model, IdEqOrdHash}; +use iroha_data_model_derive::model; use iroha_primitives::addr::SocketAddr; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; pub use self::model::*; -use crate::{Identifiable, PublicKey, Registered}; +use crate::{PublicKey, Registered}; #[model] mod model { use getset::Getters; + use iroha_data_model_derive::IdEqOrdHash; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; use super::*; diff --git a/data_model/src/permission.rs b/data_model/src/permission.rs index 6a7cfef7677..04bab19588e 100644 --- a/data_model/src/permission.rs +++ b/data_model/src/permission.rs @@ -10,13 +10,12 @@ use iroha_schema::{Ident, IntoSchema}; pub use self::model::*; -/// Collection of [`Token`]s +/// Collection of [`Permission`]s pub type Permissions = BTreeSet; -use super::*; - #[model] mod model { + use derive_more::Display; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; @@ -38,7 +37,7 @@ mod model { Display, )] #[ffi_type] - #[display(fmt = "PERMISSION `{name}` = `{payload}`")] + #[display(fmt = "{name}({payload})")] pub struct Permission { /// Refers to a type defined in [`crate::executor::ExecutorDataModel`]. pub name: Ident, diff --git a/data_model/src/query/mod.rs b/data_model/src/query/mod.rs index 639a1d8d160..4520069e0b3 100644 --- a/data_model/src/query/mod.rs +++ b/data_model/src/query/mod.rs @@ -212,7 +212,8 @@ mod model { Identifiable(IdentifiableBox), Transaction(TransactionQueryOutput), Permission(crate::permission::Permission), - LimitedMetadata(JsonString), + Parameters(crate::parameter::Parameters), + Metadata(JsonString), Numeric(Numeric), BlockHeader(BlockHeader), Block(crate::block::SignedBlock), @@ -221,7 +222,7 @@ mod model { Vec( #[skip_from] #[skip_try_from] - Vec, + Vec, ), } @@ -402,7 +403,7 @@ impl_queries! 
{ FindDomainById => crate::domain::Domain, FindDomainKeyValueByIdAndKey => JsonString, FindAllPeers => Vec, - FindAllParameters => Vec, + FindAllParameters => crate::parameter::Parameters, FindAllActiveTriggerIds => Vec, FindTriggerById => crate::trigger::Trigger, FindTriggerKeyValueByIdAndKey => JsonString, @@ -429,17 +430,18 @@ impl core::fmt::Display for QueryOutputBox { // TODO: Maybe derive fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { match self { - QueryOutputBox::Id(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Identifiable(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Transaction(_) => write!(f, "TransactionQueryOutput"), - QueryOutputBox::Permission(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Block(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::BlockHeader(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Numeric(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::LimitedMetadata(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::ExecutorDataModel(v) => core::fmt::Display::fmt(&v, f), - - QueryOutputBox::Vec(v) => { + Self::Id(v) => core::fmt::Display::fmt(&v, f), + Self::Identifiable(v) => core::fmt::Display::fmt(&v, f), + Self::Transaction(_) => write!(f, "TransactionQueryOutput"), + Self::Permission(v) => core::fmt::Display::fmt(&v, f), + Self::Parameters(v) => core::fmt::Debug::fmt(&v, f), + Self::Block(v) => core::fmt::Display::fmt(&v, f), + Self::BlockHeader(v) => core::fmt::Display::fmt(&v, f), + Self::Numeric(v) => core::fmt::Display::fmt(&v, f), + Self::Metadata(v) => core::fmt::Display::fmt(&v, f), + Self::ExecutorDataModel(v) => core::fmt::Display::fmt(&v, f), + + Self::Vec(v) => { // TODO: Remove so we can derive. let list_of_display: Vec<_> = v.iter().map(ToString::to_string).collect(); // this prints with quotation marks, which is fine 90% @@ -468,7 +470,7 @@ macro_rules! from_and_try_from_value_idbox { impl From<$ty> for QueryOutputBox { fn from(id: $ty) -> Self { - QueryOutputBox::Id(IdBox::$variant(id)) + Self::Id(IdBox::$variant(id)) } })+ }; @@ -490,7 +492,7 @@ macro_rules! from_and_try_from_value_identifiable { impl From<$ty> for QueryOutputBox { fn from(id: $ty) -> Self { - QueryOutputBox::Identifiable(IdentifiableBox::$variant(id)) + Self::Identifiable(IdentifiableBox::$variant(id)) } } )+ }; @@ -504,7 +506,6 @@ from_and_try_from_value_idbox!( AssetDefinitionId(crate::asset::AssetDefinitionId), TriggerId(crate::trigger::TriggerId), RoleId(crate::role::RoleId), - ParameterId(crate::parameter::ParameterId), // TODO: Should we wrap String with new type in order to convert like here? //from_and_try_from_value_idbox!((DomainName(Name), ErrorValueTryFromDomainName),); ); @@ -521,12 +522,11 @@ from_and_try_from_value_identifiable!( Asset(crate::asset::Asset), Trigger(crate::trigger::Trigger), Role(crate::role::Role), - Parameter(crate::parameter::Parameter), ); impl> From> for QueryOutputBox { - fn from(values: Vec) -> QueryOutputBox { - QueryOutputBox::Vec(values.into_iter().map(Into::into).collect()) + fn from(values: Vec) -> Self { + Self::Vec(values.into_iter().map(Into::into).collect()) } } @@ -854,7 +854,7 @@ pub mod asset { } /// [`FindAssetQuantityById`] Iroha Query gets [`AssetId`] as input and finds [`Asset::quantity`] - /// parameter's value if [`Asset`] is presented in Iroha Peer. + /// value if [`Asset`] is presented in Iroha Peer. 
#[derive(Display)] #[display(fmt = "Find quantity of the `{id}` asset")] #[repr(transparent)] @@ -1536,10 +1536,8 @@ pub mod error { Trigger(TriggerId), /// Role with id `{0}` not found Role(RoleId), - /// Failed to find [`Permission`] + /// Failed to find [`Permission`] by id. Permission(Permission), - /// Parameter with id `{0}` not found - Parameter(ParameterId), /// Failed to find public key: `{0}` PublicKey(PublicKey), } diff --git a/data_model/src/query/predicate.rs b/data_model/src/query/predicate.rs index 47e16e6bd5e..3846840af0b 100644 --- a/data_model/src/query/predicate.rs +++ b/data_model/src/query/predicate.rs @@ -603,7 +603,7 @@ pub mod string { IdBox::TriggerId(id) => self.applies(&id.to_string()), IdBox::RoleId(id) => self.applies(&id.to_string()), IdBox::Permission(id) => self.applies(&id.to_string()), - IdBox::ParameterId(id) => self.applies(&id.to_string()), + IdBox::CustomParameterId(id) => self.applies(&id.to_string()), } } } diff --git a/data_model/src/role.rs b/data_model/src/role.rs index 45c6b53732a..834ef75e57b 100644 --- a/data_model/src/role.rs +++ b/data_model/src/role.rs @@ -3,21 +3,23 @@ #[cfg(not(feature = "std"))] use alloc::{format, string::String, vec::Vec}; -use derive_more::{Constructor, Display, FromStr}; -use getset::Getters; -use iroha_data_model_derive::{model, IdEqOrdHash}; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; +use iroha_data_model_derive::model; pub use self::model::*; use crate::{ permission::{Permission, Permissions}, - Identifiable, Name, Registered, + Name, Registered, }; #[model] mod model { + use derive_more::{Constructor, Display, FromStr}; + use getset::Getters; + use iroha_data_model_derive::IdEqOrdHash; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + use super::*; /// Identification of a role. diff --git a/data_model/src/transaction.rs b/data_model/src/transaction.rs index e5263a3452d..6da46f26437 100644 --- a/data_model/src/transaction.rs +++ b/data_model/src/transaction.rs @@ -23,13 +23,13 @@ pub use self::model::*; use crate::{ account::AccountId, isi::{Instruction, InstructionBox}, - metadata::UnlimitedMetadata, + metadata::Metadata, ChainId, }; #[model] mod model { - use getset::{CopyGetters, Getters}; + use getset::Getters; use super::*; use crate::account::AccountId; @@ -114,34 +114,7 @@ mod model { /// Random value to make different hashes for transactions which occur repeatedly and simultaneously. pub nonce: Option, /// Store for additional information. - pub metadata: UnlimitedMetadata, - } - - /// Container for limits that transactions must obey. 
- #[derive( - Debug, - Display, - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - CopyGetters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "{max_instruction_number},{max_wasm_size_bytes}_TL")] - #[getset(get_copy = "pub")] - #[ffi_type] - pub struct TransactionLimits { - /// Maximum number of instructions per transaction - pub max_instruction_number: u64, - /// Maximum size of wasm binary - pub max_wasm_size_bytes: u64, + pub metadata: Metadata, } /// Signature of transaction @@ -206,16 +179,6 @@ mod model { } } -impl TransactionLimits { - /// Construct [`Self`] - pub const fn new(max_instruction_number: u64, max_wasm_size_bytes: u64) -> Self { - Self { - max_instruction_number, - max_wasm_size_bytes, - } - } -} - impl FromIterator for Executable { fn from_iter>(iter: T) -> Self { Self::Instructions(iter.into_iter().map(Into::into).collect()) @@ -282,7 +245,7 @@ impl SignedTransaction { /// Return transaction metadata. #[inline] - pub fn metadata(&self) -> &UnlimitedMetadata { + pub fn metadata(&self) -> &Metadata { let SignedTransaction::V1(tx) = self; &tx.payload.metadata } @@ -596,7 +559,6 @@ pub mod error { Revoke(_) => "revoke", ExecuteTrigger(_) => "execute trigger", SetParameter(_) => "set parameter", - NewParameter(_) => "new parameter", Upgrade(_) => "upgrade", Log(_) => "log", Custom(_) => "custom", @@ -655,7 +617,7 @@ mod http { nonce: None, time_to_live_ms: None, instructions: Vec::::new().into(), - metadata: UnlimitedMetadata::new(), + metadata: Metadata::default(), }, } } @@ -722,7 +684,7 @@ mod http { } /// Adds metadata to the `Transaction` - pub fn with_metadata(mut self, metadata: UnlimitedMetadata) -> Self { + pub fn with_metadata(mut self, metadata: Metadata) -> Self { self.payload.metadata = metadata; self } diff --git a/data_model/src/trigger.rs b/data_model/src/trigger.rs index 1170a87c4e2..8be9660bc05 100644 --- a/data_model/src/trigger.rs +++ b/data_model/src/trigger.rs @@ -17,9 +17,7 @@ use serde::{Deserialize, Serialize}; use serde_with::{DeserializeFromStr, SerializeDisplay}; pub use self::model::*; -use crate::{ - events::prelude::*, metadata::Metadata, transaction::Executable, Identifiable, Name, Registered, -}; +use crate::{events::prelude::*, metadata::Metadata, transaction::Executable, Name, Registered}; #[model] mod model { @@ -189,7 +187,7 @@ pub mod action { // TODO: At this point the authority is meaningless. authority, filter: filter.into(), - metadata: Metadata::new(), + metadata: Metadata::default(), } } diff --git a/data_model/src/visit.rs b/data_model/src/visit.rs index 0ca83ed6c38..cc0e92764f5 100644 --- a/data_model/src/visit.rs +++ b/data_model/src/visit.rs @@ -39,7 +39,6 @@ pub trait Visit { visit_upgrade(&Upgrade), visit_execute_trigger(&ExecuteTrigger), - visit_new_parameter(&NewParameter), visit_set_parameter(&SetParameter), visit_log(&Log), visit_custom(&CustomInstruction), @@ -232,9 +231,6 @@ pub fn visit_instruction( isi: &InstructionBox, ) { match isi { - InstructionBox::NewParameter(variant_value) => { - visitor.visit_new_parameter(authority, variant_value) - } InstructionBox::SetParameter(variant_value) => { visitor.visit_set_parameter(authority, variant_value) } @@ -426,7 +422,6 @@ leaf_visitors! 
{ visit_mint_trigger_repetitions(&Mint), visit_burn_trigger_repetitions(&Burn), visit_upgrade(&Upgrade), - visit_new_parameter(&NewParameter), visit_set_parameter(&SetParameter), visit_execute_trigger(&ExecuteTrigger), visit_log(&Log), diff --git a/default_executor/src/lib.rs b/default_executor/src/lib.rs index a506d4df6dc..9e79fec18a9 100644 --- a/default_executor/src/lib.rs +++ b/default_executor/src/lib.rs @@ -49,7 +49,7 @@ impl Executor { /// If `migrate()` entrypoint fails then the whole `Upgrade` instruction /// will be denied and previous executor will stay unchanged. #[entrypoint] -pub fn migrate(block_height: u64) -> MigrationResult { +fn migrate(block_height: u64) -> MigrationResult { Executor::ensure_genesis(block_height)?; DataModelBuilder::with_default_permissions().build_and_set(); diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index ae48c96ff30..20920ee1ab4 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -17,59 +17,49 @@ }, "AccountEvent": { "Enum": [ - { - "tag": "Asset", - "discriminant": 0, - "type": "AssetEvent" - }, { "tag": "Created", - "discriminant": 1, + "discriminant": 0, "type": "Account" }, { "tag": "Deleted", - "discriminant": 2, - "type": "AccountId" - }, - { - "tag": "AuthenticationAdded", - "discriminant": 3, + "discriminant": 1, "type": "AccountId" }, { - "tag": "AuthenticationRemoved", - "discriminant": 4, - "type": "AccountId" + "tag": "Asset", + "discriminant": 2, + "type": "AssetEvent" }, { "tag": "PermissionAdded", - "discriminant": 5, + "discriminant": 3, "type": "AccountPermissionChanged" }, { "tag": "PermissionRemoved", - "discriminant": 6, + "discriminant": 4, "type": "AccountPermissionChanged" }, { - "tag": "RoleRevoked", - "discriminant": 7, + "tag": "RoleGranted", + "discriminant": 5, "type": "AccountRoleChanged" }, { - "tag": "RoleGranted", - "discriminant": 8, + "tag": "RoleRevoked", + "discriminant": 6, "type": "AccountRoleChanged" }, { "tag": "MetadataInserted", - "discriminant": 9, + "discriminant": 7, "type": "MetadataChanged" }, { "tag": "MetadataRemoved", - "discriminant": 10, + "discriminant": 8, "type": "MetadataChanged" } ] @@ -91,48 +81,40 @@ "repr": "u32", "masks": [ { - "name": "AnyAsset", + "name": "Created", "mask": 1 }, { - "name": "Created", + "name": "Deleted", "mask": 2 }, { - "name": "Deleted", + "name": "AnyAsset", "mask": 4 }, { - "name": "AuthenticationAdded", + "name": "PermissionAdded", "mask": 8 }, { - "name": "AuthenticationRemoved", + "name": "PermissionRemoved", "mask": 16 }, { - "name": "PermissionAdded", + "name": "RoleGranted", "mask": 32 }, - { - "name": "PermissionRemoved", - "mask": 64 - }, { "name": "RoleRevoked", - "mask": 128 - }, - { - "name": "RoleGranted", - "mask": 256 + "mask": 64 }, { "name": "MetadataInserted", - "mask": 512 + "mask": 128 }, { "name": "MetadataRemoved", - "mask": 1024 + "mask": 256 } ] } @@ -295,34 +277,34 @@ "type": "AssetDefinition" }, { - "tag": "MintabilityChanged", + "tag": "Deleted", "discriminant": 1, "type": "AssetDefinitionId" }, { - "tag": "OwnerChanged", + "tag": "MetadataInserted", "discriminant": 2, - "type": "AssetDefinitionOwnerChanged" + "type": "MetadataChanged" }, { - "tag": "Deleted", + "tag": "MetadataRemoved", "discriminant": 3, - "type": "AssetDefinitionId" + "type": "MetadataChanged" }, { - "tag": "MetadataInserted", + "tag": "MintabilityChanged", "discriminant": 4, - "type": "MetadataChanged" + "type": "AssetDefinitionId" }, { - "tag": "MetadataRemoved", + "tag": 
"TotalQuantityChanged", "discriminant": 5, - "type": "MetadataChanged" + "type": "AssetDefinitionTotalQuantityChanged" }, { - "tag": "TotalQuantityChanged", + "tag": "OwnerChanged", "discriminant": 6, - "type": "AssetDefinitionTotalQuantityChanged" + "type": "AssetDefinitionOwnerChanged" } ] }, @@ -347,27 +329,27 @@ "mask": 1 }, { - "name": "MintabilityChanged", + "name": "Deleted", "mask": 2 }, { - "name": "OwnerChanged", + "name": "MetadataInserted", "mask": 4 }, { - "name": "Deleted", + "name": "MetadataRemoved", "mask": 8 }, { - "name": "MetadataInserted", + "name": "MintabilityChanged", "mask": 16 }, { - "name": "MetadataRemoved", + "name": "TotalQuantityChanged", "mask": 32 }, { - "name": "TotalQuantityChanged", + "name": "OwnerChanged", "mask": 64 } ] @@ -592,7 +574,7 @@ "Struct": [ { "name": "height", - "type": "Option" + "type": "Option>" }, { "name": "status", @@ -604,7 +586,7 @@ "Struct": [ { "name": "height", - "type": "u64" + "type": "NonZero" }, { "name": "prev_block_hash", @@ -629,6 +611,23 @@ ] }, "BlockMessage": "SignedBlock", + "BlockParameter": { + "Enum": [ + { + "tag": "MaxTransactions", + "discriminant": 0, + "type": "NonZero" + } + ] + }, + "BlockParameters": { + "Struct": [ + { + "name": "max_transactions", + "type": "NonZero" + } + ] + }, "BlockPayload": { "Struct": [ { @@ -775,26 +774,12 @@ { "tag": "Changed", "discriminant": 0, - "type": "ParameterId" - }, - { - "tag": "Created", - "discriminant": 1, - "type": "ParameterId" - }, - { - "tag": "Deleted", - "discriminant": 2, - "type": "ParameterId" + "type": "ParameterChanged" } ] }, "ConfigurationEventFilter": { "Struct": [ - { - "name": "id_matcher", - "type": "Option" - }, { "name": "event_set", "type": "ConfigurationEventSet" @@ -808,14 +793,6 @@ { "name": "Changed", "mask": 1 - }, - { - "name": "Created", - "mask": 2 - }, - { - "name": "Deleted", - "mask": 4 } ] } @@ -847,6 +824,19 @@ } ] }, + "CustomParameter": { + "Struct": [ + { + "name": "id", + "type": "CustomParameterId" + }, + { + "name": "payload", + "type": "JsonString" + } + ] + }, + "CustomParameterId": "Name", "DataEvent": { "Enum": [ { @@ -965,24 +955,24 @@ "DomainEvent": { "Enum": [ { - "tag": "Account", + "tag": "Created", "discriminant": 0, - "type": "AccountEvent" + "type": "Domain" }, { - "tag": "AssetDefinition", + "tag": "Deleted", "discriminant": 1, - "type": "AssetDefinitionEvent" + "type": "DomainId" }, { - "tag": "Created", + "tag": "AssetDefinition", "discriminant": 2, - "type": "Domain" + "type": "AssetDefinitionEvent" }, { - "tag": "Deleted", + "tag": "Account", "discriminant": 3, - "type": "DomainId" + "type": "AccountEvent" }, { "tag": "MetadataInserted", @@ -1018,19 +1008,19 @@ "repr": "u32", "masks": [ { - "name": "AnyAccount", + "name": "Created", "mask": 1 }, { - "name": "AnyAssetDefinition", + "name": "Deleted", "mask": 2 }, { - "name": "Created", + "name": "AnyAssetDefinition", "mask": 4 }, { - "name": "Deleted", + "name": "AnyAccount", "mask": 8 }, { @@ -1204,12 +1194,16 @@ "ExecutorDataModel": { "Struct": [ { - "name": "permissions", + "name": "parameters", + "type": "SortedMap" + }, + { + "name": "instructions", "type": "SortedVec" }, { - "name": "custom_instruction", - "type": "Option" + "name": "permissions", + "type": "SortedVec" }, { "name": "schema", @@ -1486,14 +1480,9 @@ "discriminant": 10, "type": "Permission" }, - { - "tag": "Parameter", - "discriminant": 11, - "type": "ParameterId" - }, { "tag": "PublicKey", - "discriminant": 12, + "discriminant": 11, "type": "PublicKey" } ] @@ -1721,9 +1710,9 @@ "type": 
"Permission" }, { - "tag": "ParameterId", + "tag": "CustomParameterId", "discriminant": 8, - "type": "ParameterId" + "type": "CustomParameterId" } ] }, @@ -1785,9 +1774,9 @@ "type": "Role" }, { - "tag": "Parameter", + "tag": "CustomParameter", "discriminant": 11, - "type": "Parameter" + "type": "CustomParameter" } ] }, @@ -1848,24 +1837,19 @@ "discriminant": 10, "type": "SetParameter" }, - { - "tag": "NewParameter", - "discriminant": 11, - "type": "NewParameter" - }, { "tag": "Upgrade", - "discriminant": 12, + "discriminant": 11, "type": "Upgrade" }, { "tag": "Log", - "discriminant": 13, + "discriminant": 12, "type": "Log" }, { "tag": "Custom", - "discriminant": 14, + "discriminant": 13, "type": "CustomInstruction" } ] @@ -1926,19 +1910,14 @@ "discriminant": 6, "type": "MathError" }, - { - "tag": "Metadata", - "discriminant": 7, - "type": "MetadataError" - }, { "tag": "InvalidParameter", - "discriminant": 8, + "discriminant": 7, "type": "InvalidParameterError" }, { "tag": "InvariantViolation", - "discriminant": 9, + "discriminant": 8, "type": "String" } ] @@ -2001,21 +1980,17 @@ "tag": "SetParameter", "discriminant": 10 }, - { - "tag": "NewParameter", - "discriminant": 11 - }, { "tag": "Upgrade", - "discriminant": 12 + "discriminant": 11 }, { "tag": "Log", - "discriminant": 13 + "discriminant": 12 }, { "tag": "Custom", - "discriminant": 14 + "discriminant": 13 } ] }, @@ -2040,18 +2015,6 @@ "Ipv4Addr": "Array", "Ipv6Addr": "Array", "JsonString": "String", - "LengthLimits": { - "Struct": [ - { - "name": "min", - "type": "u32" - }, - { - "name": "max", - "type": "u32" - } - ] - }, "Level": { "Enum": [ { @@ -2076,18 +2039,6 @@ } ] }, - "Limits": { - "Struct": [ - { - "name": "capacity", - "type": "u32" - }, - { - "name": "max_entry_len", - "type": "u32" - } - ] - }, "Log": { "Struct": [ { @@ -2217,39 +2168,6 @@ } ] }, - "MetadataError": { - "Enum": [ - { - "tag": "EmptyPath", - "discriminant": 0 - }, - { - "tag": "EntryTooBig", - "discriminant": 1, - "type": "SizeError" - }, - { - "tag": "MaxCapacity", - "discriminant": 2, - "type": "SizeError" - }, - { - "tag": "MissingSegment", - "discriminant": 3, - "type": "Name" - }, - { - "tag": "InvalidSegment", - "discriminant": 4, - "type": "Name" - }, - { - "tag": "InvalidJson", - "discriminant": 5, - "type": "String" - } - ] - }, "Mint": { "Struct": [ { @@ -2381,14 +2299,6 @@ } ] }, - "NewParameter": { - "Struct": [ - { - "name": "parameter", - "type": "Parameter" - } - ] - }, "NewRole": { "Struct": [ { @@ -2456,11 +2366,8 @@ "Option>": { "Option": "NonZero" }, - "Option>": { - "Option": "Option" - }, - "Option": { - "Option": "ParameterId" + "Option>>": { + "Option": "Option>" }, "Option": { "Option": "PeerId" @@ -2489,9 +2396,6 @@ "Option": { "Option": "u32" }, - "Option": { - "Option": "u64" - }, "Pagination": { "Struct": [ { @@ -2505,46 +2409,76 @@ ] }, "Parameter": { - "Struct": [ + "Enum": [ { - "name": "id", - "type": "ParameterId" + "tag": "Sumeragi", + "discriminant": 0, + "type": "SumeragiParameter" }, { - "name": "val", - "type": "ParameterValueBox" + "tag": "Block", + "discriminant": 1, + "type": "BlockParameter" + }, + { + "tag": "Transaction", + "discriminant": 2, + "type": "TransactionParameter" + }, + { + "tag": "SmartContract", + "discriminant": 3, + "type": "SmartContractParameter" + }, + { + "tag": "Executor", + "discriminant": 4, + "type": "SmartContractParameter" + }, + { + "tag": "Custom", + "discriminant": 5, + "type": "CustomParameter" } ] }, - "ParameterId": { + "ParameterChanged": { "Struct": [ { - "name": "name", - "type": 
"Name" + "name": "old_value", + "type": "Parameter" + }, + { + "name": "new_value", + "type": "Parameter" } ] }, - "ParameterValueBox": { - "Enum": [ + "Parameters": { + "Struct": [ { - "tag": "TransactionLimits", - "discriminant": 0, - "type": "TransactionLimits" + "name": "sumeragi", + "type": "SumeragiParameters" }, { - "tag": "MetadataLimits", - "discriminant": 1, - "type": "Limits" + "name": "block", + "type": "BlockParameters" }, { - "tag": "LengthLimits", - "discriminant": 2, - "type": "LengthLimits" + "name": "transaction", + "type": "TransactionParameters" }, { - "tag": "Numeric", - "discriminant": 3, - "type": "Numeric" + "name": "executor", + "type": "SmartContractParameters" + }, + { + "name": "smart_contract", + "type": "SmartContractParameters" + }, + { + "name": "custom", + "type": "SortedMap" } ] }, @@ -2914,33 +2848,38 @@ "type": "Permission" }, { - "tag": "LimitedMetadata", + "tag": "Parameters", "discriminant": 4, + "type": "Parameters" + }, + { + "tag": "Metadata", + "discriminant": 5, "type": "JsonString" }, { "tag": "Numeric", - "discriminant": 5, + "discriminant": 6, "type": "Numeric" }, { "tag": "BlockHeader", - "discriminant": 6, + "discriminant": 7, "type": "BlockHeader" }, { "tag": "Block", - "discriminant": 7, + "discriminant": 8, "type": "SignedBlock" }, { "tag": "ExecutorDataModel", - "discriminant": 8, + "discriminant": 9, "type": "ExecutorDataModel" }, { "tag": "Vec", - "discriminant": 9, + "discriminant": 10, "type": "Vec" } ] @@ -3288,12 +3227,12 @@ "type": "RoleId" }, { - "tag": "PermissionRemoved", + "tag": "PermissionAdded", "discriminant": 2, "type": "RolePermissionChanged" }, { - "tag": "PermissionAdded", + "tag": "PermissionRemoved", "discriminant": 3, "type": "RolePermissionChanged" } @@ -3324,11 +3263,11 @@ "mask": 2 }, { - "name": "PermissionRemoved", + "name": "PermissionAdded", "mask": 4 }, { - "name": "PermissionAdded", + "name": "PermissionRemoved", "mask": 8 } ] @@ -3508,14 +3447,7 @@ } ] }, - "SetParameter": { - "Struct": [ - { - "name": "parameter", - "type": "Parameter" - } - ] - }, + "SetParameter": "Parameter", "Signature": { "Struct": [ { @@ -3590,15 +3522,29 @@ } ] }, - "SizeError": { + "SmartContractParameter": { + "Enum": [ + { + "tag": "Fuel", + "discriminant": 0, + "type": "NonZero" + }, + { + "tag": "Memory", + "discriminant": 1, + "type": "NonZero" + } + ] + }, + "SmartContractParameters": { "Struct": [ { - "name": "limits", - "type": "Limits" + "name": "fuel", + "type": "NonZero" }, { - "name": "actual", - "type": "u64" + "name": "memory", + "type": "NonZero" } ] }, @@ -3675,6 +3621,12 @@ "value": "Numeric" } }, + "SortedMap": { + "Map": { + "key": "CustomParameterId", + "value": "CustomParameter" + } + }, "SortedMap": { "Map": { "key": "Name", @@ -3720,6 +3672,32 @@ } ] }, + "SumeragiParameter": { + "Enum": [ + { + "tag": "BlockTimeMs", + "discriminant": 0, + "type": "u64" + }, + { + "tag": "CommitTimeMs", + "discriminant": 1, + "type": "u64" + } + ] + }, + "SumeragiParameters": { + "Struct": [ + { + "name": "block_time_ms", + "type": "u64" + }, + { + "name": "commit_time_ms", + "type": "u64" + } + ] + }, "TimeEvent": { "Struct": [ { @@ -3753,7 +3731,7 @@ }, { "name": "block_height", - "type": "Option" + "type": "Option>" }, { "name": "status", @@ -3769,7 +3747,7 @@ }, { "name": "block_height", - "type": "Option>" + "type": "Option>>" }, { "name": "status", @@ -3785,15 +3763,29 @@ } ] }, - "TransactionLimits": { + "TransactionParameter": { + "Enum": [ + { + "tag": "MaxInstructions", + "discriminant": 0, + "type": "NonZero" + }, 
+ { + "tag": "SmartContractSize", + "discriminant": 1, + "type": "NonZero" + } + ] + }, + "TransactionParameters": { "Struct": [ { - "name": "max_instruction_number", - "type": "u64" + "name": "max_instructions", + "type": "NonZero" }, { - "name": "max_wasm_size_bytes", - "type": "u64" + "name": "smart_contract_size", + "type": "NonZero" } ] }, @@ -3825,7 +3817,7 @@ }, { "name": "metadata", - "type": "SortedMap" + "type": "Metadata" } ] }, diff --git a/ffi/src/std_impls.rs b/ffi/src/std_impls.rs index d45d2ece14e..ff65bbae07b 100644 --- a/ffi/src/std_impls.rs +++ b/ffi/src/std_impls.rs @@ -47,17 +47,25 @@ ffi_type! { niche_value=RefMutSlice::null_mut() } } +ffi_type! { + unsafe impl Transparent for core::ptr::NonNull { + type Target = *mut T; + + validation_fn=unsafe {|target: &*mut T| !target.is_null()}, + niche_value=core::ptr::null_mut() + } +} ffi_type! { unsafe impl Transparent for core::mem::ManuallyDrop { type Target = T; } } ffi_type! { - unsafe impl Transparent for core::ptr::NonNull { - type Target = *mut T; + unsafe impl Transparent for core::num::NonZeroU64 { + type Target = u64; - validation_fn=unsafe {|target: &*mut T| !target.is_null()}, - niche_value=core::ptr::null_mut() + validation_fn=unsafe {|target: &u64| *target != 0}, + niche_value=0 } } diff --git a/primitives/src/json.rs b/primitives/src/json.rs index 35968711752..f36be4d5f92 100644 --- a/primitives/src/json.rs +++ b/primitives/src/json.rs @@ -7,23 +7,22 @@ use alloc::{ string::{String, ToString}, vec::Vec, }; -use core::{ - fmt::{Display, Formatter}, - str::FromStr, -}; +use core::str::FromStr; #[cfg(feature = "std")] use std::{ string::{String, ToString}, vec::Vec, }; +use derive_more::Display; use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::Value; /// A valid `JsonString` that consists of valid String of Json type -#[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq, IntoSchema, Encode, Decode)] +#[derive(Debug, Display, Clone, PartialOrd, PartialEq, Ord, Eq, IntoSchema, Encode, Decode)] +#[display(fmt = "{_0}")] pub struct JsonString(String); impl JsonString { @@ -154,12 +153,6 @@ impl AsRef for JsonString { } } -impl Display for JsonString { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", &self.0) - } -} - mod candidate { use super::*; diff --git a/schema/gen/src/lib.rs b/schema/gen/src/lib.rs index 367459d8760..4e846d0aa72 100644 --- a/schema/gen/src/lib.rs +++ b/schema/gen/src/lib.rs @@ -209,7 +209,6 @@ types!( Ipv4Addr, Ipv6Addr, JsonString, - LengthLimits, Level, Log, MathError, @@ -220,8 +219,6 @@ types!( MetadataChanged, MetadataChanged, MetadataChanged, - MetadataError, - MetadataLimits, Mint, Mint, MintBox, @@ -232,7 +229,6 @@ types!( NewAccount, NewAssetDefinition, NewDomain, - NewParameter, NewRole, NonTrivial, NonZeroU32, @@ -254,7 +250,6 @@ types!( Option, Option, Option>, - Option, Option, Option, Option, @@ -265,8 +260,6 @@ types!( Option, Pagination, Parameter, - ParameterId, - ParameterValueBox, Peer, PeerEvent, PeerEventFilter, @@ -329,7 +322,6 @@ types!( SignedQueryV1, SignedTransaction, SignedTransactionV1, - SizeError, SocketAddr, SocketAddrHost, SocketAddrV4, @@ -345,7 +337,6 @@ types!( TransactionEvent, TransactionEventFilter, TransactionLimitError, - TransactionLimits, TransactionPayload, TransactionQueryOutput, TransactionRejectionReason, @@ -430,8 +421,6 @@ pub mod complete_data_model { }, InstructionType, }, - metadata::{MetadataError, SizeError}, - 
parameter::ParameterValueBox, prelude::*, query::{ error::{FindError, QueryExecutionFail}, @@ -444,8 +433,8 @@ pub mod complete_data_model { ForwardCursor, Pagination, QueryOutputBox, Sorting, }, transaction::{ - error::TransactionLimitError, SignedTransactionV1, TransactionLimits, - TransactionPayload, TransactionSignature, + error::TransactionLimitError, SignedTransactionV1, TransactionPayload, + TransactionSignature, }, BatchedResponse, BatchedResponseV1, Level, }; diff --git a/smart_contract/executor/derive/src/default.rs b/smart_contract/executor/derive/src/default.rs index 04dc91b5fdd..a307de0674e 100644 --- a/smart_contract/executor/derive/src/default.rs +++ b/smart_contract/executor/derive/src/default.rs @@ -156,7 +156,6 @@ pub fn impl_derive_visit(emitter: &mut Emitter, input: &syn::DeriveInput) -> Tok "fn visit_burn_trigger_repetitions(operation: &Burn)", "fn visit_execute_trigger(operation: &ExecuteTrigger)", "fn visit_set_parameter(operation: &SetParameter)", - "fn visit_new_parameter(operation: &NewParameter)", "fn visit_upgrade(operation: &Upgrade)", "fn visit_log(operation: &Log)", "fn visit_custom(operation: &CustomInstruction)", diff --git a/smart_contract/executor/derive/src/lib.rs b/smart_contract/executor/derive/src/lib.rs index 4b11347a112..6c3360d5417 100644 --- a/smart_contract/executor/derive/src/lib.rs +++ b/smart_contract/executor/derive/src/lib.rs @@ -7,6 +7,7 @@ use proc_macro2::TokenStream; mod conversion; mod default; mod entrypoint; +mod parameter; mod permission; mod validate; @@ -100,6 +101,16 @@ pub fn derive_permission(input: TokenStream) -> Result { Ok(permission::impl_derive_permission(&input)) } +/// Derive macro for `Parameter` trait. +/// ``` +#[manyhow] +#[proc_macro_derive(Parameter)] +pub fn derive_parameter(input: TokenStream) -> Result { + let input = syn::parse2(input)?; + + Ok(parameter::impl_derive_parameter(&input)) +} + /// Derive macro for `ValidateGrantRevoke` trait. /// /// # Attributes diff --git a/smart_contract/executor/derive/src/parameter.rs b/smart_contract/executor/derive/src/parameter.rs new file mode 100644 index 00000000000..14ee485351d --- /dev/null +++ b/smart_contract/executor/derive/src/parameter.rs @@ -0,0 +1,43 @@ +//! Module with [`derive_parameter`](crate::derive_parameter) macro implementation + +use proc_macro2::TokenStream; +use quote::quote; + +/// [`derive_parameter`](crate::derive_parameter()) macro implementation +pub fn impl_derive_parameter(input: &syn::DeriveInput) -> TokenStream { + let generics = &input.generics; + let ident = &input.ident; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + quote! 
{ + impl #impl_generics ::iroha_executor::parameter::Parameter for #ident #ty_generics #where_clause {} + + impl #impl_generics TryFrom<&::iroha_executor::data_model::parameter::CustomParameter> for #ident #ty_generics #where_clause { + type Error = ::iroha_executor::TryFromDataModelObjectError; + + fn try_from(value: &::iroha_executor::data_model::parameter::CustomParameter) -> core::result::Result { + if *value.id() != ::id() { + return Err(Self::Error::UnknownIdent(alloc::string::ToString::to_string(value.id().name().as_ref()))); + } + + serde_json::from_str::(value.payload().as_ref()).map_err(Self::Error::Deserialize) + } + } + + impl #impl_generics From<#ident #ty_generics> for ::iroha_executor::data_model::parameter::CustomParameter #where_clause { + fn from(value: #ident #ty_generics) -> Self { + ::iroha_executor::data_model::parameter::CustomParameter::new( + <#ident as ::iroha_executor::parameter::Parameter>::id(), + ::serde_json::to_value::<#ident #ty_generics>(value) + .expect("INTERNAL BUG: Failed to serialize Executor data model entity"), + ) + } + } + + impl #impl_generics From<#ident #ty_generics> for ::iroha_executor::data_model::parameter::Parameter #where_clause { + fn from(value: #ident #ty_generics) -> Self { + Self::Custom(value.into()) + } + } + } +} diff --git a/smart_contract/executor/src/default.rs b/smart_contract/executor/src/default.rs index 9146b2fab66..e7102fba840 100644 --- a/smart_contract/executor/src/default.rs +++ b/smart_contract/executor/src/default.rs @@ -27,7 +27,7 @@ pub use domain::{ pub use executor::visit_upgrade; use iroha_smart_contract::data_model::isi::InstructionBox; pub use log::visit_log; -pub use parameter::{visit_new_parameter, visit_set_parameter}; +pub use parameter::visit_set_parameter; pub use peer::{visit_register_peer, visit_unregister_peer}; pub use permission::{visit_grant_account_permission, visit_revoke_account_permission}; use permissions::AnyPermission; @@ -85,9 +85,6 @@ pub fn visit_instruction( isi: &InstructionBox, ) { match isi { - InstructionBox::NewParameter(isi) => { - executor.visit_new_parameter(authority, isi); - } InstructionBox::SetParameter(isi) => { executor.visit_set_parameter(authority, isi); } @@ -1117,25 +1114,6 @@ pub mod asset { pub mod parameter { use super::*; - #[allow(clippy::needless_pass_by_value)] - pub fn visit_new_parameter( - executor: &mut V, - authority: &AccountId, - isi: &NewParameter, - ) { - if is_genesis(executor) { - execute!(executor, isi); - } - if permissions::parameter::CanCreateParameters.is_owned_by(authority) { - execute!(executor, isi); - } - - deny!( - executor, - "Can't create new configuration parameters outside genesis without permission" - ); - } - #[allow(clippy::needless_pass_by_value)] pub fn visit_set_parameter( executor: &mut V, diff --git a/smart_contract/executor/src/lib.rs b/smart_contract/executor/src/lib.rs index bd01de26da0..0208209c2dc 100644 --- a/smart_contract/executor/src/lib.rs +++ b/smart_contract/executor/src/lib.rs @@ -19,6 +19,7 @@ use iroha_smart_contract_utils::{decode_with_length_prefix_from_raw, encode_and_ pub use smart_contract::{data_model, parse, stub_getrandom}; pub mod default; +pub mod parameter; pub mod permission; pub mod utils { @@ -188,8 +189,9 @@ pub enum TryFromDataModelObjectError { /// A convenience to build [`ExecutorDataModel`] from within the executor #[derive(Debug, Clone)] pub struct DataModelBuilder { + parameters: BTreeSet, + instructions: BTreeSet, permissions: BTreeSet, - custom_instruction: Option, schema: MetaMap, } @@ -199,8 
+201,9 @@ impl DataModelBuilder { #[allow(clippy::new_without_default)] pub fn new() -> Self { Self { + parameters: <_>::default(), + instructions: <_>::default(), permissions: <_>::default(), - custom_instruction: None, schema: <_>::default(), } } @@ -223,26 +226,37 @@ impl DataModelBuilder { /// Define a permission in the data model #[must_use] - pub fn add_permission(mut self) -> Self { - ::update_schema_map(&mut self.schema); - self.permissions - .insert(::name()); + pub fn add_parameter>( + mut self, + param: T, + ) -> Self { + T::update_schema_map(&mut self.schema); + self.parameters.insert(param.into()); self } /// Define a type of custom instruction in the data model. /// Corresponds to payload of `InstructionBox::Custom`. #[must_use] - pub fn with_custom_instruction(mut self) -> Self { + pub fn add_instruction(mut self) -> Self { T::update_schema_map(&mut self.schema); - self.custom_instruction = Some(T::type_name()); + self.instructions.insert(T::type_name()); + self + } + + /// Define a permission in the data model + #[must_use] + pub fn add_permission(mut self) -> Self { + T::update_schema_map(&mut self.schema); + self.permissions + .insert(::name()); self } /// Remove a permission from the data model #[must_use] pub fn remove_permission(mut self) -> Self { - ::remove_from_schema(&mut self.schema); + T::remove_from_schema(&mut self.schema); self.permissions .remove(&::name()); self @@ -282,8 +296,12 @@ impl DataModelBuilder { } set_data_model(&ExecutorDataModel::new( + self.parameters + .into_iter() + .map(|param| (param.id().clone(), param)) + .collect(), + self.instructions, self.permissions, - self.custom_instruction, serde_json::to_value(&self.schema) .expect("INTERNAL BUG: Failed to serialize Executor data model entity") .into(), @@ -309,8 +327,8 @@ pub mod prelude { pub use alloc::vec::Vec; pub use iroha_executor_derive::{ - entrypoint, Constructor, Permission, Validate, ValidateEntrypoints, ValidateGrantRevoke, - Visit, + entrypoint, Constructor, Parameter, Permission, Validate, ValidateEntrypoints, + ValidateGrantRevoke, Visit, }; pub use iroha_smart_contract::prelude::*; @@ -321,6 +339,7 @@ pub mod prelude { ValidationFail, }, deny, execute, + parameter::Parameter as ParameterTrait, permission::Permission as PermissionTrait, DataModelBuilder, Validate, }; diff --git a/smart_contract/executor/src/parameter.rs b/smart_contract/executor/src/parameter.rs new file mode 100644 index 00000000000..22a61c74a3d --- /dev/null +++ b/smart_contract/executor/src/parameter.rs @@ -0,0 +1,17 @@ +//! Module with parameter related functionality. + +use iroha_schema::IntoSchema; +use iroha_smart_contract::{data_model::parameter::CustomParameterId, debug::DebugExpectExt}; +use serde::{de::DeserializeOwned, Serialize}; + +/// Blockchain specific parameter +pub trait Parameter: Default + Serialize + DeserializeOwned + IntoSchema { + /// Parameter id, according to [`IntoSchema`]. + fn id() -> CustomParameterId { + CustomParameterId::new( + ::type_name() + .parse() + .dbg_expect("Failed to parse parameter id as `Name`"), + ) + } +} diff --git a/smart_contract/executor/src/permission.rs b/smart_contract/executor/src/permission.rs index dd6f729e900..a402e861c32 100644 --- a/smart_contract/executor/src/permission.rs +++ b/smart_contract/executor/src/permission.rs @@ -1,4 +1,4 @@ -//! Module with permission tokens and permission related functionality. +//! Module with permission related functionality. 
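// A minimal sketch (not part of this changeset) of how an executor could use the
// new `Parameter` derive together with `DataModelBuilder::add_parameter` from the
// hunks above. `DomainLimits`, its field, and `expose_custom_parameter` are
// hypothetical names, and the call that finalizes the builder is omitted because
// it does not appear in these hunks.
use iroha_executor::prelude::*;
use iroha_schema::IntoSchema;
use serde::{Deserialize, Serialize};

/// Executor-defined parameter; the derive relies on the
/// `Default + Serialize + DeserializeOwned + IntoSchema` bound of the new trait.
#[derive(Debug, Default, Serialize, Deserialize, IntoSchema, Parameter)]
struct DomainLimits {
    max_accounts: u32,
}

fn expose_custom_parameter() -> DataModelBuilder {
    // Registering the parameter records its schema and default value so clients can
    // discover it through the executor data model; the derived
    // `From<DomainLimits> for CustomParameter` performs the conversion.
    DataModelBuilder::new().add_parameter(DomainLimits { max_accounts: 100 })
}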
use alloc::borrow::ToOwned as _; @@ -9,7 +9,7 @@ use serde::{de::DeserializeOwned, Serialize}; use crate::prelude::{Permission as PermissionObject, *}; -/// Is used to check if the permission token is owned by the account. +/// Used to check if the permission token is owned by the account. pub trait Permission: Serialize + DeserializeOwned + IntoSchema + PartialEq + ValidateGrantRevoke { @@ -113,7 +113,7 @@ pub mod asset_definition { /// Check if `authority` is the owner of asset definition - /// `authority` is owner of asset_definition if: + /// `authority` is owner of asset definition if: /// - `asset_definition.owned_by` is `authority` /// - `asset_definition.domain_id` domain is owned by `authority` /// diff --git a/tools/kagami/src/genesis/generate.rs b/tools/kagami/src/genesis/generate.rs index a5fb95f0c94..1004a9f57d1 100644 --- a/tools/kagami/src/genesis/generate.rs +++ b/tools/kagami/src/genesis/generate.rs @@ -5,12 +5,7 @@ use std::{ use clap::{Parser, Subcommand}; use color_eyre::eyre::WrapErr as _; -use iroha_config::parameters::defaults::chain_wide as chain_wide_defaults; -use iroha_data_model::{ - metadata::Limits, - parameter::{default::*, ParametersBuilder}, - prelude::*, -}; +use iroha_data_model::prelude::*; use iroha_genesis::{GenesisBuilder, RawGenesisTransaction, GENESIS_DOMAIN_ID}; use serde_json::json; use test_samples::{gen_account_in, ALICE_ID, BOB_ID, CARPENTER_ID}; @@ -92,12 +87,8 @@ pub fn generate_default( genesis_public_key: PublicKey, ) -> color_eyre::Result { let genesis_account_id = AccountId::new(GENESIS_DOMAIN_ID.clone(), genesis_public_key); - let mut meta = Metadata::new(); - meta.insert_with_limits( - "key".parse()?, - JsonString::new("value"), - Limits::new(1024, 1024), - )?; + let mut meta = Metadata::default(); + meta.insert("key".parse()?, JsonString::new("value")); let mut builder = builder .domain_with_metadata("wonderland".parse()?, meta.clone()) @@ -135,7 +126,7 @@ pub fn generate_default( "wonderland".parse()?, ALICE_ID.clone(), ); - let register_user_metadata_access = Register::role( + let register_user_metadata_access: InstructionBox = Register::role( Role::new("ALICE_METADATA_ACCESS".parse()?) .add_permission(Permission::new( "CanSetKeyValueInAccount".parse()?, @@ -148,62 +139,6 @@ pub fn generate_default( ) .into(); - let parameter_defaults = ParametersBuilder::new() - .add_parameter( - MAX_TRANSACTIONS_IN_BLOCK, - Numeric::new(chain_wide_defaults::MAX_TXS.get().into(), 0), - )? - .add_parameter( - BLOCK_TIME, - Numeric::new(chain_wide_defaults::BLOCK_TIME.as_millis(), 0), - )? - .add_parameter( - COMMIT_TIME_LIMIT, - Numeric::new(chain_wide_defaults::COMMIT_TIME.as_millis(), 0), - )? - .add_parameter(TRANSACTION_LIMITS, chain_wide_defaults::TRANSACTION_LIMITS)? - .add_parameter( - WSV_DOMAIN_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_ASSET_DEFINITION_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_ACCOUNT_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_ASSET_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_TRIGGER_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_IDENT_LENGTH_LIMITS, - chain_wide_defaults::IDENT_LENGTH_LIMITS, - )? - .add_parameter( - EXECUTOR_FUEL_LIMIT, - Numeric::new(chain_wide_defaults::WASM_FUEL_LIMIT.into(), 0), - )? 
- .add_parameter( - EXECUTOR_MAX_MEMORY, - Numeric::new(chain_wide_defaults::WASM_MAX_MEMORY.get().into(), 0), - )? - .add_parameter( - WASM_FUEL_LIMIT, - Numeric::new(chain_wide_defaults::WASM_FUEL_LIMIT.into(), 0), - )? - .add_parameter( - WASM_MAX_MEMORY, - Numeric::new(chain_wide_defaults::WASM_MAX_MEMORY.get().into(), 0), - )? - .into_create_parameters(); - for isi in [ mint.into(), mint_cabbage.into(), @@ -212,7 +147,6 @@ pub fn generate_default( grant_permission_to_set_parameters.into(), ] .into_iter() - .chain(parameter_defaults.into_iter()) .chain(std::iter::once(register_user_metadata_access)) { builder = builder.append_instruction(isi); diff --git a/tools/parity_scale_cli/samples/trigger.bin b/tools/parity_scale_cli/samples/trigger.bin index c6493efbb4f..d46095a94fd 100644 Binary files a/tools/parity_scale_cli/samples/trigger.bin and b/tools/parity_scale_cli/samples/trigger.bin differ diff --git a/tools/parity_scale_cli/src/main.rs b/tools/parity_scale_cli/src/main.rs index 6657ff2f59d..bd162df8b47 100644 --- a/tools/parity_scale_cli/src/main.rs +++ b/tools/parity_scale_cli/src/main.rs @@ -313,13 +313,11 @@ mod tests { #[test] fn decode_account_sample() { - let limits = MetadataLimits::new(256, 256); - let mut metadata = Metadata::new(); + let mut metadata = Metadata::default(); metadata - .insert_with_limits( + .insert( "hat".parse().expect("Valid"), "white".parse::().expect("Valid"), - limits, ) .expect("Valid"); let account = Account::new(ALICE_ID.clone()).with_metadata(metadata); @@ -329,10 +327,9 @@ mod tests { #[test] fn decode_domain_sample() { - let limits = MetadataLimits::new(256, 256); - let mut metadata = Metadata::new(); + let mut metadata = Metadata::default(); metadata - .insert_with_limits("Is_Jabberwocky_alive".parse().expect("Valid"), true, limits) + .insert("Is_Jabberwocky_alive".parse().expect("Valid"), true) .expect("Valid"); let domain = Domain::new("wonderland".parse().expect("Valid")) .with_logo( diff --git a/torii/src/routing.rs b/torii/src/routing.rs index 87c2381673e..3f8dbe997ef 100644 --- a/torii/src/routing.rs +++ b/torii/src/routing.rs @@ -52,7 +52,7 @@ pub async fn handle_transaction( transaction: SignedTransaction, ) -> Result { let state_view = state.view(); - let transaction_limits = state_view.config.transaction_limits; + let transaction_limits = state_view.world().parameters().transaction; let transaction = AcceptedTransaction::accept(transaction, &chain_id, transaction_limits) .map_err(Error::AcceptTransaction)?; queue
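Since `handle_transaction` now reads transaction limits from the on-chain `Parameters` (`state_view.world().parameters().transaction`) rather than from static node configuration, those limits become adjustable at runtime via `SetParameter`. Below is a minimal client-side sketch under stated assumptions: the `Parameter::Transaction` and `Parameter::Sumeragi` variant names are assumed to mirror the fields of the `Parameters` struct shown earlier, and the numeric values are arbitrary.

use std::num::NonZeroU64;

use iroha::{
    client::Client,
    data_model::{
        parameter::{Parameter, SumeragiParameter, TransactionParameter},
        prelude::*,
    },
};

fn raise_runtime_limits(client: &Client) {
    // Allow more instructions per transaction (assumed Parameter::Transaction variant).
    client
        .submit_blocking(SetParameter::new(Parameter::Transaction(
            TransactionParameter::MaxInstructions(NonZeroU64::new(8_192).unwrap()),
        )))
        .expect("failed to set transaction parameter");

    // Target a two-second block time (assumed Parameter::Sumeragi variant).
    client
        .submit_blocking(SetParameter::new(Parameter::Sumeragi(
            SumeragiParameter::BlockTimeMs(2_000),
        )))
        .expect("failed to set sumeragi parameter");
}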