diff --git a/client/tests/integration/events/data.rs b/client/tests/integration/events/data.rs
index d3aa2a96834..9f914bb5bb3 100644
--- a/client/tests/integration/events/data.rs
+++ b/client/tests/integration/events/data.rs
@@ -181,46 +181,46 @@ fn produce_multiple_events() -> Result<()> {
     }
 
     let expected_domain_events: Vec<DataEvent> = [
-        WorldEvent::Domain(DomainEvent::Account(AccountEvent::PermissionAdded(
+        DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionAdded(
             AccountPermissionChanged {
                 account_id: bob_id.clone(),
                 permission_id: token_1.definition_id.clone(),
             },
         ))),
-        WorldEvent::Domain(DomainEvent::Account(AccountEvent::PermissionAdded(
+        DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionAdded(
             AccountPermissionChanged {
                 account_id: bob_id.clone(),
                 permission_id: token_2.definition_id.clone(),
             },
         ))),
-        WorldEvent::Domain(DomainEvent::Account(AccountEvent::RoleGranted(
+        DataEvent::Domain(DomainEvent::Account(AccountEvent::RoleGranted(
             AccountRoleChanged {
                 account_id: bob_id.clone(),
                 role_id: role_id.clone(),
             },
         ))),
-        WorldEvent::Domain(DomainEvent::Account(AccountEvent::PermissionRemoved(
+        DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionRemoved(
             AccountPermissionChanged {
                 account_id: bob_id.clone(),
                 permission_id: token_1.definition_id,
             },
         ))),
-        WorldEvent::Domain(DomainEvent::Account(AccountEvent::PermissionRemoved(
+        DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionRemoved(
             AccountPermissionChanged {
                 account_id: bob_id.clone(),
                 permission_id: token_2.definition_id,
             },
         ))),
-        WorldEvent::Domain(DomainEvent::Account(AccountEvent::RoleRevoked(
+        DataEvent::Domain(DomainEvent::Account(AccountEvent::RoleRevoked(
             AccountRoleChanged {
                 account_id: bob_id,
                 role_id: role_id.clone(),
             },
         ))),
-        WorldEvent::Role(RoleEvent::Deleted(role_id)),
+        DataEvent::Role(RoleEvent::Deleted(role_id)),
     ]
     .into_iter()
-    .flat_map(WorldEvent::flatten)
+    .map(Into::into)
    .collect();
 
     for expected_event in expected_domain_events {
diff --git a/client/tests/integration/restart_peer.rs b/client/tests/integration/restart_peer.rs
index 8249edee078..6c62a7bb393 100644
--- a/client/tests/integration/restart_peer.rs
+++ b/client/tests/integration/restart_peer.rs
@@ -19,7 +19,7 @@ fn restarted_peer_should_have_the_same_asset_amount() -> Result<()> {
 
     let mut removed_peer = {
         let n_peers = 4;
-        let (_rt, network, _) = Network::start_test_with_runtime(n_peers, Some(11_200));
+        let (_rt, network, _) = Network::start_test_with_runtime(n_peers, Some(11_205));
         wait_for_genesis_committed(&network.clients(), 0);
         let pipeline_time = Configuration::pipeline_time();
         let peer_clients = Network::clients(&network);
diff --git a/client/tests/integration/triggers/by_call_trigger.rs b/client/tests/integration/triggers/by_call_trigger.rs
index 7c2cf8aff41..f40963bc2ce 100644
--- a/client/tests/integration/triggers/by_call_trigger.rs
+++ b/client/tests/integration/triggers/by_call_trigger.rs
@@ -9,7 +9,7 @@ use iroha_client::{
         transaction::Executable,
     },
 };
-use iroha_data_model::events::TriggeringFilterBox;
+use iroha_data_model::{events::TriggeringFilterBox, transaction::WasmSmartContract};
 use iroha_genesis::GenesisNetwork;
 use iroha_logger::info;
 use test_network::*;
@@ -480,6 +480,59 @@ fn trigger_burn_repetitions() -> Result<()> {
     Ok(())
 }
 
+#[test]
+fn unregistering_one_of_two_triggers_with_identical_wasm_should_not_cause_original_wasm_loss(
+) -> Result<()> {
+    let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(11_105).start_with_runtime();
+    wait_for_genesis_committed(&vec![test_client.clone()], 0);
+
+    let account_id = AccountId::from_str("alice@wonderland")?;
+    let first_trigger_id = TriggerId::from_str("mint_rose_1")?;
+    let second_trigger_id = TriggerId::from_str("mint_rose_2")?;
+
+    let wasm =
+        iroha_wasm_builder::Builder::new("tests/integration/smartcontracts/mint_rose_trigger")
+            .show_output()
+            .build()?
+            .optimize()?
+            .into_bytes()?;
+    let wasm = WasmSmartContract::from_compiled(wasm);
+
+    let build_trigger = |trigger_id: TriggerId| {
+        Trigger::new(
+            trigger_id.clone(),
+            Action::new(
+                wasm.clone(),
+                Repeats::Indefinitely,
+                account_id.clone(),
+                TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
+                    trigger_id,
+                    account_id.clone(),
+                )),
+            ),
+        )
+    };
+
+    let first_trigger = build_trigger(first_trigger_id.clone());
+    let second_trigger = build_trigger(second_trigger_id.clone());
+
+    test_client.submit_all_blocking([
+        Register::trigger(first_trigger),
+        Register::trigger(second_trigger.clone()),
+    ])?;
+
+    test_client.submit_blocking(Unregister::trigger(first_trigger_id))?;
+    let got_second_trigger = test_client
+        .request(FindTriggerById {
+            id: second_trigger_id,
+        })
+        .expect("Failed to request second trigger");
+
+    assert_eq!(got_second_trigger, second_trigger);
+
+    Ok(())
+}
+
 fn get_asset_value(client: &mut Client, asset_id: AssetId) -> Result<u32> {
     let asset = client.request(client::asset::by_id(asset_id))?;
     Ok(*TryAsRef::<u32>::try_as_ref(asset.value())?)
diff --git a/client/tests/integration/triggers/data_trigger.rs b/client/tests/integration/triggers/data_trigger.rs
index e7096f7b024..2c197c9da18 100644
--- a/client/tests/integration/triggers/data_trigger.rs
+++ b/client/tests/integration/triggers/data_trigger.rs
@@ -20,8 +20,15 @@ fn must_execute_both_triggers() -> Result<()> {
         [instruction.clone()],
         Repeats::Indefinitely,
         account_id.clone(),
-        TriggeringFilterBox::Data(BySome(DataEntityFilter::ByAccount(BySome(
-            AccountFilter::new(AcceptAll, BySome(AccountEventFilter::ByCreated)),
+        // FIXME: rewrite the filters using the builder DSL https://github.com/hyperledger/iroha/issues/3068
+        TriggeringFilterBox::Data(BySome(DataEntityFilter::ByDomain(BySome(
+            DomainFilter::new(
+                AcceptAll,
+                BySome(DomainEventFilter::ByAccount(BySome(AccountFilter::new(
+                    AcceptAll,
+                    BySome(AccountEventFilter::ByCreated),
+                )))),
+            ),
         )))),
     ),
 ));
@@ -86,8 +93,15 @@ fn domain_scoped_trigger_must_be_executed_only_on_events_in_its_domain() -> Resu
         [Mint::asset_quantity(1_u32, asset_id.clone())],
         Repeats::Indefinitely,
         account_id,
-        TriggeringFilterBox::Data(BySome(DataEntityFilter::ByAccount(BySome(
-            AccountFilter::new(AcceptAll, BySome(AccountEventFilter::ByCreated)),
+        // FIXME: rewrite the filters using the builder DSL https://github.com/hyperledger/iroha/issues/3068
+        TriggeringFilterBox::Data(BySome(DataEntityFilter::ByDomain(BySome(
+            DomainFilter::new(
+                AcceptAll,
+                BySome(DomainEventFilter::ByAccount(BySome(AccountFilter::new(
+                    AcceptAll,
+                    BySome(AccountEventFilter::ByCreated),
+                )))),
+            ),
         )))),
     ),
 ));
diff --git a/client/tests/integration/triggers/event_trigger.rs b/client/tests/integration/triggers/event_trigger.rs
index 8269a244ad4..bc340775aff 100644
--- a/client/tests/integration/triggers/event_trigger.rs
+++ b/client/tests/integration/triggers/event_trigger.rs
@@ -24,10 +24,16 @@ fn test_mint_asset_when_new_asset_definition_created() -> Result<()> {
         vec![instruction],
         Repeats::Indefinitely,
         account_id,
-        TriggeringFilterBox::Data(BySome(DataEntityFilter::ByAssetDefinition(BySome(
-            AssetDefinitionFilter::new(
+        // FIXME: rewrite the filters using the builder DSL https://github.com/hyperledger/iroha/issues/3068
+        TriggeringFilterBox::Data(BySome(DataEntityFilter::ByDomain(BySome(
+            DomainFilter::new(
                 AcceptAll,
-                BySome(AssetDefinitionEventFilter::ByCreated),
+                BySome(DomainEventFilter::ByAssetDefinition(BySome(
+                    AssetDefinitionFilter::new(
+                        AcceptAll,
+                        BySome(AssetDefinitionEventFilter::ByCreated),
+                    ),
+                ))),
             ),
         )))),
     ),
diff --git a/configs/peer/executor.wasm b/configs/peer/executor.wasm
index 48c0f25b41c..7af36698d6c 100644
Binary files a/configs/peer/executor.wasm and b/configs/peer/executor.wasm differ
diff --git a/core/src/smartcontracts/isi/domain.rs b/core/src/smartcontracts/isi/domain.rs
index 81855321e7c..2912b511b09 100644
--- a/core/src/smartcontracts/isi/domain.rs
+++ b/core/src/smartcontracts/isi/domain.rs
@@ -179,7 +179,7 @@ pub mod isi {
 
             domain.remove_asset_total_quantity(&asset_definition_id);
 
-            events.push(WorldEvent::from(DomainEvent::AssetDefinition(
+            events.push(DataEvent::from(DomainEvent::AssetDefinition(
                 AssetDefinitionEvent::Deleted(asset_definition_id),
             )));
 
diff --git a/core/src/smartcontracts/isi/triggers/set.rs b/core/src/smartcontracts/isi/triggers/set.rs
index 57e32ae1406..b8a41ea0b3e 100644
--- a/core/src/smartcontracts/isi/triggers/set.rs
+++ b/core/src/smartcontracts/isi/triggers/set.rs
@@ -9,9 +9,9 @@
 //! trigger hooks.
 
 use core::cmp::min;
-use std::fmt;
+use std::{fmt, num::NonZeroU64};
 
-use indexmap::IndexMap;
+use indexmap::{map::Entry, IndexMap};
 use iroha_crypto::HashOf;
 use iroha_data_model::{
     events::Filter as EventFilter,
@@ -58,6 +58,15 @@ pub struct LoadedAction<F> {
     pub metadata: Metadata,
 }
 
+impl<F> LoadedAction<F> {
+    fn extract_blob_hash(&self) -> Option<HashOf<WasmSmartContract>> {
+        match self.executable {
+            LoadedExecutable::Wasm(LoadedWasm { blob_hash, .. }) => Some(blob_hash),
+            LoadedExecutable::Instructions(_) => None,
+        }
+    }
+}
+
 /// Trait common for all `LoadedAction`s
 pub trait LoadedActionTrait {
     /// Get action executable
@@ -133,6 +142,10 @@ impl<F: Filter + Into<TriggeringFilterBox> + Clone> LoadedActionTrait for Loaded
     }
 }
 
+/// [`WasmSmartContract`]s mapped by their hash.
+/// Stored together with a count of triggers that use the identical [`WasmSmartContract`].
+type WasmSmartContractMap = IndexMap<HashOf<WasmSmartContract>, (WasmSmartContract, NonZeroU64)>;
+
 /// Specialized structure that maps event filters to Triggers.
 // NB: `Set` has custom `Serialize` and `DeserializeSeed` implementations
 // which need to be manually updated when changing the struct
@@ -149,7 +162,7 @@ pub struct Set {
     /// Trigger ids with type of events they process
     ids: IndexMap<TriggerId, TriggeringEventType>,
     /// Original [`WasmSmartContract`]s by [`TriggerId`] for querying purposes.
-    original_contracts: IndexMap<HashOf<WasmSmartContract>, WasmSmartContract>,
+    original_contracts: WasmSmartContractMap,
     /// List of actions that should be triggered by events provided by `handle_*` methods.
     /// Vector is used to save the exact triggers order.
     matched_ids: Vec<(Event, TriggerId)>,
 }
 
 struct TriggersWithContext<'s, F> {
     /// Triggers being serialized
     triggers: &'s IndexMap<TriggerId, LoadedAction<F>>,
-    /// Containing Set, used for looking up origignal [`WasmSmartContract`]s
+    /// Containing Set, used for looking up original [`WasmSmartContract`]s
     /// during serialization.
     set: &'s Set,
 }
 
@@ -189,24 +202,33 @@ impl Serialize for Set {
     where
         S: serde::Serializer,
     {
+        let &Self {
+            data_triggers,
+            pipeline_triggers,
+            time_triggers,
+            by_call_triggers,
+            ids,
+            original_contracts: _original_contracts,
+            matched_ids: _matched_ids,
+        } = &self;
         let mut set = serializer.serialize_struct("Set", 6)?;
         set.serialize_field(
             "data_triggers",
-            &TriggersWithContext::new(&self.data_triggers, self),
+            &TriggersWithContext::new(data_triggers, self),
         )?;
         set.serialize_field(
             "pipeline_triggers",
-            &TriggersWithContext::new(&self.pipeline_triggers, self),
+            &TriggersWithContext::new(pipeline_triggers, self),
         )?;
         set.serialize_field(
             "time_triggers",
-            &TriggersWithContext::new(&self.time_triggers, self),
+            &TriggersWithContext::new(time_triggers, self),
         )?;
         set.serialize_field(
             "by_call_triggers",
-            &TriggersWithContext::new(&self.by_call_triggers, self),
+            &TriggersWithContext::new(by_call_triggers, self),
         )?;
-        set.serialize_field("ids", &self.ids)?;
+        set.serialize_field("ids", ids)?;
         set.end()
     }
 }
@@ -411,7 +433,16 @@ impl Set {
                     blob_hash: hash,
                 });
                 // Store original executable representation to respond to queries with.
-                self.original_contracts.insert(hash, bytes);
+                self.original_contracts
+                    .entry(hash)
+                    .and_modify(|(_, count)| {
+                        // Considering 1 trigger registration takes 1 second,
+                        // it would take 584 942 417 355 years to overflow.
+                        *count = count.checked_add(1).expect(
+                            "There is no way someone could register 2^64 amount of same triggers",
+                        )
+                    })
+                    .or_insert((bytes, NonZeroU64::MIN));
                 loaded
             }
             Executable::Instructions(instructions) => LoadedExecutable::Instructions(instructions),
@@ -438,7 +469,9 @@ impl Set {
         &self,
         hash: &HashOf<WasmSmartContract>,
     ) -> Option<&WasmSmartContract> {
-        self.original_contracts.get(hash)
+        self.original_contracts
+            .get(hash)
+            .map(|(contract, _)| contract)
     }
 
     /// Convert [`LoadedAction`] to original [`Action`] by retrieving original
@@ -633,52 +666,86 @@ impl Set {
     /// Remove a trigger from the [`Set`].
     ///
     /// Return `false` if [`Set`] doesn't contain the trigger with the given `id`.
+    ///
+    /// # Panics
+    ///
+    /// Panics on inconsistent state of [`Set`]. This is a bug.
     pub fn remove(&mut self, id: &TriggerId) -> bool {
-        // Used in a map that requires this signature
-        #[allow(clippy::needless_pass_by_value)]
-        fn extract_blob_hash<F>(action: LoadedAction<F>) -> Option<HashOf<WasmSmartContract>> {
-            match action.executable {
-                LoadedExecutable::Wasm(LoadedWasm { blob_hash, .. }) => Some(blob_hash),
-                LoadedExecutable::Instructions(_) => None,
-            }
-        }
-
         let Some(event_type) = self.ids.remove(id) else {
             return false;
         };
 
-        let blob_hash = match event_type {
-            TriggeringEventType::Data => self
-                .data_triggers
-                .remove(id)
-                .map(extract_blob_hash)
-                .expect("`Set::data_triggers` doesn't contain required id. This is a bug"),
-            TriggeringEventType::Pipeline => self
-                .pipeline_triggers
-                .remove(id)
-                .map(extract_blob_hash)
-                .expect("`Set::pipeline_triggers` doesn't contain required id. This is a bug"),
-            TriggeringEventType::Time => self
-                .time_triggers
-                .remove(id)
-                .map(extract_blob_hash)
-                .expect("`Set::time_triggers` doesn't contain required id. This is a bug"),
-            TriggeringEventType::ExecuteTrigger => self
-                .by_call_triggers
-                .remove(id)
-                .map(extract_blob_hash)
-                .expect("`Set::by_call_triggers` doesn't contain required id. This is a bug"),
This is a bug"), + let removed = match event_type { + TriggeringEventType::Data => { + Self::remove_from(&mut self.original_contracts, &mut self.data_triggers, id) + } + TriggeringEventType::Pipeline => Self::remove_from( + &mut self.original_contracts, + &mut self.pipeline_triggers, + id, + ), + TriggeringEventType::Time => { + Self::remove_from(&mut self.original_contracts, &mut self.time_triggers, id) + } + TriggeringEventType::ExecuteTrigger => { + Self::remove_from(&mut self.original_contracts, &mut self.by_call_triggers, id) + } }; - if let Some(blob_hash) = blob_hash { - self.original_contracts - .remove(&blob_hash) - .expect("`Set::original_contracts` doesn't contain required hash. This is a bug"); - } + assert!( + removed, + "`Set`'s `ids` and typed trigger collections are inconsistent. This is a bug" + ); true } + /// Remove trigger from `triggers` and decrease the counter of the original [`WasmSmartContract`]. + /// + /// Note that this function doesn't remove the trigger from [`Set::ids`]. + /// + /// Returns `true` if trigger was removed and `false` otherwise. + fn remove_from( + original_contracts: &mut WasmSmartContractMap, + triggers: &mut IndexMap>, + trigger_id: &TriggerId, + ) -> bool { + triggers + .remove(trigger_id) + .map(|loaded_action| { + if let Some(blob_hash) = loaded_action.extract_blob_hash() { + Self::remove_original_trigger(original_contracts, blob_hash); + } + }) + .is_some() + } + + /// Decrease the counter of the original [`WasmSmartContract`] by `blob_hash` + /// or remove it if the counter reaches zero. + /// + /// # Panics + /// + /// Panics if `blob_hash` is not in the [`Set::original_contracts`]. + fn remove_original_trigger( + original_contracts: &mut WasmSmartContractMap, + blob_hash: HashOf, + ) { + #[allow(clippy::option_if_let_else)] // More readable this way + match original_contracts.entry(blob_hash) { + Entry::Occupied(mut entry) => { + let count = &mut entry.get_mut().1; + if let Some(new_count) = NonZeroU64::new(count.get() - 1) { + *count = new_count; + } else { + entry.remove(); + } + } + Entry::Vacant(_) => { + panic!("`Set::original_contracts` doesn't contain required hash. This is a bug") + } + } + } + /// Check if [`Set`] contains `id`. #[inline] pub fn contains(&self, id: &TriggerId) -> bool { @@ -807,35 +874,35 @@ impl Set { time_triggers, by_call_triggers, ids, + original_contracts, .. } = self; - Self::remove_zeros(ids, data_triggers); - Self::remove_zeros(ids, pipeline_triggers); - Self::remove_zeros(ids, time_triggers); - Self::remove_zeros(ids, by_call_triggers); + Self::remove_zeros(ids, original_contracts, data_triggers); + Self::remove_zeros(ids, original_contracts, pipeline_triggers); + Self::remove_zeros(ids, original_contracts, time_triggers); + Self::remove_zeros(ids, original_contracts, by_call_triggers); } /// Remove actions with zero execution count from `triggers` fn remove_zeros( ids: &mut IndexMap, + original_contracts: &mut WasmSmartContractMap, triggers: &mut IndexMap>, ) { let to_remove: Vec = triggers .iter() .filter_map(|(id, action)| { - if let Repeats::Exactly(repeats) = action.repeats { - if repeats == 0 { - return Some(id.clone()); - } + if let Repeats::Exactly(0) = action.repeats { + return Some(id.clone()); } None }) .collect(); for id in to_remove { - triggers.remove(&id).and_then(|_| ids.remove(&id)).expect( - "Removing existing keys from `Set` should be always possible. 
This is a bug", - ); + ids.remove(&id) + .and_then(|_| Self::remove_from(original_contracts, triggers, &id).then_some(())) + .expect("`Set`'s `ids`, `original_contracts` and typed trigger collections are inconsistent. This is a bug") } } diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs index 8c82663ee6c..2bafc67e9f2 100644 --- a/core/src/sumeragi/mod.rs +++ b/core/src/sumeragi/mod.rs @@ -229,11 +229,14 @@ impl SumeragiHandle { fn replay_block( block: &SignedBlock, wsv: &mut WorldStateView, - current_topology: &Topology, + mut current_topology: Topology, ) -> Topology { - let block = ValidBlock::validate(block.clone(), current_topology, wsv) + // NOTE: topology need to be updated up to block's view_change_index + current_topology.rotate_all_n(block.payload().header.view_change_index); + + let block = ValidBlock::validate(block.clone(), ¤t_topology, wsv) .expect("Kura blocks should be valid") - .commit(current_topology) + .commit(¤t_topology) .expect("Kura blocks should be valid"); if block.payload().header.is_genesis() { @@ -293,14 +296,14 @@ impl SumeragiHandle { let block_iter_except_last = (&mut blocks_iter).take(block_count.saturating_sub(skip_block_count + 1)); for block in block_iter_except_last { - current_topology = Self::replay_block(&block, &mut wsv, ¤t_topology); + current_topology = Self::replay_block(&block, &mut wsv, current_topology); } // finalized_wsv is one block behind let finalized_wsv = wsv.clone(); if let Some(block) = blocks_iter.next() { - current_topology = Self::replay_block(&block, &mut wsv, ¤t_topology); + current_topology = Self::replay_block(&block, &mut wsv, current_topology); } info!("Sumeragi has finished loading blocks and setting up the WSV"); diff --git a/core/src/wsv.rs b/core/src/wsv.rs index 0e653e6c488..2bbf58ff7ca 100644 --- a/core/src/wsv.rs +++ b/core/src/wsv.rs @@ -1252,7 +1252,7 @@ impl WorldStateView { /// The function puts events produced by iterator into `events_buffer`. /// Events should be produced in the order of expanding scope: from specific to general. /// Example: account events before domain events. - pub fn emit_events, T: Into>(&mut self, world_events: I) { + pub fn emit_events, T: Into>(&mut self, world_events: I) { Self::emit_events_impl( &mut self.world.triggers, &mut self.events_buffer, @@ -1263,7 +1263,7 @@ impl WorldStateView { /// Implementation of [`Self::emit_events()`]. /// /// Usable when you can't call [`Self::emit_events()`] due to mutable reference to self. - fn emit_events_impl, T: Into>( + fn emit_events_impl, T: Into>( triggers: &mut TriggerSet, events_buffer: &mut Vec, world_events: I, @@ -1271,7 +1271,7 @@ impl WorldStateView { let data_events: SmallVec<[DataEvent; 3]> = world_events .into_iter() .map(Into::into) - .flat_map(WorldEvent::flatten) + .map(Into::into) .collect(); for event in data_events.iter() { @@ -1285,7 +1285,7 @@ impl WorldStateView { /// Produces [`PermissionTokenSchemaUpdateEvent`]. 
     pub fn set_permission_token_schema(&mut self, schema: PermissionTokenSchema) {
         let old_schema = std::mem::replace(&mut self.world.permission_token_schema, schema.clone());
-        self.emit_events(std::iter::once(WorldEvent::PermissionTokenSchemaUpdate(
+        self.emit_events(std::iter::once(DataEvent::PermissionToken(
             PermissionTokenSchemaUpdateEvent {
                 old_schema,
                 new_schema: schema,
diff --git a/crypto/src/signature/secp256k1.rs b/crypto/src/signature/secp256k1.rs
index 4d7ec814141..1939213af37 100644
--- a/crypto/src/signature/secp256k1.rs
+++ b/crypto/src/signature/secp256k1.rs
@@ -148,8 +148,8 @@ mod test {
     #[test]
     fn secp256k1_load_keys() {
         let secret = PrivateKey::from_hex(ALGORITHM, PRIVATE_KEY).unwrap();
-        let sres = EcdsaSecp256k1Sha256::keypair(Some(KeyGenOption::FromPrivateKey(secret)));
-        assert!(sres.is_ok());
+        let _sres =
+            EcdsaSecp256k1Sha256::keypair(Some(KeyGenOption::FromPrivateKey(secret))).unwrap();
     }
 
     #[test]
@@ -158,16 +158,14 @@ mod test {
         let (p, s) =
             EcdsaSecp256k1Sha256::keypair(Some(KeyGenOption::FromPrivateKey(secret))).unwrap();
 
-        let sk = secp256k1::SecretKey::from_slice(s.payload());
-        assert!(sk.is_ok());
-        let pk = secp256k1::PublicKey::from_slice(p.payload());
-        assert!(pk.is_ok());
+        let _sk = secp256k1::SecretKey::from_slice(s.payload()).unwrap();
+        let _pk = secp256k1::PublicKey::from_slice(p.payload()).unwrap();
 
         let openssl_group = EcGroup::from_curve_name(Nid::SECP256K1).unwrap();
         let mut ctx = BigNumContext::new().unwrap();
-        let openssl_point =
-            EcPoint::from_bytes(&openssl_group, &public_key_uncompressed(&p)[..], &mut ctx);
-        assert!(openssl_point.is_ok());
+        let _openssl_point =
+            EcPoint::from_bytes(&openssl_group, &public_key_uncompressed(&p)[..], &mut ctx)
+                .unwrap();
     }
 
     #[test]
@@ -179,7 +177,8 @@ mod test {
             hex::decode(SIGNATURE_1).unwrap().as_slice(),
             &p,
         );
-        assert!(result.is_ok());
+        // we are returning a `Result`
+        // unwrap will catch the `Err(_)`, and assert will catch the `false`
         assert!(result.unwrap());
 
         let context = secp256k1::Secp256k1::new();
@@ -189,13 +188,11 @@ mod test {
         let h = sha2::Sha256::digest(MESSAGE_1);
         let msg = secp256k1::Message::from_digest_slice(h.as_slice()).unwrap();
 
-        //Check if signatures produced here can be verified by secp256k1
-        let mut signature =
+        // Check if signatures produced here can be verified by secp256k1
+        let signature =
             secp256k1::ecdsa::Signature::from_compact(&hex::decode(SIGNATURE_1).unwrap()[..])
                 .unwrap();
-        signature.normalize_s();
-        let result = context.verify_ecdsa(&msg, &signature, &pk);
-        assert!(result.is_ok());
+        context.verify_ecdsa(&msg, &signature, &pk).unwrap();
 
         let openssl_group = EcGroup::from_curve_name(Nid::SECP256K1).unwrap();
         let mut ctx = BigNumContext::new().unwrap();
@@ -209,19 +206,19 @@ mod test {
         let openssl_s = BigNum::from_hex_str(s).unwrap();
         let openssl_sig = EcdsaSig::from_private_components(openssl_r, openssl_s).unwrap();
         let openssl_result = openssl_sig.verify(h.as_slice(), &openssl_pkey);
-        assert!(openssl_result.is_ok());
         assert!(openssl_result.unwrap());
     }
 
     #[test]
     fn secp256k1_sign() {
         let secret = PrivateKey::from_hex(ALGORITHM, PRIVATE_KEY).unwrap();
-        let (p, s) =
+        let (pk, sk) =
             EcdsaSecp256k1Sha256::keypair(Some(KeyGenOption::FromPrivateKey(secret))).unwrap();
 
-        let sig = EcdsaSecp256k1Sha256::sign(MESSAGE_1, &s).unwrap();
-        let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &sig, &p);
-        assert!(result.is_ok());
+        let sig = EcdsaSecp256k1Sha256::sign(MESSAGE_1, &sk).unwrap();
+        let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &sig, &pk);
+        // we are returning a `Result`
+        // unwrap will catch the `Err(_)`, and assert will catch the `false`
         assert!(result.unwrap());
 
         assert_eq!(sig.len(), 64);
@@ -237,16 +234,13 @@ mod test {
         let msg = secp256k1::Message::from_digest_slice(h.as_slice()).unwrap();
         let sig_1 = context.sign_ecdsa(&msg, &sk).serialize_compact();
 
-        let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &sig_1, &p);
-
-        assert!(result.is_ok());
+        let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &sig_1, &pk);
         assert!(result.unwrap());
 
         let openssl_group = EcGroup::from_curve_name(Nid::SECP256K1).unwrap();
         let mut ctx = BigNumContext::new().unwrap();
         let openssl_point =
-            EcPoint::from_bytes(&openssl_group, &public_key_uncompressed(&p)[..], &mut ctx)
-                .unwrap();
+            EcPoint::from_bytes(&openssl_group, &public_key_uncompressed(&pk), &mut ctx).unwrap();
         let openssl_public_key = EcKey::from_public_key(&openssl_group, &openssl_point).unwrap();
         let openssl_secret_key = EcKey::from_private_components(
             &openssl_group,
@@ -257,23 +251,41 @@ mod test {
         let openssl_sig = EcdsaSig::sign(h.as_slice(), &openssl_secret_key).unwrap();
         let openssl_result = openssl_sig.verify(h.as_slice(), &openssl_public_key);
-        assert!(openssl_result.is_ok());
         assert!(openssl_result.unwrap());
 
-        let mut temp_sig = Vec::new();
-        temp_sig.extend(openssl_sig.r().to_vec());
-        temp_sig.extend(openssl_sig.s().to_vec());
-
-        // secp256k1 expects normalized "s"'s.
-        // scheme.normalize_s(temp_sig.as_mut_slice()).unwrap();
-        // k256 seems to be normalizing always now
-        let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, temp_sig.as_slice(), &p);
-        assert!(result.is_ok());
+
+        let openssl_sig = {
+            use std::ops::{Shr, Sub};
+
+            // ensure the S value is "low" (see BIP-0062) https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#user-content-Low_S_values_in_signatures
+            // this is required for k256 to successfully verify the signature, as it will fail verification of any signature with a High S value
+            // Based on https://github.com/bitcoin/bitcoin/blob/v0.9.3/src/key.cpp#L202-L227
+            // this is only required for interoperability with OpenSSL
+            // if we are only using signatures from iroha_crypto, all of this dance is not necessary
+            let mut s = openssl_sig.s().to_owned().unwrap();
+            let mut order = BigNum::new().unwrap();
+            openssl_group.order(&mut order, &mut ctx).unwrap();
+            let half_order = order.shr(1);
+
+            // if the S is "high" (s > half_order), convert it to "low" form (order - s)
+            if s.cmp(&half_order) == std::cmp::Ordering::Greater {
+                s = order.sub(&s);
+            }
+
+            let r = openssl_sig.r();
+
+            // serialize the key
+            let mut res = Vec::new();
+            res.extend(r.to_vec());
+            res.extend(s.to_vec());
+            res
+        };
+
+        let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, openssl_sig.as_slice(), &pk);
         assert!(result.unwrap());
 
         let (p, s) = EcdsaSecp256k1Sha256::keypair(None).unwrap();
         let signed = EcdsaSecp256k1Sha256::sign(MESSAGE_1, &s).unwrap();
         let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &signed, &p);
-        assert!(result.is_ok());
         assert!(result.unwrap());
     }
 }
diff --git a/data_model/src/events/data/events.rs b/data_model/src/events/data/events.rs
index 90f1caf2cf8..c08b9e504c0 100644
--- a/data_model/src/events/data/events.rs
+++ b/data_model/src/events/data/events.rs
@@ -3,7 +3,6 @@
 use getset::Getters;
 use iroha_data_model_derive::{model, Filter, HasOrigin};
-use iroha_primitives::small::SmallVec;
 
 pub use self::model::*;
 use super::*;
@@ -61,22 +60,6 @@ pub mod model {
         pub value: Box<Value>,
     }
 
-    /// World event
-    ///
-    /// Does not participate in `Event`, but useful for events warranties when modifying `wsv`
-    #[derive(
-        Debug, Clone, PartialEq, Eq, FromVariant, Decode, Encode, Deserialize, Serialize, IntoSchema,
-    )]
-    pub enum WorldEvent {
-        Peer(peer::PeerEvent),
-        Domain(domain::DomainEvent),
-        Role(role::RoleEvent),
-        Trigger(trigger::TriggerEvent),
-        PermissionTokenSchemaUpdate(permission::PermissionTokenSchemaUpdateEvent),
-        Configuration(config::ConfigurationEvent),
-        Executor(executor::ExecutorEvent),
-    }
-
     /// Event
     #[derive(
         Debug,
@@ -98,12 +81,6 @@ pub mod model {
         Peer(peer::PeerEvent),
         /// Domain event
         Domain(domain::DomainEvent),
-        /// Account event
-        Account(account::AccountEvent),
-        /// Asset definition event
-        AssetDefinition(asset::AssetDefinitionEvent),
-        /// Asset event
-        Asset(asset::AssetEvent),
         /// Trigger event
         Trigger(trigger::TriggerEvent),
         /// Role event
         Role(role::RoleEvent),
@@ -629,65 +606,19 @@ pub trait HasOrigin {
     fn origin_id(&self) -> &<Self::Origin as Identifiable>::Id;
 }
 
-impl WorldEvent {
-    /// Unfold [`Self`] and return vector of [`Event`]s in the expanding scope order: from specific to general.
-    /// E.g [`AssetEvent`] -> [`AccountEvent`] -> [`DomainEvent`]
-    pub fn flatten(self) -> SmallVec<[DataEvent; 3]> {
-        let mut events = SmallVec::new();
-
-        match self {
-            WorldEvent::Domain(domain_event) => {
-                match &domain_event {
-                    DomainEvent::Account(account_event) => {
-                        if let AccountEvent::Asset(asset_event) = account_event {
-                            events.push(DataEvent::Asset(asset_event.clone()));
-                        }
-                        events.push(DataEvent::Account(account_event.clone()));
-                    }
-                    DomainEvent::AssetDefinition(asset_definition_event) => {
-                        events.push(DataEvent::AssetDefinition(asset_definition_event.clone()));
-                    }
-                    _ => (),
-                }
-                events.push(DataEvent::Domain(domain_event));
-            }
-            WorldEvent::Peer(peer_event) => {
-                events.push(DataEvent::Peer(peer_event));
-            }
-            WorldEvent::Role(role_event) => {
-                events.push(DataEvent::Role(role_event));
-            }
-            WorldEvent::Trigger(trigger_event) => {
-                events.push(DataEvent::Trigger(trigger_event));
-            }
-            WorldEvent::PermissionTokenSchemaUpdate(token_event) => {
-                events.push(DataEvent::PermissionToken(token_event));
-            }
-            WorldEvent::Configuration(config_event) => {
-                events.push(DataEvent::Configuration(config_event));
-            }
-            WorldEvent::Executor(executor_event) => {
-                events.push(DataEvent::Executor(executor_event));
-            }
-        }
-
-        events
-    }
-}
-
-impl From<AccountEvent> for WorldEvent {
+impl From<AccountEvent> for DataEvent {
     fn from(value: AccountEvent) -> Self {
         DomainEvent::Account(value).into()
     }
 }
 
-impl From<AssetDefinitionEvent> for WorldEvent {
+impl From<AssetDefinitionEvent> for DataEvent {
     fn from(value: AssetDefinitionEvent) -> Self {
         DomainEvent::AssetDefinition(value).into()
     }
 }
 
-impl From<AssetEvent> for WorldEvent {
+impl From<AssetEvent> for DataEvent {
     fn from(value: AssetEvent) -> Self {
         AccountEvent::Asset(value).into()
     }
 }
 
@@ -698,9 +629,6 @@ impl DataEvent {
     pub fn domain_id(&self) -> Option<&DomainId> {
         match self {
             Self::Domain(event) => Some(event.origin_id()),
-            Self::Account(event) => Some(&event.origin_id().domain_id),
-            Self::AssetDefinition(event) => Some(&event.origin_id().domain_id),
-            Self::Asset(event) => Some(&event.origin_id().definition_id.domain_id),
             Self::Trigger(event) => event.origin_id().domain_id.as_ref(),
             Self::Peer(_)
             | Self::Configuration(_)
@@ -731,6 +659,6 @@ pub mod prelude {
         trigger::{
             TriggerEvent, TriggerEventFilter, TriggerFilter, TriggerNumberOfExecutionsChanged,
         },
-        DataEvent, HasOrigin, MetadataChanged, WorldEvent,
+        DataEvent, HasOrigin, MetadataChanged,
     };
 }
diff --git a/data_model/src/events/data/filters.rs b/data_model/src/events/data/filters.rs
index 30d4029e018..c03b9b7c6b6 100644
--- a/data_model/src/events/data/filters.rs
+++ b/data_model/src/events/data/filters.rs
@@ -52,12 +52,6 @@ pub mod model {
         ByPeer(FilterOpt<PeerFilter>),
         /// Filter by Domain entity. `AcceptAll` value will accept all `Domain` events
         ByDomain(FilterOpt<DomainFilter>),
-        /// Filter by Account entity. `AcceptAll` value will accept all `Account` events
-        ByAccount(FilterOpt<AccountFilter>),
-        /// Filter by AssetDefinition entity. `AcceptAll` value will accept all `AssetDefinition` events
-        ByAssetDefinition(FilterOpt<AssetDefinitionFilter>),
-        /// Filter by Asset entity. `AcceptAll` value will accept all `Asset` events
-        ByAsset(FilterOpt<AssetFilter>),
         /// Filter by Trigger entity. `AcceptAll` value will accept all `Trigger` events
         ByTrigger(FilterOpt<TriggerFilter>),
         /// Filter by Role entity. `AcceptAll` value will accept all `Role` events
@@ -145,13 +139,9 @@ impl Filter for DataEntityFilter {
         match (self, event) {
             (Self::ByPeer(filter_opt), DataEvent::Peer(peer)) => filter_opt.matches(peer),
             (Self::ByDomain(filter_opt), DataEvent::Domain(domain)) => filter_opt.matches(domain),
-            (Self::ByAccount(filter_opt), DataEvent::Account(account)) => {
-                filter_opt.matches(account)
+            (Self::ByTrigger(filter_opt), DataEvent::Trigger(trigger)) => {
+                filter_opt.matches(trigger)
             }
-            (Self::ByAssetDefinition(filter_opt), DataEvent::AssetDefinition(asset_definition)) => {
-                filter_opt.matches(asset_definition)
-            }
-            (Self::ByAsset(filter_opt), DataEvent::Asset(asset)) => filter_opt.matches(asset),
             (Self::ByRole(filter_opt), DataEvent::Role(role)) => filter_opt.matches(role),
             _ => false,
         }
@@ -241,22 +231,60 @@ mod tests {
             metadata: Metadata::default(),
         };
         let asset_id = AssetId::new(
-            AssetDefinitionId::new(asset_name, domain_id),
+            AssetDefinitionId::new(asset_name, domain_id.clone()),
             account_id.clone(),
         );
-        let asset = Asset::new(asset_id, 0u32);
-
-        let domain_created = DomainEvent::Created(domain);
-        let account_created = AccountEvent::Created(account);
-        let asset_created = AssetEvent::Created(asset);
-        let account_asset_created = AccountEvent::Asset(asset_created.clone());
-        let account_filter = BySome(DataEntityFilter::ByAccount(BySome(AccountFilter::new(
-            BySome(OriginFilter(account_id)),
+        let asset = Asset::new(asset_id.clone(), 0u32);
+
+        // Create three events with three levels of nesting
+        // the first one is just a domain event
+        // the second one is a domain event with an account event inside
+        // the third one is a domain event with an account event with an asset event inside
+        let domain_created = DomainEvent::Created(domain).into();
+        let account_created = DomainEvent::Account(AccountEvent::Created(account)).into();
+        let asset_created =
+            DomainEvent::Account(AccountEvent::Asset(AssetEvent::Created(asset))).into();
+
+        // test how the differently nested filters work with the events
+        // FIXME: rewrite the filters using the builder DSL https://github.com/hyperledger/iroha/issues/3068
+        let domain_filter = BySome(DataEntityFilter::ByDomain(BySome(DomainFilter::new(
+            BySome(OriginFilter(domain_id)),
+            AcceptAll,
+        ))));
+        let account_filter = BySome(DataEntityFilter::ByDomain(BySome(DomainFilter::new(
+            // kind of unfortunately, we have to specify the domain id filter,
+            // even though we will filter it with the account id filter (an account id contains the domain id and the account name)
+            // FIXME: maybe make this more orthogonal by introducing a partial id (in account event filter only by account name)
             AcceptAll,
+            BySome(DomainEventFilter::ByAccount(BySome(AccountFilter::new(
+                BySome(OriginFilter(account_id)),
+                AcceptAll,
+            )))),
         ))));
-        assert!(!account_filter.matches(&domain_created.into()));
-        assert!(!account_filter.matches(&asset_created.into()));
-        assert!(account_filter.matches(&account_created.into()));
-        assert!(account_filter.matches(&account_asset_created.into()));
+        let asset_filter = BySome(DataEntityFilter::ByDomain(BySome(DomainFilter::new(
+            AcceptAll,
+            BySome(DomainEventFilter::ByAccount(BySome(AccountFilter::new(
+                AcceptAll,
+                BySome(AccountEventFilter::ByAsset(BySome(AssetFilter::new(
+                    BySome(OriginFilter(asset_id)),
+                    AcceptAll,
+                )))),
+            )))),
+        ))));
+
+        // the domain filter matches all of those, because all of those events happened in the same domain
+        assert!(domain_filter.matches(&domain_created));
+        assert!(domain_filter.matches(&account_created));
+        assert!(domain_filter.matches(&asset_created));
+
+        // the account filter does not match the domain created event, as it is not an account event
+        assert!(!account_filter.matches(&domain_created));
+        assert!(account_filter.matches(&account_created));
+        assert!(account_filter.matches(&asset_created));
+
+        // the asset filter matches only the domain->account->asset event
+        assert!(!asset_filter.matches(&domain_created));
+        assert!(!asset_filter.matches(&account_created));
+        assert!(asset_filter.matches(&asset_created));
     }
 }
diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json
index a4640d1122e..3ac32350d43 100644
--- a/docs/source/references/schema.json
+++ b/docs/source/references/schema.json
@@ -876,29 +876,14 @@
          "discriminant": 1,
          "type": "FilterOpt<DomainFilter>"
        },
-        {
-          "tag": "ByAccount",
-          "discriminant": 2,
-          "type": "FilterOpt<AccountFilter>"
-        },
-        {
-          "tag": "ByAssetDefinition",
-          "discriminant": 3,
-          "type": "FilterOpt<AssetDefinitionFilter>"
-        },
-        {
-          "tag": "ByAsset",
-          "discriminant": 4,
-          "type": "FilterOpt<AssetFilter>"
-        },
        {
          "tag": "ByTrigger",
-          "discriminant": 5,
+          "discriminant": 2,
          "type": "FilterOpt<TriggerFilter>"
        },
        {
          "tag": "ByRole",
-          "discriminant": 6,
+          "discriminant": 3,
          "type": "FilterOpt<RoleFilter>"
        }
      ]
@@ -915,44 +900,29 @@
          "discriminant": 1,
          "type": "DomainEvent"
        },
-        {
-          "tag": "Account",
-          "discriminant": 2,
-          "type": "AccountEvent"
-        },
-        {
-          "tag": "AssetDefinition",
-          "discriminant": 3,
-          "type": "AssetDefinitionEvent"
-        },
-        {
-          "tag": "Asset",
-          "discriminant": 4,
-          "type": "AssetEvent"
-        },
        {
          "tag": "Trigger",
-          "discriminant": 5,
+          "discriminant": 2,
          "type": "TriggerEvent"
        },
        {
          "tag": "Role",
-          "discriminant": 6,
+          "discriminant": 3,
          "type": "RoleEvent"
        },
        {
          "tag": "PermissionToken",
-          "discriminant": 7,
+          "discriminant": 4,
          "type": "PermissionTokenSchemaUpdateEvent"
        },
        {
          "tag": "Configuration",
-          "discriminant": 8,
+          "discriminant": 5,
          "type": "ConfigurationEvent"
        },
        {
          "tag": "Executor",
-          "discriminant": 9,
+          "discriminant": 6,
          "type": "ExecutorEvent"
        }
      ]
diff --git a/tools/parity_scale_decoder/build.rs b/tools/parity_scale_decoder/build.rs
index 58e3db5dbd8..49223203f69 100644
--- a/tools/parity_scale_decoder/build.rs
+++ b/tools/parity_scale_decoder/build.rs
@@ -23,13 +23,11 @@ where
     path_to.push("samples/");
     path_to.push(filename);
 
-    let mut path_to_json_sample = path_to.clone();
-    path_to_json_sample.set_extension("json");
+    let path_to_json = path_to.with_extension("json");
+    let path_to_binary = path_to.with_extension("bin");
 
-    let mut path_to_binary = path_to;
-    path_to_binary.set_extension("bin");
-
-    let buf = fs::read_to_string(path_to_json_sample)?;
+    println!("cargo:rerun-if-changed={}", path_to_json.to_str().unwrap());
+    let buf = fs::read_to_string(path_to_json)?;
 
     let sample = serde_json::from_str::<T>(buf.as_str())?;
 
diff --git a/tools/parity_scale_decoder/samples/trigger.bin b/tools/parity_scale_decoder/samples/trigger.bin
index 9d084408877..4ad7ccdfd9e 100644
Binary files a/tools/parity_scale_decoder/samples/trigger.bin and b/tools/parity_scale_decoder/samples/trigger.bin differ
diff --git a/tools/parity_scale_decoder/samples/trigger.json b/tools/parity_scale_decoder/samples/trigger.json
index b37816c0352..2cf26cf26f3 100644
--- a/tools/parity_scale_decoder/samples/trigger.json
+++ b/tools/parity_scale_decoder/samples/trigger.json
@@ -19,7 +19,12 @@
       "authority": "alice@wonderland",
       "filter": {
         "Data": {
-          "ByAccount": "AcceptAll"
+          "ByDomain": {
+            "origin_filter": "AcceptAll",
+            "event_filter": {
+              "ByAccount": "AcceptAll"
+            }
+          }
         }
       },
       "metadata": {}
diff --git a/tools/parity_scale_decoder/src/main.rs b/tools/parity_scale_decoder/src/main.rs
index 47dc7801e7a..c57d0d1a7cb 100644
--- a/tools/parity_scale_decoder/src/main.rs
+++ b/tools/parity_scale_decoder/src/main.rs
@@ -264,9 +264,10 @@ mod tests {
             vec![Mint::asset_quantity(1_u32, rose_id)],
             Repeats::Indefinitely,
             account_id,
-            FilterBox::Data(DataEventFilter::BySome(DataEntityFilter::ByAccount(
-                AcceptAll,
-            ))),
+            // FIXME: rewrite the filters using the builder DSL https://github.com/hyperledger/iroha/issues/3068
+            FilterBox::Data(BySome(DataEntityFilter::ByDomain(BySome(
+                DomainFilter::new(AcceptAll, BySome(DomainEventFilter::ByAccount(AcceptAll))),
+            )))),
         );
 
         let trigger = Trigger::new(trigger_id, action);