From 71a0506f2e84e795067f3a5a7a0082019a0b425b Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Tue, 12 Sep 2023 14:42:54 +0300 Subject: [PATCH 1/9] [refactor] #3882: Update iroha_data_model_derive to use syn 2.0 Signed-off-by: Nikita Strygin --- Cargo.lock | 12 +- data_model/derive/Cargo.toml | 6 +- data_model/derive/src/filter.rs | 19 ++- data_model/derive/src/has_origin.rs | 117 +++++++----------- data_model/derive/src/id.rs | 80 ++++++------ data_model/derive/src/lib.rs | 73 +++++++---- data_model/derive/src/model.rs | 56 +++++---- data_model/derive/src/partially_tagged.rs | 31 ++--- .../derive/tests/partial_tagged_serde.rs | 83 +++++++++++++ ffi/derive/Cargo.toml | 3 +- ffi/derive/src/convert.rs | 64 ++-------- ffi/derive/src/getset_gen.rs | 2 +- ffi/derive/src/impl_visitor.rs | 3 +- ffi/derive/src/lib.rs | 3 +- ffi/derive/src/wrapper.rs | 2 +- macro/utils/Cargo.toml | 5 +- {ffi/derive => macro/utils}/src/emitter.rs | 3 +- macro/utils/src/lib.rs | 102 +++++++++++++++ 18 files changed, 413 insertions(+), 251 deletions(-) create mode 100644 data_model/derive/tests/partial_tagged_serde.rs rename {ffi/derive => macro/utils}/src/emitter.rs (97%) diff --git a/Cargo.lock b/Cargo.lock index 35701a3bc9e..b26ac45ea5f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3172,14 +3172,15 @@ dependencies = [ name = "iroha_data_model_derive" version = "2.0.0-pre-rc.19" dependencies = [ + "darling", "iroha_data_model", "iroha_macro_utils", - "proc-macro-error", + "manyhow", "proc-macro2", "quote", "serde", "serde_json", - "syn 1.0.109", + "syn 2.0.26", "trybuild", ] @@ -3226,9 +3227,9 @@ name = "iroha_ffi_derive" version = "2.0.0-pre-rc.19" dependencies = [ "darling", - "drop_bomb", "getset", "iroha_ffi", + "iroha_macro_utils", "manyhow", "parse-display", "proc-macro2", @@ -3311,10 +3312,13 @@ dependencies = [ name = "iroha_macro_utils" version = "2.0.0-pre-rc.19" dependencies = [ - "proc-macro-error", + "darling", + "drop_bomb", + "manyhow", "proc-macro2", "quote", "syn 1.0.109", 
+ "syn 2.0.26", ] [[package]] diff --git a/data_model/derive/Cargo.toml b/data_model/derive/Cargo.toml index 0fb3f485bbd..bee036dd7ef 100644 --- a/data_model/derive/Cargo.toml +++ b/data_model/derive/Cargo.toml @@ -14,12 +14,12 @@ workspace = true proc-macro = true [dependencies] -syn = { workspace = true, features = ["default", "full", "extra-traits"] } +syn2 = { workspace = true, features = ["default", "full", "extra-traits"] } quote = { workspace = true } +darling = { workspace = true } proc-macro2 = { workspace = true } -proc-macro-error = { workspace = true } +manyhow = { workspace = true } iroha_macro_utils = { workspace = true } -serde_json = { workspace = true, features = ["std"] } [dev-dependencies] iroha_data_model = { workspace = true, features = ["http"] } diff --git a/data_model/derive/src/filter.rs b/data_model/derive/src/filter.rs index f9cbe87c09a..4725f0c45ac 100644 --- a/data_model/derive/src/filter.rs +++ b/data_model/derive/src/filter.rs @@ -4,9 +4,9 @@ clippy::arithmetic_side_effects )] -use proc_macro::TokenStream; +use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn::{ +use syn2::{ parse::{Parse, ParseStream}, punctuated::Punctuated, Attribute, Generics, Ident, Token, Variant, Visibility, @@ -113,15 +113,15 @@ impl EventEnum { } impl Parse for EventEnum { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let _attrs = input.call(Attribute::parse_outer)?; let vis = input.parse()?; let _enum_token = input.parse::()?; let ident = input.parse::()?; let generics = input.parse::()?; let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(EventVariant::parse)?; + let _brace_token = syn2::braced!(content in input); + let variants = content.parse_terminated(EventVariant::parse, Token![,])?; if ident.to_string().ends_with("Event") { Ok(EventEnum { vis, @@ -130,7 +130,7 @@ impl Parse for EventEnum { variants, }) } else { - 
Err(syn::Error::new_spanned( + Err(syn2::Error::new_spanned( ident, "Bad ident: only derivable for `...Event` enums", )) @@ -139,7 +139,7 @@ impl Parse for EventEnum { } impl Parse for EventVariant { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let variant = input.parse::()?; let variant_ident = variant.ident; let field_type = variant @@ -148,7 +148,7 @@ impl Parse for EventVariant { .next() .expect("Variant should have at least one unnamed field") .ty; - if let syn::Type::Path(path) = field_type { + if let syn2::Type::Path(path) = field_type { let field_ident = path .path .get_ident() @@ -163,7 +163,7 @@ impl Parse for EventVariant { Ok(EventVariant::IdField(variant_ident)) } } else { - Err(syn::Error::new_spanned( + Err(syn2::Error::new_spanned( field_type, "Unexpected AST type variant", )) @@ -220,7 +220,6 @@ pub fn impl_filter(event: &EventEnum) -> TokenStream { #event_filter_and_impl } - .into() } /// Generates the event filter for the event. E.g. 
for `AccountEvent`, `AccountEventFilter` diff --git a/data_model/derive/src/has_origin.rs b/data_model/derive/src/has_origin.rs index 85dab5114bb..18df35f8d10 100644 --- a/data_model/derive/src/has_origin.rs +++ b/data_model/derive/src/has_origin.rs @@ -4,102 +4,80 @@ clippy::unwrap_in_result )] -use iroha_macro_utils::{attr_struct, AttrParser}; -use proc_macro::TokenStream; -use proc_macro_error::abort; +use darling::{FromDeriveInput, FromVariant}; +use iroha_macro_utils::{attr_struct2, parse_single_list_attr, parse_single_list_attr_opt}; +use manyhow::Result; +use proc_macro2::TokenStream; use quote::quote; -use syn::{ - parse::{Parse, ParseStream}, - parse_quote, - punctuated::Punctuated, - Attribute, Generics, Ident, Token, Type, Variant, Visibility, -}; +use syn2::{parse_quote, Ident, Token, Type}; mod kw { - syn::custom_keyword!(origin); - syn::custom_keyword!(variant); + syn2::custom_keyword!(origin); } +const HAS_ORIGIN_ATTR: &str = "has_origin"; + pub struct HasOriginEnum { ident: Ident, - variants: Punctuated, + variants: Vec, origin: Type, } +impl FromDeriveInput for HasOriginEnum { + fn from_derive_input(input: &syn2::DeriveInput) -> darling::Result { + let ident = input.ident.clone(); + + let Some(variants) = darling::ast::Data::::try_from(&input.data)?.take_enum() else { + return Err(darling::Error::custom("Expected enum")); + }; + + let origin = parse_single_list_attr::(HAS_ORIGIN_ATTR, &input.attrs)?.ty; + + Ok(Self { + ident, + variants, + origin, + }) + } +} + pub struct HasOriginVariant { ident: Ident, - extractor: Option, + extractor: Option, } -struct HasOriginAttr(core::marker::PhantomData); +impl FromVariant for HasOriginVariant { + fn from_variant(variant: &syn2::Variant) -> darling::Result { + let ident = variant.ident.clone(); + let extractor = parse_single_list_attr_opt(HAS_ORIGIN_ATTR, &variant.attrs)?; -impl AttrParser for HasOriginAttr { - const IDENT: &'static str = "has_origin"; + Ok(Self { ident, extractor }) + } } -attr_struct! 
{ - pub struct Origin { +attr_struct2! { + pub struct OriginAttr { _kw: kw::origin, _eq: Token![=], ty: Type, } } -attr_struct! { - pub struct OriginExtractor { +attr_struct2! { + pub struct OriginExtractorAttr { ident: Ident, _eq: Token![=>], - extractor: syn::Expr, + extractor: syn2::Expr, } } -impl Parse for HasOriginEnum { - fn parse(input: ParseStream) -> syn::Result { - let attrs = input.call(Attribute::parse_outer)?; - let _vis = input.parse::()?; - let _enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - if !generics.params.is_empty() { - abort!(generics, "Generics are not supported"); - } - let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(HasOriginVariant::parse)?; - let origin = attrs - .iter() - .find_map(|attr| HasOriginAttr::::parse(attr).ok()) - .map(|origin| origin.ty) - .expect("Attribute `#[has_origin(origin = Type)]` is required"); - Ok(HasOriginEnum { - ident, - variants, - origin, - }) - } -} +pub fn impl_has_origin(input: &syn2::DeriveInput) -> Result { + let enum_ = HasOriginEnum::from_derive_input(input)?; -impl Parse for HasOriginVariant { - fn parse(input: ParseStream) -> syn::Result { - let variant = input.parse::()?; - let Variant { - ident, - fields, - attrs, - .. 
- } = variant; - match fields { - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {} - fields => abort!(fields, "Only supports tuple variants with single field"), - }; - let extractor = attrs - .iter() - .find_map(|attr| HasOriginAttr::::parse(attr).ok()); - Ok(HasOriginVariant { ident, extractor }) - } -} + // TODO: verify enum is non-empty (or make it work with empty enums) + // TODO: verify all the enum variants are newtype variants + // TODO: verify there are no generics on the enum -pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { let enum_ident = &enum_.ident; let enum_origin = &enum_.origin; let variants_match_arms = &enum_ @@ -116,9 +94,9 @@ pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { }, ) }) - .collect::>(); + .collect::>(); - quote! { + Ok(quote! { impl HasOrigin for #enum_ident { type Origin = #enum_origin; @@ -131,6 +109,5 @@ pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { } } } - } - .into() + }) } diff --git a/data_model/derive/src/id.rs b/data_model/derive/src/id.rs index afb742b2420..ad57dfd789e 100644 --- a/data_model/derive/src/id.rs +++ b/data_model/derive/src/id.rs @@ -1,33 +1,16 @@ #![allow(clippy::str_to_string, clippy::mixed_read_write_in_expression)] +use manyhow::{bail, Result}; use proc_macro2::TokenStream; -use proc_macro_error::abort; use quote::quote; -use syn::parse_quote; +use syn2::parse_quote; -fn derive_identifiable(input: &syn::ItemStruct) -> TokenStream { +pub fn impl_id(input: &syn2::ItemStruct) -> Result { let name = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let (id_type, id_expr) = get_id_type(input); + let identifiable_derive = derive_identifiable(input)?; - quote! 
{ - impl #impl_generics Identifiable for #name #ty_generics #where_clause { - type Id = #id_type; - - #[inline] - fn id(&self) -> &Self::Id { - #id_expr - } - } - } -} - -pub fn impl_id(input: &syn::ItemStruct) -> TokenStream { - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let identifiable_derive = derive_identifiable(input); - - quote! { + Ok(quote! { #identifiable_derive impl #impl_generics ::core::cmp::PartialOrd for #name #ty_generics #where_clause where Self: Identifiable { @@ -55,65 +38,82 @@ pub fn impl_id(input: &syn::ItemStruct) -> TokenStream { self.id().hash(state); } } - } + }) +} + +fn derive_identifiable(input: &syn2::ItemStruct) -> Result { + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let (id_type, id_expr) = get_id_type(input)?; + + Ok(quote! { + impl #impl_generics Identifiable for #name #ty_generics #where_clause { + type Id = #id_type; + + #[inline] + fn id(&self) -> &Self::Id { + #id_expr + } + } + }) } -fn get_id_type(input: &syn::ItemStruct) -> (TokenStream, TokenStream) { +fn get_id_type(input: &syn2::ItemStruct) -> Result<(TokenStream, TokenStream)> { match &input.fields { - syn::Fields::Named(fields) => { + syn2::Fields::Named(fields) => { for field in &fields.named { let (field_name, field_ty) = (&field.ident, &field.ty); if is_identifier(&field.attrs) { - return (quote! {#field_ty}, quote! {&self.#field_name}); + return Ok((quote! {#field_ty}, quote! {&self.#field_name})); } if is_transparent(&field.attrs) { - return ( + return Ok(( quote! {<#field_ty as Identifiable>::Id}, quote! 
{Identifiable::id(&self.#field_name)}, - ); + )); } } } - syn::Fields::Unnamed(fields) => { + syn2::Fields::Unnamed(fields) => { for (i, field) in fields.unnamed.iter().enumerate() { - let (field_id, field_ty): (syn::Index, _) = (i.into(), &field.ty); + let (field_id, field_ty): (syn2::Index, _) = (i.into(), &field.ty); if is_identifier(&field.attrs) { - return (quote! {#field_ty}, quote! {&self.#field_id}); + return Ok((quote! {#field_ty}, quote! {&self.#field_id})); } if is_transparent(&field.attrs) { - return ( + return Ok(( quote! {<#field_ty as Identifiable>::Id}, quote! {Identifiable::id(&self.#field_id)}, - ); + )); } } } - syn::Fields::Unit => {} + syn2::Fields::Unit => {} } match &input.fields { - syn::Fields::Named(named) => { + syn2::Fields::Named(named) => { for field in &named.named { let field_ty = &field.ty; if field.ident.as_ref().expect("Field must be named") == "id" { - return (quote! {#field_ty}, quote! {&self.id}); + return Ok((quote! {#field_ty}, quote! {&self.id})); } } } - syn::Fields::Unnamed(_) | syn::Fields::Unit => {} + syn2::Fields::Unnamed(_) | syn2::Fields::Unit => {} } - abort!(input, "Identifier not found") + bail!(input, "Identifier not found") } -fn is_identifier(attrs: &[syn::Attribute]) -> bool { +fn is_identifier(attrs: &[syn2::Attribute]) -> bool { attrs.iter().any(|attr| attr == &parse_quote! {#[id]}) } -fn is_transparent(attrs: &[syn::Attribute]) -> bool { +fn is_transparent(attrs: &[syn2::Attribute]) -> bool { attrs .iter() .any(|attr| attr == &parse_quote! {#[id(transparent)]}) diff --git a/data_model/derive/src/lib.rs b/data_model/derive/src/lib.rs index 607ff1720e6..61b17a51b09 100644 --- a/data_model/derive/src/lib.rs +++ b/data_model/derive/src/lib.rs @@ -7,8 +7,9 @@ mod id; mod model; mod partially_tagged; -use proc_macro::TokenStream; -use syn::parse_macro_input; +use iroha_macro_utils::Emitter; +use manyhow::{emit, manyhow, Result}; +use proc_macro2::TokenStream; /// Macro which controls how to export item's API. 
The behaviour is controlled with `transparent_api` /// feature flag. If the flag is active, item's public fields will be exposed as public, however, if @@ -80,19 +81,37 @@ use syn::parse_macro_input; /// ``` /// /// It assumes that the derive is imported and referred to by its original name. +#[manyhow] #[proc_macro_attribute] -#[proc_macro_error::proc_macro_error] -pub fn model(_attr: TokenStream, input: TokenStream) -> TokenStream { - model::impl_model(&parse_macro_input!(input)).into() +pub fn model(attr: TokenStream, input: TokenStream) -> TokenStream { + let mut emitter = Emitter::new(); + + if !attr.is_empty() { + emit!(emitter, attr, "This attribute does not take any arguments"); + } + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = model::impl_model(&mut emitter, &input); + + emitter.finish_token_stream_with(result) } /// Same as [`model`] macro, but only processes a single item. /// /// You should prefer using [`model`] macro over this one. 
+#[manyhow] #[proc_macro] -#[proc_macro_error::proc_macro_error] pub fn model_single(input: TokenStream) -> TokenStream { - model::process_item(parse_macro_input!(input)).into() + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + emitter.finish_token_stream_with(model::process_item(input)) } /// Derive macro for `Identifiable` trait which also automatically implements [`Ord`], [`Eq`], @@ -209,10 +228,12 @@ pub fn model_single(input: TokenStream) -> TokenStream { /// } /// ``` /// -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(IdEqOrdHash, attributes(id, opaque))] -pub fn id_eq_ord_hash(input: TokenStream) -> TokenStream { - id::impl_id(&parse_macro_input!(input)).into() +pub fn id_eq_ord_hash(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + id::impl_id(&input) } /// [`Filter`] is used for code generation of `...Filter` structs and `...EventFilter` enums, as well as @@ -377,10 +398,12 @@ pub fn id_eq_ord_hash(input: TokenStream) -> TokenStream { /// ``` /// /// It assumes that the derive is imported and referred to by its original name. 
+#[manyhow] #[proc_macro_derive(Filter)] -pub fn filter_derive(input: TokenStream) -> TokenStream { - let event = parse_macro_input!(input as filter::EventEnum); - filter::impl_filter(&event) +pub fn filter_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(filter::impl_filter(&input)) } /// Derive `::serde::Serialize` trait for `enum` with possibility to avoid tags for selected variants @@ -409,10 +432,12 @@ pub fn filter_derive(input: TokenStream) -> TokenStream { /// &serde_json::to_string(&Outer::A(42)).expect("Failed to serialize"), r#"{"A":42}"# /// ); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(PartiallyTaggedSerialize, attributes(serde_partially_tagged, serde))] -pub fn partially_tagged_serialize_derive(input: TokenStream) -> TokenStream { - partially_tagged::impl_partially_tagged_serialize(&parse_macro_input!(input)) +pub fn partially_tagged_serialize_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(partially_tagged::impl_partially_tagged_serialize(&input)) } /// Derive `::serde::Deserialize` trait for `enum` with possibility to avoid tags for selected variants @@ -470,10 +495,12 @@ pub fn partially_tagged_serialize_derive(input: TokenStream) -> TokenStream { /// serde_json::from_str::(r#"{"B":42}"#).expect("Failed to deserialize"), Outer::Inner1(Inner::B(42)) /// ); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(PartiallyTaggedDeserialize, attributes(serde_partially_tagged, serde))] -pub fn partially_tagged_deserialize_derive(input: TokenStream) -> TokenStream { - partially_tagged::impl_partially_tagged_deserialize(&parse_macro_input!(input)) +pub fn partially_tagged_deserialize_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(partially_tagged::impl_partially_tagged_deserialize(&input)) } /// Derive macro for `HasOrigin`. 
@@ -559,8 +586,10 @@ pub fn partially_tagged_deserialize_derive(input: TokenStream) -> TokenStream { /// assert_eq!(&layer_id, layer_sub_layer_event.origin_id()); /// assert_eq!(&sub_layer_id, sub_layer_created_event.origin_id()); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(HasOrigin, attributes(has_origin))] -pub fn has_origin_derive(input: TokenStream) -> TokenStream { - has_origin::impl_has_origin(&parse_macro_input!(input)) +pub fn has_origin_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + has_origin::impl_has_origin(&input) } diff --git a/data_model/derive/src/model.rs b/data_model/derive/src/model.rs index 8a7426baca3..73aa757c87c 100644 --- a/data_model/derive/src/model.rs +++ b/data_model/derive/src/model.rs @@ -1,10 +1,11 @@ +use iroha_macro_utils::Emitter; +use manyhow::emit; use proc_macro2::TokenStream; -use proc_macro_error::abort; use quote::{quote, ToTokens}; -use syn::{parse_quote, Attribute}; +use syn2::{parse_quote, Attribute}; -pub fn impl_model(input: &syn::ItemMod) -> TokenStream { - let syn::ItemMod { +pub fn impl_model(emitter: &mut Emitter, input: &syn2::ItemMod) -> TokenStream { + let syn2::ItemMod { attrs, vis, mod_token, @@ -14,14 +15,17 @@ pub fn impl_model(input: &syn::ItemMod) -> TokenStream { .. 
} = input; - let syn::Visibility::Public(vis_public) = vis else { - abort!( + let syn2::Visibility::Public(vis_public) = vis else { + emit!( + emitter, input, "The `model` attribute can only be used on public modules" ); + return quote!(); }; if ident != "model" { - abort!( + emit!( + emitter, input, "The `model` attribute can only be used on the `model` module" ); @@ -40,16 +44,16 @@ pub fn impl_model(input: &syn::ItemMod) -> TokenStream { } } -pub fn process_item(item: syn::Item) -> TokenStream { - let mut input: syn::DeriveInput = match item { - syn::Item::Struct(item_struct) => item_struct.into(), - syn::Item::Enum(item_enum) => item_enum.into(), - syn::Item::Union(item_union) => item_union.into(), +pub fn process_item(item: syn2::Item) -> TokenStream { + let mut input: syn2::DeriveInput = match item { + syn2::Item::Struct(item_struct) => item_struct.into(), + syn2::Item::Enum(item_enum) => item_enum.into(), + syn2::Item::Union(item_union) => item_union.into(), other => return other.into_token_stream(), }; let vis = &input.vis; - if matches!(vis, syn::Visibility::Public(_)) { + if matches!(vis, syn2::Visibility::Public(_)) { return process_pub_item(input); } @@ -70,21 +74,21 @@ pub fn process_item(item: syn::Item) -> TokenStream { } } -fn process_pub_item(input: syn::DeriveInput) -> TokenStream { +fn process_pub_item(input: syn2::DeriveInput) -> TokenStream { let (impl_generics, _, where_clause) = input.generics.split_for_impl(); let attrs = input.attrs; let ident = input.ident; match input.data { - syn::Data::Struct(item) => match &item.fields { - syn::Fields::Named(fields) => { + syn2::Data::Struct(item) => match &item.fields { + syn2::Fields::Named(fields) => { let fields = fields.named.iter().map(|field| { let field_attrs = &field.attrs; let field_name = &field.ident; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! 
{#field,}; } @@ -107,12 +111,12 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } - syn::Fields::Unnamed(fields) => { + syn2::Fields::Unnamed(fields) => { let fields = fields.unnamed.iter().map(|field| { let field_attrs = &field.attrs; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! {#field,}; } @@ -133,7 +137,7 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } - syn::Fields::Unit => { + syn2::Fields::Unit => { let item = quote! { pub struct #ident #impl_generics #where_clause; }; @@ -141,7 +145,7 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } }, - syn::Data::Enum(item) => { + syn2::Data::Enum(item) => { let variants = &item.variants; let item = quote! { @@ -154,13 +158,13 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { } // Triggers in `quote!` side, see https://github.com/rust-lang/rust-clippy/issues/10417 #[allow(clippy::arithmetic_side_effects)] - syn::Data::Union(item) => { + syn2::Data::Union(item) => { let fields = item.fields.named.iter().map(|field| { let field_attrs = &field.attrs; let field_name = &field.ident; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! {#field,}; } @@ -189,7 +193,9 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { } fn expose_ffi(mut attrs: Vec, item: &TokenStream) -> TokenStream { - let mut ffi_attrs = attrs.iter().filter(|&attr| attr.path.is_ident("ffi_type")); + let mut ffi_attrs = attrs + .iter() + .filter(|&attr| attr.path().is_ident("ffi_type")); if ffi_attrs.next().is_none() { return quote! { @@ -201,7 +207,7 @@ fn expose_ffi(mut attrs: Vec, item: &TokenStream) -> TokenStream { attrs.retain(|attr| *attr != parse_quote! 
(#[ffi_type])); let no_ffi_attrs: Vec<_> = attrs .iter() - .filter(|&attr| !attr.path.is_ident("ffi_type")) + .filter(|&attr| !attr.path().is_ident("ffi_type")) .collect(); quote! { diff --git a/data_model/derive/src/partially_tagged.rs b/data_model/derive/src/partially_tagged.rs index 845520f0670..830d4e65c6a 100644 --- a/data_model/derive/src/partially_tagged.rs +++ b/data_model/derive/src/partially_tagged.rs @@ -1,11 +1,11 @@ #![allow(clippy::too_many_lines)] -use proc_macro::TokenStream; -use proc_macro_error::abort; +use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn::{ +use syn2::{ parse::{Parse, ParseStream}, parse_quote, punctuated::Punctuated, + spanned::Spanned, Attribute, Generics, Ident, Token, Type, Variant, Visibility, }; @@ -24,15 +24,15 @@ pub struct PartiallyTaggedVariant { } impl Parse for PartiallyTaggedEnum { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let mut attrs = input.call(Attribute::parse_outer)?; let _vis = input.parse::()?; let _enum_token = input.parse::()?; let ident = input.parse::()?; let generics = input.parse::()?; let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(PartiallyTaggedVariant::parse)?; + let _brace_token = syn2::braced!(content in input); + let variants = content.parse_terminated(PartiallyTaggedVariant::parse, Token![,])?; attrs.retain(is_serde_attr); Ok(PartiallyTaggedEnum { attrs, @@ -44,7 +44,7 @@ impl Parse for PartiallyTaggedEnum { } impl Parse for PartiallyTaggedVariant { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let variant = input.parse::()?; let Variant { ident, @@ -53,12 +53,17 @@ impl Parse for PartiallyTaggedVariant { .. 
} = variant; let field = match fields { - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => fields + syn2::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => fields .unnamed .into_iter() .next() .expect("Guaranteed to have exactly one field"), - fields => abort!(fields, "Only supports tuple variants with single field"), + fields => { + return Err(syn2::Error::new( + fields.span(), + "Only supports tuple variants with single field", + )) + } }; let ty = field.ty; let is_untagged = attrs.iter().any(is_untagged_attr); @@ -104,7 +109,7 @@ fn is_untagged_attr(attr: &Attribute) -> bool { /// Check if `#[serde...]` attribute fn is_serde_attr(attr: &Attribute) -> bool { - attr.path + attr.path() .get_ident() .map_or_else(|| false, |ident| ident.to_string().eq("serde")) } @@ -117,7 +122,7 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre let (variants_ident, variants_ty, variants_attrs) = variants_to_tuple(enum_.variants()); let (untagged_variants_ident, untagged_variants_ty, untagged_variants_attrs) = variants_to_tuple(enum_.untagged_variants()); - let serialize_trait_bound: syn::TypeParamBound = parse_quote!(::serde::Serialize); + let serialize_trait_bound: syn2::TypeParamBound = parse_quote!(::serde::Serialize); let mut generics = enum_.generics.clone(); generics .type_params_mut() @@ -177,7 +182,6 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre } } } - .into() } pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenStream { @@ -190,7 +194,7 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt let (variants_ident, variants_ty, variants_attrs) = variants_to_tuple(enum_.variants()); let (untagged_variants_ident, untagged_variants_ty, untagged_variants_attrs) = variants_to_tuple(enum_.untagged_variants()); - let deserialize_trait_bound: syn::TypeParamBound = parse_quote!(::serde::de::DeserializeOwned); + let deserialize_trait_bound: 
syn2::TypeParamBound = parse_quote!(::serde::de::DeserializeOwned); let variants_ty_deserialize_bound = variants_ty .iter() .map(|ty| quote!(#ty: #deserialize_trait_bound).to_string()) @@ -343,5 +347,4 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt } } } - .into() } diff --git a/data_model/derive/tests/partial_tagged_serde.rs b/data_model/derive/tests/partial_tagged_serde.rs new file mode 100644 index 00000000000..99e11e06e0e --- /dev/null +++ b/data_model/derive/tests/partial_tagged_serde.rs @@ -0,0 +1,83 @@ +use std::fmt::Formatter; + +use iroha_data_model_derive::{PartiallyTaggedDeserialize, PartiallyTaggedSerialize}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; + +#[allow(variant_size_differences)] // it's a test, duh +#[derive(Debug, PartialEq, Eq, PartiallyTaggedDeserialize, PartiallyTaggedSerialize)] +enum Value { + Bool(bool), + String(String), + #[serde_partially_tagged(untagged)] + Numeric(NumericValue), +} + +// a simpler version of NumericValue than used in data_model +// this one is always i32, but is still serialized as a string literal +// NOTE: debug is actually required for `PartiallyTaggedDeserialize`! 
+#[derive(Debug, PartialEq, Eq)] +struct NumericValue(i32); + +impl Serialize for NumericValue { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.0.to_string()) + } +} + +struct NumericValueVisitor; + +impl de::Visitor<'_> for NumericValueVisitor { + type Value = NumericValue; + + fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + formatter.write_str("a string literal containing a number") + } + + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + let parsed = v.parse::().map_err(|e| E::custom(e))?; + + Ok(NumericValue(parsed)) + } +} + +impl<'de> Deserialize<'de> for NumericValue { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_str(NumericValueVisitor) + } +} + +#[test] +fn partially_tagged_serde() { + let values = [ + Value::Bool(true), + Value::String("I am string".to_owned()), + Value::Numeric(NumericValue(42)), + ]; + let serialized_values = [r#"{"Bool":true}"#, r#"{"String":"I am string"}"#, r#""42""#]; + + for (value, serialized_value) in values.iter().zip(serialized_values.iter()) { + let serialized = serde_json::to_string(value) + .unwrap_or_else(|e| panic!("Failed to serialize `{:?}`: {:?}", value, e)); + assert_eq!( + serialized, *serialized_value, + "Serialized form of `{:?}` does not match the expected value", + value + ); + let deserialized: Value = serde_json::from_str(serialized_value) + .unwrap_or_else(|e| panic!("Failed to deserialize `{:?}`: {:?}", serialized_value, e)); + assert_eq!( + *value, deserialized, + "Deserialized form of `{:?}` does not match the expected value", + value + ); + } +} diff --git a/ffi/derive/Cargo.toml b/ffi/derive/Cargo.toml index d8c320bfa11..5004ea9a52c 100644 --- a/ffi/derive/Cargo.toml +++ b/ffi/derive/Cargo.toml @@ -15,6 +15,8 @@ workspace = true proc-macro = true [dependencies] +iroha_macro_utils = { workspace = true } + syn2 = { workspace = true, features = 
["full", "visit", "visit-mut", "extra-traits"] } quote = { workspace = true } proc-macro2 = { workspace = true } @@ -22,7 +24,6 @@ manyhow = { workspace = true } darling = { workspace = true } rustc-hash = { workspace = true } -drop_bomb = "0.1.5" parse-display = "0.8.2" [dev-dependencies] diff --git a/ffi/derive/src/convert.rs b/ffi/derive/src/convert.rs index cb409c622af..d8995fedc4f 100644 --- a/ffi/derive/src/convert.rs +++ b/ffi/derive/src/convert.rs @@ -4,21 +4,17 @@ use std::fmt::{Display, Formatter}; use darling::{ ast::Style, util::SpannedValue, FromAttributes, FromDeriveInput, FromField, FromVariant, }; +use iroha_macro_utils::{parse_single_list_attr_opt, Emitter}; use manyhow::{emit, error_message}; use proc_macro2::{Delimiter, Span, TokenStream}; use quote::quote; -use syn2::{ - parse::ParseStream, spanned::Spanned as _, visit::Visit as _, Attribute, Field, Ident, Meta, -}; +use syn2::{parse::ParseStream, spanned::Spanned as _, visit::Visit as _, Attribute, Field, Ident}; -use crate::{ - attr_parse::{ - derive::DeriveAttrs, - doc::DocAttrs, - getset::{GetSetFieldAttrs, GetSetStructAttrs}, - repr::{Repr, ReprKind, ReprPrimitive}, - }, - emitter::Emitter, +use crate::attr_parse::{ + derive::DeriveAttrs, + doc::DocAttrs, + getset::{GetSetFieldAttrs, GetSetStructAttrs}, + repr::{Repr, ReprKind, ReprPrimitive}, }; #[derive(Debug)] @@ -135,47 +131,7 @@ impl syn2::parse::Parse for FfiTypeKindFieldAttribute { } } -fn parse_ffi_type_attr(attrs: &[Attribute]) -> darling::Result> { - let mut accumulator = darling::error::Accumulator::default(); - - // first, ensure there is only one "ffi_type" attribute (we don't support multiple) - let ffi_type_attrs = attrs - .iter() - .filter(|a| a.path().is_ident("ffi_type")) - .collect::>(); - let attr = match *ffi_type_attrs.as_slice() { - [] => { - return accumulator.finish_with(None); - } - [attr] => attr, - [attr, ref tail @ ..] 
=> { - // allow parsing to proceed further to collect more errors - accumulator.push( - darling::Error::custom("Only one #[ffi_type] attribute is allowed!").with_span( - &tail - .iter() - .map(syn2::spanned::Spanned::span) - .reduce(|a, b| a.join(b).unwrap()) - .unwrap(), - ), - ); - attr - } - }; - - let mut kind = None; - - match &attr.meta { - Meta::Path(_) | Meta::NameValue(_) => accumulator.push(darling::Error::custom( - "Expected #[ffi_type(...)] attribute to be a list", - )), - Meta::List(list) => { - kind = accumulator.handle(syn2::parse2(list.tokens.clone()).map_err(Into::into)); - } - } - - accumulator.finish_with(kind) -} +const FFI_TYPE_ATTR: &str = "ffi_type"; pub struct FfiTypeAttr { pub kind: Option, @@ -183,7 +139,7 @@ pub struct FfiTypeAttr { impl FromAttributes for FfiTypeAttr { fn from_attributes(attrs: &[Attribute]) -> darling::Result { - parse_ffi_type_attr(attrs).map(|kind| Self { kind }) + parse_single_list_attr_opt(FFI_TYPE_ATTR, attrs).map(|kind| Self { kind }) } } @@ -193,7 +149,7 @@ pub struct FfiTypeFieldAttr { impl FromAttributes for FfiTypeFieldAttr { fn from_attributes(attrs: &[Attribute]) -> darling::Result { - parse_ffi_type_attr(attrs).map(|kind| Self { kind }) + parse_single_list_attr_opt(FFI_TYPE_ATTR, attrs).map(|kind| Self { kind }) } } diff --git a/ffi/derive/src/getset_gen.rs b/ffi/derive/src/getset_gen.rs index 6458c04a030..89c628de71b 100644 --- a/ffi/derive/src/getset_gen.rs +++ b/ffi/derive/src/getset_gen.rs @@ -1,6 +1,7 @@ use std::default::Default; use darling::ast::Style; +use iroha_macro_utils::Emitter; use manyhow::emit; use proc_macro2::TokenStream; use quote::quote; @@ -13,7 +14,6 @@ use crate::{ getset::{GetSetGenMode, GetSetStructAttrs}, }, convert::{FfiTypeField, FfiTypeFields}, - emitter::Emitter, impl_visitor::{unwrap_result_type, Arg, FnDescriptor}, }; diff --git a/ffi/derive/src/impl_visitor.rs b/ffi/derive/src/impl_visitor.rs index 6c547b10020..c5bd408b01b 100644 --- a/ffi/derive/src/impl_visitor.rs +++ 
b/ffi/derive/src/impl_visitor.rs @@ -2,6 +2,7 @@ //! //! It also defines descriptors - types that are used for the codegen step +use iroha_macro_utils::Emitter; use manyhow::emit; use proc_macro2::Span; use syn2::{ @@ -11,8 +12,6 @@ use syn2::{ Attribute, Ident, Path, Type, Visibility, }; -use crate::emitter::Emitter; - pub struct Arg { self_ty: Option, name: Ident, diff --git a/ffi/derive/src/lib.rs b/ffi/derive/src/lib.rs index 2bbd93b6489..fdd0673192b 100644 --- a/ffi/derive/src/lib.rs +++ b/ffi/derive/src/lib.rs @@ -3,6 +3,7 @@ use darling::FromDeriveInput; use impl_visitor::{FnDescriptor, ImplDescriptor}; +use iroha_macro_utils::Emitter; use manyhow::{emit, manyhow}; use proc_macro2::TokenStream; use quote::quote; @@ -12,12 +13,10 @@ use wrapper::wrap_method; use crate::{ attr_parse::derive::Derive, convert::{derive_ffi_type, FfiTypeData, FfiTypeInput}, - emitter::Emitter, }; mod attr_parse; mod convert; -mod emitter; mod ffi_fn; mod getset_gen; mod impl_visitor; diff --git a/ffi/derive/src/wrapper.rs b/ffi/derive/src/wrapper.rs index 17fcb77e083..8ea1286eeb4 100644 --- a/ffi/derive/src/wrapper.rs +++ b/ffi/derive/src/wrapper.rs @@ -1,3 +1,4 @@ +use iroha_macro_utils::Emitter; use manyhow::emit; use proc_macro2::{Span, TokenStream}; use quote::quote; @@ -6,7 +7,6 @@ use syn2::{parse_quote, visit_mut::VisitMut, Attribute, Ident, Type}; use crate::{ attr_parse::derive::{Derive, RustcDerive}, convert::FfiTypeInput, - emitter::Emitter, ffi_fn, getset_gen::{gen_resolve_type, gen_store_name}, impl_visitor::{unwrap_result_type, Arg, FnDescriptor, ImplDescriptor, TypeImplTraitResolver}, diff --git a/macro/utils/Cargo.toml b/macro/utils/Cargo.toml index 74e9e6faf51..08c1dce1270 100644 --- a/macro/utils/Cargo.toml +++ b/macro/utils/Cargo.toml @@ -14,6 +14,9 @@ maintenance = { status = "actively-developed" } [dependencies] syn = { workspace = true, features = ["default", "parsing", "printing"] } +syn2 = { workspace = true, features = ["default", "parsing", "printing"] } 
+darling = { workspace = true } quote = { workspace = true } proc-macro2 = { workspace = true } -proc-macro-error = { workspace = true } +manyhow = { workspace = true } +drop_bomb = "0.1.5" diff --git a/ffi/derive/src/emitter.rs b/macro/utils/src/emitter.rs similarity index 97% rename from ffi/derive/src/emitter.rs rename to macro/utils/src/emitter.rs index 193d961c663..f5edda28b1b 100644 --- a/ffi/derive/src/emitter.rs +++ b/macro/utils/src/emitter.rs @@ -1,8 +1,9 @@ +//! A wrapper type around [`manyhow::Emitter`] that provides a more ergonomic API. + use drop_bomb::DropBomb; use manyhow::ToTokensError; use proc_macro2::TokenStream; -// TODO: move this type to `derive-primitives` crate /// A wrapper type around [`manyhow::Emitter`] that provides a more ergonomic API. /// /// This type is used to accumulate errors during parsing and code generation. diff --git a/macro/utils/src/lib.rs b/macro/utils/src/lib.rs index 2d6d6ef3e70..9f19785d07c 100644 --- a/macro/utils/src/lib.rs +++ b/macro/utils/src/lib.rs @@ -1,5 +1,9 @@ //! Module for various functions and structs to build macros in iroha. +mod emitter; + +pub use emitter::Emitter; + /// Trait for attribute parsing generalization pub trait AttrParser { /// Attribute identifier `#[IDENT...]` @@ -65,3 +69,101 @@ macro_rules! attr_struct { } }; } + +/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. +/// +/// If no attribute with specified name is found, returns `Ok(None)`. 
+pub fn parse_single_list_attr_opt( + attr_name: &str, + attrs: &[syn2::Attribute], +) -> darling::Result> { + let mut accumulator = darling::error::Accumulator::default(); + + // first, ensure there is only one attribute with the requested name + // take the first one if there are multiple + let matching_attrs = attrs + .iter() + .filter(|a| a.path().is_ident(attr_name)) + .collect::>(); + let attr = match *matching_attrs.as_slice() { + [] => { + return accumulator.finish_with(None); + } + [attr] => attr, + [attr, ref tail @ ..] => { + // allow parsing to proceed further to collect more errors + accumulator.push( + darling::Error::custom(format!("Only one #[{}] attribute is allowed!", attr_name)) + .with_span( + &tail + .iter() + .map(syn2::spanned::Spanned::span) + .reduce(|a, b| a.join(b).unwrap()) + .unwrap(), + ), + ); + attr + } + }; + + let mut kind = None; + + match &attr.meta { + syn2::Meta::Path(_) | syn2::Meta::NameValue(_) => accumulator.push(darling::Error::custom( + format!("Expected #[{}(...)] attribute to be a list", attr_name), + )), + syn2::Meta::List(list) => { + kind = accumulator.handle(syn2::parse2(list.tokens.clone()).map_err(Into::into)); + } + } + + accumulator.finish_with(kind) +} + +/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. +/// +/// If no attribute with specified name is found, returns an error. +pub fn parse_single_list_attr( + attr_name: &str, + attrs: &[syn2::Attribute], +) -> darling::Result { + parse_single_list_attr_opt(attr_name, attrs)? + .ok_or_else(|| darling::Error::custom(format!("Missing `#[{}(...)]` attribute", attr_name))) +} + +/// Macro for automatic [`syn::parse::Parse`] impl generation for keyword +/// attribute structs in derive macros. +#[macro_export] +macro_rules! 
attr_struct2 { + // Matching struct with named fields + ( + $( #[$meta:meta] )* + // ^~~~attributes~~~~^ + $vis:vis struct $name:ident { + $( + $( #[$field_meta:meta] )* + // ^~~~field attributes~~~!^ + $field_vis:vis $field_name:ident : $field_ty:ty + // ^~~~~~~~~~~~~~~~~a single field~~~~~~~~~~~~~~~^ + ),* + $(,)? } + ) => { + $( #[$meta] )* + $vis struct $name { + $( + $( #[$field_meta] )* + $field_vis $field_name : $field_ty + ),* + } + + impl syn2::parse::Parse for $name { + fn parse(input: syn2::parse::ParseStream) -> syn2::Result { + Ok(Self { + $( + $field_name: input.parse()?, + )* + }) + } + } + }; +} From d985d9baa0cbdfc8967c9e41ef55a776de16a467 Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Wed, 13 Sep 2023 14:56:23 +0300 Subject: [PATCH 2/9] [refactor] #3882: Make derive(Filter) use darling Signed-off-by: Nikita Strygin --- Cargo.lock | 3 + data_model/derive/Cargo.toml | 3 + data_model/derive/src/filter.rs | 273 ++++++++++++++---------------- data_model/derive/src/lib.rs | 11 +- data_model/derive/tests/filter.rs | 110 ++++++++++++ 5 files changed, 255 insertions(+), 145 deletions(-) create mode 100644 data_model/derive/tests/filter.rs diff --git a/Cargo.lock b/Cargo.lock index b26ac45ea5f..0f31f1171e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3173,9 +3173,12 @@ name = "iroha_data_model_derive" version = "2.0.0-pre-rc.19" dependencies = [ "darling", + "derive_more", "iroha_data_model", "iroha_macro_utils", + "iroha_schema", "manyhow", + "parity-scale-codec", "proc-macro2", "quote", "serde", diff --git a/data_model/derive/Cargo.toml b/data_model/derive/Cargo.toml index bee036dd7ef..c170ebc37e1 100644 --- a/data_model/derive/Cargo.toml +++ b/data_model/derive/Cargo.toml @@ -23,6 +23,9 @@ iroha_macro_utils = { workspace = true } [dev-dependencies] iroha_data_model = { workspace = true, features = ["http"] } +iroha_schema = { workspace = true } +parity-scale-codec = { workspace = true } +derive_more = { workspace = true } serde = { workspace = true, 
features = ["derive"] } serde_json = { workspace = true } diff --git a/data_model/derive/src/filter.rs b/data_model/derive/src/filter.rs index 4725f0c45ac..3fccf4e73ac 100644 --- a/data_model/derive/src/filter.rs +++ b/data_model/derive/src/filter.rs @@ -1,50 +1,92 @@ #![allow( clippy::mixed_read_write_in_expression, - clippy::unwrap_in_result, clippy::arithmetic_side_effects )] +use darling::{FromDeriveInput, FromVariant}; +use iroha_macro_utils::Emitter; +use manyhow::emit; use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn2::{ - parse::{Parse, ParseStream}, - punctuated::Punctuated, - Attribute, Generics, Ident, Token, Variant, Visibility, -}; +use syn2::{Generics, Ident, Variant, Visibility}; -pub struct EventEnum { +#[derive(FromDeriveInput)] +#[darling(supports(enum_tuple))] +struct EventEnum { vis: Visibility, ident: Ident, generics: Generics, - variants: Punctuated, + data: darling::ast::Data, } -pub enum EventVariant { - EventField { variant: Ident, field: Ident }, - IdField(Ident), +enum EventVariant { + /// A variant of event that delegates to some other event. Identified by conventional naming of the event types: ending with `Event`. + /// Delegates all the filterting to the corresponding event's filter. + Delegating { + variant_name: Ident, + delegated_event_ty_name: Ident, + }, + /// An actual event. Has either an Id or an identifiable object as a payload + /// The presense of the Id field is not required by this macro per se, but will be enfored by `OriginFilter` requiring a `HasOrigin` impl. 
+ Direct(Ident), +} + +impl FromVariant for EventVariant { + fn from_variant(variant: &Variant) -> darling::Result { + let syn2::Fields::Unnamed(fields) = &variant.fields else { + return Err(darling::Error::custom("Expected an enum with unnamed fields").with_span(&variant.fields)); + }; + // note: actually, we have only one field in the event variants + // this is not enforced by this macro, but by `IntoSchema` + let Some(first_field_ty) = fields.unnamed.first().map(|v| &v.ty) else { + return Err(darling::Error::custom("Expected at least one field").with_span(&fields)); + }; + let syn2::Type::Path(path) = first_field_ty else { + return Err(darling::Error::custom("Only identifiers supported as event types").with_span(first_field_ty)); + }; + let Some(first_field_ty_name) = path.path.get_ident() else { + return Err(darling::Error::custom("Only identifiers supported as event types").with_span(first_field_ty)); + }; + + if first_field_ty_name.to_string().ends_with("Event") { + Ok(EventVariant::Delegating { + variant_name: variant.ident.clone(), + delegated_event_ty_name: first_field_ty_name.clone(), + }) + } else { + Ok(EventVariant::Direct(variant.ident.clone())) + } + } } impl EventEnum { + fn variants(&self) -> &[EventVariant] { + match &self.data { + darling::ast::Data::Enum(variants) => variants, + _ => unreachable!("BUG: only enums should be here"), + } + } + /// Used to produce fields like `ByAccount(crate::prelude::FilterOpt)` in `DomainEventFilter`. - fn generate_filter_variants_with_event_fields(&self) -> Vec { - self.variants + fn generate_filter_variants_for_delegating_events(&self) -> Vec { + self.variants() .iter() .filter_map(|variant| match variant { - EventVariant::IdField(_) => None, - EventVariant::EventField { - variant: variant_ident, - field: field_ident, + EventVariant::Direct(_) => None, + EventVariant::Delegating { + variant_name, + delegated_event_ty_name, } => { // E.g. 
`Account` field in the event => `ByAccount` in the event filter - let filter_variant_ident = format_ident!("By{}", variant_ident); + let filter_variant_ident = format_ident!("By{}", variant_name); // E.g. `AccountEvent` inner field from `Account` variant in event => // `AccountFilter` inside the event filter - let inner_filter_ident = format_ident!( + let inner_filter_ident = + format_ident!( "{}Filter", - field_ident - .to_string() - .strip_suffix("Event") - .expect("Variant name should have suffix `Event`"), + delegated_event_ty_name.to_string().strip_suffix("Event").expect( + "BUG: Variant name should have suffix `Event` (checked in FromVariant)" + ), ); let import_path = quote! {crate::prelude}; Some(quote! { @@ -55,36 +97,36 @@ impl EventEnum { } /// Used to produce fields like `ByCreated` in `DomainEventFilter`. - fn generate_filter_variants_with_id_fields(&self) -> Vec { - self.variants + fn generate_filter_variants_for_direct_events(&self) -> Vec { + self.variants() .iter() .filter_map(|variant| match variant { - EventVariant::IdField(event_variant_ident) => { + EventVariant::Direct(event_variant_ident) => { // Event fields such as `MetadataRemoved` get mapped to `ByMetadataRemoved` let filter_variant_ident = format_ident!("By{}", event_variant_ident); Some(filter_variant_ident) } - EventVariant::EventField { .. } => None, + EventVariant::Delegating { .. } => None, }) .collect() } /// Match arms for `Filter` impls of event filters of the form /// `(Self::ByAccount(filter_opt), crate::prelude::DomainEvent::Account(event)) => {filter_opt.matches(event)}`. - fn generate_filter_impls_with_event_fields(&self) -> Vec { - self.variants + fn generate_filter_impls_for_delegaring_events(&self) -> Vec { + self.variants() .iter() .filter_map(|variant| match variant { - EventVariant::IdField(_) => None, - EventVariant::EventField { - variant: event_variant_ident, + EventVariant::Direct(_) => None, + EventVariant::Delegating { + variant_name, .. 
} => { let event_ident = &self.ident; - let filter_variant_ident = format_ident!("By{}", event_variant_ident); + let filter_variant_ident = format_ident!("By{}", variant_name); let import_path = quote! {crate::prelude}; Some(quote! { - (Self::#filter_variant_ident(filter_opt), #import_path::#event_ident::#event_variant_ident(event)) => { + (Self::#filter_variant_ident(filter_opt), #import_path::#event_ident::#variant_name(event)) => { filter_opt.matches(event) }}) @@ -93,11 +135,11 @@ impl EventEnum { /// Match arms for `Filter` impls of event filters of the form /// `(Self::ByCreated, crate::prelude::DomainEvent::Created(_))`. - fn generate_filter_impls_with_id_fields(&self) -> Vec { - self.variants + fn generate_filter_impls_for_direct_events(&self) -> Vec { + self.variants() .iter() .filter_map(|variant| match variant { - EventVariant::IdField(event_variant_ident) => { + EventVariant::Direct(event_variant_ident) => { let event_ident = &self.ident; let filter_variant_ident = format_ident!("By{}", event_variant_ident); let import_path = quote! {crate::prelude}; @@ -106,74 +148,15 @@ impl EventEnum { (Self::#filter_variant_ident, #import_path::#event_ident::#event_variant_ident(_)) }) }, - EventVariant::EventField { .. } => None, + EventVariant::Delegating { .. 
} => None, }) .collect() } } -impl Parse for EventEnum { - fn parse(input: ParseStream) -> syn2::Result { - let _attrs = input.call(Attribute::parse_outer)?; - let vis = input.parse()?; - let _enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let content; - let _brace_token = syn2::braced!(content in input); - let variants = content.parse_terminated(EventVariant::parse, Token![,])?; - if ident.to_string().ends_with("Event") { - Ok(EventEnum { - vis, - ident, - generics, - variants, - }) - } else { - Err(syn2::Error::new_spanned( - ident, - "Bad ident: only derivable for `...Event` enums", - )) - } - } -} - -impl Parse for EventVariant { - fn parse(input: ParseStream) -> syn2::Result { - let variant = input.parse::()?; - let variant_ident = variant.ident; - let field_type = variant - .fields - .into_iter() - .next() - .expect("Variant should have at least one unnamed field") - .ty; - if let syn2::Type::Path(path) = field_type { - let field_ident = path - .path - .get_ident() - .expect("Should be an ident-convertible path"); - - if field_ident.to_string().ends_with("Event") { - Ok(EventVariant::EventField { - variant: variant_ident, - field: field_ident.clone(), - }) - } else { - Ok(EventVariant::IdField(variant_ident)) - } - } else { - Err(syn2::Error::new_spanned( - field_type, - "Unexpected AST type variant", - )) - } - } -} - -/// Generates the filter for the event. E.g. for `AccountEvent`, `AccountFilter` +/// Generates the event filter for the event. E.g. for `AccountEvent`, `AccountEventFilter` /// and its `impl Filter` are generated. -pub fn impl_filter(event: &EventEnum) -> TokenStream { +fn impl_event_filter(event: &EventEnum) -> proc_macro2::TokenStream { let EventEnum { vis, ident: event_ident, @@ -181,91 +164,97 @@ pub fn impl_filter(event: &EventEnum) -> TokenStream { .. 
} = event; - let event_filter_and_impl = impl_event_filter(event); + let id_variants = event.generate_filter_variants_for_direct_events(); + let event_variants = event.generate_filter_variants_for_delegating_events(); - let filter_ident = format_ident!( - "{}Filter", - event_ident - .to_string() - .strip_suffix("Event") - .expect("Events should follow the naming format") - ); - let event_filter_ident = format_ident!("{}Filter", event_ident); + let id_impls = event.generate_filter_impls_for_direct_events(); + let event_impls = event.generate_filter_impls_for_delegaring_events(); + let event_filter_ident = format_ident!("{}Filter", event_ident); let import_path = quote! { crate::prelude }; - let fil_opt = quote! { #import_path::FilterOpt }; - let orig_fil = quote! { #import_path::OriginFilter }; let imp_event = quote! { #import_path::#event_ident }; - let filter_doc = format!(" Filter for {event_ident} entity"); + let event_filter_doc = format!(" Event filter for {event_ident} entity"); quote! { iroha_data_model_derive::model_single! 
{ - #[derive(Debug, Clone, PartialEq, Eq, derive_more::Constructor, Decode, Encode, Deserialize, Serialize, IntoSchema)] - #[doc = #filter_doc] - #vis struct #filter_ident #generics { - origin_filter: #fil_opt<#orig_fil<#imp_event>>, - event_filter: #fil_opt<#event_filter_ident> + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[allow(clippy::enum_variant_names, missing_docs)] + #[doc = #event_filter_doc] + #vis enum #event_filter_ident #generics { + #(#id_variants),*, + #(#event_variants),* } } #[cfg(feature = "transparent_api")] - impl #import_path::Filter for #filter_ident { + impl #import_path::Filter for #event_filter_ident { type Event = #imp_event; - fn matches(&self, event: &Self::Event) -> bool { - self.origin_filter.matches(event) && self.event_filter.matches(event) + fn matches(&self, event: &#imp_event) -> bool { + match (self, event) { + #(#id_impls)|* => true, + #(#event_impls),* + _ => false, + } } } - - #event_filter_and_impl } } -/// Generates the event filter for the event. E.g. for `AccountEvent`, `AccountEventFilter` +/// Generates the filter for the event. E.g. for `AccountEvent`, `AccountFilter` /// and its `impl Filter` are generated. -fn impl_event_filter(event: &EventEnum) -> proc_macro2::TokenStream { +pub fn impl_filter(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream { + let Some(event) = emitter.handle(EventEnum::from_derive_input(input)) else { + return quote!(); + }; + let EventEnum { vis, ident: event_ident, generics, .. 
- } = event; + } = &event; - let id_variants = event.generate_filter_variants_with_id_fields(); - let event_variants = event.generate_filter_variants_with_event_fields(); + let event_filter_and_impl = impl_event_filter(&event); - let id_impls = event.generate_filter_impls_with_id_fields(); - let event_impls = event.generate_filter_impls_with_event_fields(); + let event_base = event_ident.to_string().strip_suffix("Event").map_or_else( + || { + emit!(emitter, event_ident, "Event name should end with `Event`"); + event_ident.to_string() + }, + ToString::to_string, + ); + let filter_ident = format_ident!("{}Filter", event_base); let event_filter_ident = format_ident!("{}Filter", event_ident); + let import_path = quote! { crate::prelude }; + let fil_opt = quote! { #import_path::FilterOpt }; + let orig_fil = quote! { #import_path::OriginFilter }; let imp_event = quote! { #import_path::#event_ident }; - let event_filter_doc = format!(" Event filter for {event_ident} entity"); + let filter_doc = format!(" Filter for {event_ident} entity"); quote! { iroha_data_model_derive::model_single! 
{ - #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] - #[allow(clippy::enum_variant_names, missing_docs)] - #[doc = #event_filter_doc] - #vis enum #event_filter_ident #generics { - #(#id_variants),*, - #(#event_variants),* + #[derive(Debug, Clone, PartialEq, Eq, derive_more::Constructor, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[doc = #filter_doc] + #vis struct #filter_ident #generics { + origin_filter: #fil_opt<#orig_fil<#imp_event>>, + event_filter: #fil_opt<#event_filter_ident> } } #[cfg(feature = "transparent_api")] - impl #import_path::Filter for #event_filter_ident { + impl #import_path::Filter for #filter_ident { type Event = #imp_event; - fn matches(&self, event: &#imp_event) -> bool { - match (self, event) { - #(#id_impls)|* => true, - #(#event_impls),* - _ => false, - } + fn matches(&self, event: &Self::Event) -> bool { + self.origin_filter.matches(event) && self.event_filter.matches(event) } } + + #event_filter_and_impl } } diff --git a/data_model/derive/src/lib.rs b/data_model/derive/src/lib.rs index 61b17a51b09..577404cdaa4 100644 --- a/data_model/derive/src/lib.rs +++ b/data_model/derive/src/lib.rs @@ -400,10 +400,15 @@ pub fn id_eq_ord_hash(input: TokenStream) -> Result { /// It assumes that the derive is imported and referred to by its original name. 
#[manyhow] #[proc_macro_derive(Filter)] -pub fn filter_derive(input: TokenStream) -> Result { - let input = syn2::parse2(input)?; +pub fn filter_derive(input: TokenStream) -> TokenStream { + let mut emitter = Emitter::new(); - Ok(filter::impl_filter(&input)) + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = filter::impl_filter(&mut emitter, &input); + emitter.finish_token_stream_with(result) } /// Derive `::serde::Serialize` trait for `enum` with possibility to avoid tags for selected variants diff --git a/data_model/derive/tests/filter.rs b/data_model/derive/tests/filter.rs new file mode 100644 index 00000000000..27e54a056f1 --- /dev/null +++ b/data_model/derive/tests/filter.rs @@ -0,0 +1,110 @@ +//! A smoke-test for the `derive(Filter)` + +use iroha_data_model::{ + prelude::{HasOrigin, Identifiable}, + IdBox, +}; +use iroha_data_model_derive::{Filter, IdEqOrdHash}; +use iroha_schema::IntoSchema; +use parity_scale_codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; + +// These are dummy types for the FilterDerive to work +// They would not work with `feature = transparent_api`, but are enough for the smoke test +mod prelude { + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + + #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Encode, Decode, IntoSchema)] + pub struct FilterOpt(T); + + #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Encode, Decode, IntoSchema)] + pub struct OriginFilter(T); + + pub use super::LayerEvent; +} + +#[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Encode, + Decode, + IntoSchema, +)] +pub struct SubLayerEvent; + +#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Encode, Decode, IntoSchema)] +pub struct SubLayerFilter; + +#[derive( + Copy, + Clone, + IntoSchema, + Ord, + PartialOrd, + Eq, + 
PartialEq, + Serialize, + Deserialize, + Decode, + Encode, + Debug, + Hash, +)] +pub struct LayerId { + name: u32, +} + +impl HasOrigin for LayerEvent { + type Origin = Layer; + + fn origin_id(&self) -> &::Id { + todo!() + } +} + +#[derive(Debug, IdEqOrdHash)] +pub struct Layer { + id: LayerId, +} + +impl From for IdBox { + fn from(_: LayerId) -> Self { + unreachable!() + } +} + +/// The tested type +#[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Encode, + Decode, + IntoSchema, + Filter, +)] +pub enum LayerEvent { + SubLayer(SubLayerEvent), + Created(LayerId), +} + +#[test] +fn filter() { + // nothing much to test here... + // I guess we do test that it compiles +} From 2308d9446186efe91f8f664a526f8cbae7ed472a Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Thu, 14 Sep 2023 10:47:31 +0300 Subject: [PATCH 3/9] [refactor] #3882: Make derive(IdEqOrdHash) use darling, add tests Signed-off-by: Nikita Strygin --- data_model/derive/src/id.rs | 189 ++++++++++++------ data_model/derive/src/lib.rs | 11 +- data_model/derive/tests/id_eq_ord_hash.rs | 117 +++++++++++ .../derive/tests/{ => ui_pass}/filter.rs | 6 +- 4 files changed, 255 insertions(+), 68 deletions(-) create mode 100644 data_model/derive/tests/id_eq_ord_hash.rs rename data_model/derive/tests/{ => ui_pass}/filter.rs (95%) diff --git a/data_model/derive/src/id.rs b/data_model/derive/src/id.rs index ad57dfd789e..18af318dcf7 100644 --- a/data_model/derive/src/id.rs +++ b/data_model/derive/src/id.rs @@ -1,16 +1,109 @@ #![allow(clippy::str_to_string, clippy::mixed_read_write_in_expression)] -use manyhow::{bail, Result}; +use darling::{FromAttributes, FromDeriveInput, FromField}; +use iroha_macro_utils::Emitter; +use manyhow::emit; use proc_macro2::TokenStream; -use quote::quote; +use quote::{quote, ToTokens}; use syn2::parse_quote; -pub fn impl_id(input: &syn2::ItemStruct) -> Result { +mod kw { + syn2::custom_keyword!(transparent); +} + +enum IdAttr { + 
Missing, + Normal, + Transparent, +} + +impl FromAttributes for IdAttr { + fn from_attributes(attrs: &[syn2::Attribute]) -> darling::Result { + let mut accumulator = darling::error::Accumulator::default(); + let attrs = attrs + .iter() + .filter(|v| v.path().is_ident("id")) + .collect::>(); + let attr = match attrs.as_slice() { + [] => { + return accumulator.finish_with(IdAttr::Missing); + } + [attr] => attr, + [attr, ref tail @ ..] => { + accumulator.push( + darling::Error::custom("Only one `#[id]` attribute is allowed!").with_span( + &tail + .iter() + .map(syn2::spanned::Spanned::span) + .reduce(|a, b| a.join(b).unwrap()) + .unwrap(), + ), + ); + attr + } + }; + + let result = match &attr.meta { + syn2::Meta::Path(_) => IdAttr::Normal, + syn2::Meta::List(list) if list.parse_args::().is_ok() => { + IdAttr::Transparent + } + _ => { + accumulator.push( + darling::Error::custom("Expected `#[id]` or `#[id(transparent)]`") + .with_span(&attr), + ); + IdAttr::Normal + } + }; + + accumulator.finish_with(result) + } +} + +#[derive(FromDeriveInput)] +#[darling(supports(struct_any))] +struct IdDeriveInput { + ident: syn2::Ident, + generics: syn2::Generics, + data: darling::ast::Data, +} + +struct IdField { + ident: Option, + ty: syn2::Type, + id_attr: IdAttr, +} + +impl FromField for IdField { + fn from_field(field: &syn2::Field) -> darling::Result { + let ident = field.ident.clone(); + let ty = field.ty.clone(); + let id_attr = IdAttr::from_attributes(&field.attrs)?; + + Ok(Self { ident, ty, id_attr }) + } +} + +impl IdDeriveInput { + fn fields(&self) -> &darling::ast::Fields { + match &self.data { + darling::ast::Data::Struct(fields) => fields, + _ => unreachable!(), + } + } +} + +pub fn impl_id_eq_ord_hash(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream { + let Some(input) = emitter.handle(IdDeriveInput::from_derive_input(input)) else { + return quote!(); + }; + let name = &input.ident; let (impl_generics, ty_generics, where_clause) = 
input.generics.split_for_impl(); - let identifiable_derive = derive_identifiable(input)?; + let identifiable_derive = derive_identifiable(emitter, &input); - Ok(quote! { + quote! { #identifiable_derive impl #impl_generics ::core::cmp::PartialOrd for #name #ty_generics #where_clause where Self: Identifiable { @@ -38,15 +131,15 @@ pub fn impl_id(input: &syn2::ItemStruct) -> Result { self.id().hash(state); } } - }) + } } -fn derive_identifiable(input: &syn2::ItemStruct) -> Result { +fn derive_identifiable(emitter: &mut Emitter, input: &IdDeriveInput) -> TokenStream { let name = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let (id_type, id_expr) = get_id_type(input)?; + let (id_type, id_expr) = get_id_type(emitter, input); - Ok(quote! { + quote! { impl #impl_generics Identifiable for #name #ty_generics #where_clause { type Id = #id_type; @@ -55,66 +148,42 @@ fn derive_identifiable(input: &syn2::ItemStruct) -> Result { #id_expr } } - }) + } } -fn get_id_type(input: &syn2::ItemStruct) -> Result<(TokenStream, TokenStream)> { - match &input.fields { - syn2::Fields::Named(fields) => { - for field in &fields.named { - let (field_name, field_ty) = (&field.ident, &field.ty); - - if is_identifier(&field.attrs) { - return Ok((quote! {#field_ty}, quote! {&self.#field_name})); - } - if is_transparent(&field.attrs) { - return Ok(( - quote! {<#field_ty as Identifiable>::Id}, - quote! {Identifiable::id(&self.#field_name)}, - )); - } +fn get_id_type(emitter: &mut Emitter, input: &IdDeriveInput) -> (syn2::Type, syn2::Expr) { + for (field_index, IdField { ty, ident, id_attr }) in input.fields().iter().enumerate() { + let field_name = ident.as_ref().map_or_else( + || syn2::Index::from(field_index).to_token_stream(), + ToTokens::to_token_stream, + ); + match id_attr { + IdAttr::Normal => { + return (ty.clone(), parse_quote! 
{&self.#field_name}); } - } - syn2::Fields::Unnamed(fields) => { - for (i, field) in fields.unnamed.iter().enumerate() { - let (field_id, field_ty): (syn2::Index, _) = (i.into(), &field.ty); - - if is_identifier(&field.attrs) { - return Ok((quote! {#field_ty}, quote! {&self.#field_id})); - } - if is_transparent(&field.attrs) { - return Ok(( - quote! {<#field_ty as Identifiable>::Id}, - quote! {Identifiable::id(&self.#field_id)}, - )); - } + IdAttr::Transparent => { + return ( + parse_quote! {<#ty as Identifiable>::Id}, + parse_quote! {Identifiable::id(&self.#field_name)}, + ); + } + IdAttr::Missing => { + // nothing here } } - syn2::Fields::Unit => {} } - match &input.fields { - syn2::Fields::Named(named) => { - for field in &named.named { - let field_ty = &field.ty; - - if field.ident.as_ref().expect("Field must be named") == "id" { - return Ok((quote! {#field_ty}, quote! {&self.id})); - } - } + for field in input.fields().iter() { + if field.ident.as_ref().is_some_and(|i| i == "id") { + return (field.ty.clone(), parse_quote! {&self.id}); } - syn2::Fields::Unnamed(_) | syn2::Fields::Unit => {} } - bail!(input, "Identifier not found") -} - -fn is_identifier(attrs: &[syn2::Attribute]) -> bool { - attrs.iter().any(|attr| attr == &parse_quote! {#[id]}) -} + emit!( + emitter, + "Could not find the identifier field. Either mark it with `#[id]` or have it named `id`" + ); -fn is_transparent(attrs: &[syn2::Attribute]) -> bool { - attrs - .iter() - .any(|attr| attr == &parse_quote! {#[id(transparent)]}) + // return dummy types + (parse_quote! {()}, parse_quote! 
{()}) } diff --git a/data_model/derive/src/lib.rs b/data_model/derive/src/lib.rs index 577404cdaa4..657a23633b5 100644 --- a/data_model/derive/src/lib.rs +++ b/data_model/derive/src/lib.rs @@ -230,10 +230,15 @@ pub fn model_single(input: TokenStream) -> TokenStream { /// #[manyhow] #[proc_macro_derive(IdEqOrdHash, attributes(id, opaque))] -pub fn id_eq_ord_hash(input: TokenStream) -> Result { - let input = syn2::parse2(input)?; +pub fn id_eq_ord_hash(input: TokenStream) -> TokenStream { + let mut emitter = Emitter::new(); - id::impl_id(&input) + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = id::impl_id_eq_ord_hash(&mut emitter, &input); + emitter.finish_token_stream_with(result) } /// [`Filter`] is used for code generation of `...Filter` structs and `...EventFilter` enums, as well as diff --git a/data_model/derive/tests/id_eq_ord_hash.rs b/data_model/derive/tests/id_eq_ord_hash.rs new file mode 100644 index 00000000000..91e94df415d --- /dev/null +++ b/data_model/derive/tests/id_eq_ord_hash.rs @@ -0,0 +1,117 @@ +//! 
Basic tests for traits derived by [`IdEqOrdHash`] macro + +use std::collections::BTreeSet; + +use iroha_data_model_derive::IdEqOrdHash; + +/// fake `Identifiable` trait +/// +/// Doesn't require `Into` implementation +pub trait Identifiable: Ord + Eq { + /// Type of the entity identifier + type Id: Ord + Eq + core::hash::Hash; + + /// Get reference to the type identifier + fn id(&self) -> &Self::Id; +} + +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct ObjectId(char); + +#[derive(Debug, IdEqOrdHash)] +struct Object { + id: ObjectId, + #[allow(unused)] + data: i32, +} +#[derive(Debug, IdEqOrdHash)] +struct ObjectWithExplicitId { + #[id] + definitely_not_id: ObjectId, + #[allow(unused)] + data: i32, +} +#[derive(Debug, IdEqOrdHash)] +struct ObjectWithTransparentId { + #[id(transparent)] // delegate the id to `Object` type + definitely_not_id: Object, + #[allow(unused)] + data: i32, +} + +// some objects to play with in tests +const ID_A: ObjectId = ObjectId('A'); +const ID_B: ObjectId = ObjectId('B'); +const OBJECT_1A: Object = Object { id: ID_A, data: 1 }; +const OBJECT_1B: Object = Object { id: ID_B, data: 1 }; +const OBJECT_2A: Object = Object { id: ID_A, data: 2 }; +const EXPLICIT_OBJECT_1A: ObjectWithExplicitId = ObjectWithExplicitId { + definitely_not_id: ID_A, + data: 1, +}; +const EXPLICIT_OBJECT_1B: ObjectWithExplicitId = ObjectWithExplicitId { + definitely_not_id: ID_B, + data: 1, +}; +const EXPLICIT_OBJECT_2A: ObjectWithExplicitId = ObjectWithExplicitId { + definitely_not_id: ID_A, + data: 2, +}; +const TRANSPARENT_OBJECT_1A: ObjectWithTransparentId = ObjectWithTransparentId { + definitely_not_id: OBJECT_1A, + data: 1, +}; +const TRANSPARENT_OBJECT_1B: ObjectWithTransparentId = ObjectWithTransparentId { + definitely_not_id: OBJECT_1B, + data: 1, +}; +const TRANSPARENT_OBJECT_2A: ObjectWithTransparentId = ObjectWithTransparentId { + definitely_not_id: OBJECT_2A, + data: 2, +}; + +#[test] +fn id() { + assert_eq!(OBJECT_1A.id(), 
&ID_A); + assert_eq!(OBJECT_1B.id(), &ID_B); + assert_eq!(EXPLICIT_OBJECT_1A.id(), &ID_A); + assert_eq!(EXPLICIT_OBJECT_1B.id(), &ID_B); + assert_eq!(TRANSPARENT_OBJECT_1A.id(), &ID_A); + assert_eq!(TRANSPARENT_OBJECT_1B.id(), &ID_B); +} + +#[test] +fn id_eq() { + assert_eq!(OBJECT_1A, OBJECT_2A); + assert_ne!(OBJECT_1B, OBJECT_2A); + assert_eq!(EXPLICIT_OBJECT_1A, EXPLICIT_OBJECT_2A); + assert_ne!(EXPLICIT_OBJECT_1B, EXPLICIT_OBJECT_2A); + assert_eq!(TRANSPARENT_OBJECT_1A, TRANSPARENT_OBJECT_2A); + assert_ne!(TRANSPARENT_OBJECT_1B, TRANSPARENT_OBJECT_2A); +} + +#[test] +fn id_ord() { + assert!(OBJECT_1A < OBJECT_1B); + assert!(OBJECT_1B > OBJECT_1A); + assert!(EXPLICIT_OBJECT_1A < EXPLICIT_OBJECT_1B); + assert!(EXPLICIT_OBJECT_1B > EXPLICIT_OBJECT_1A); + assert!(TRANSPARENT_OBJECT_1A < TRANSPARENT_OBJECT_1B); + assert!(TRANSPARENT_OBJECT_1B > TRANSPARENT_OBJECT_1A); +} + +#[test] +fn id_hash() { + let mut set = BTreeSet::new(); + set.insert(OBJECT_1A); + set.insert(OBJECT_2A); + assert_eq!(set.len(), 1); + assert!(set.contains(&OBJECT_1A)); + assert!(!set.contains(&OBJECT_1B)); + assert!(set.contains(&OBJECT_2A)); + set.insert(OBJECT_1B); + assert_eq!(set.len(), 2); + assert!(set.contains(&OBJECT_1A)); + assert!(set.contains(&OBJECT_1B)); + assert!(set.contains(&OBJECT_2A)); +} diff --git a/data_model/derive/tests/filter.rs b/data_model/derive/tests/ui_pass/filter.rs similarity index 95% rename from data_model/derive/tests/filter.rs rename to data_model/derive/tests/ui_pass/filter.rs index 27e54a056f1..94dccc72e95 100644 --- a/data_model/derive/tests/filter.rs +++ b/data_model/derive/tests/ui_pass/filter.rs @@ -103,8 +103,4 @@ pub enum LayerEvent { Created(LayerId), } -#[test] -fn filter() { - // nothing much to test here... 
- // I guess we do test that it compiles -} +fn main() {} From 77ab7fc2709c4a82e2391b260e5df396f1798bbc Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Fri, 15 Sep 2023 15:01:50 +0300 Subject: [PATCH 4/9] [refactor] #3882: Make PartiallyTaggedSerialize/Deserialize use darling Signed-off-by: Nikita Strygin --- data_model/derive/src/lib.rs | 4 +- data_model/derive/src/partially_tagged.rs | 127 ++++++------------ .../derive/tests/partial_tagged_serde.rs | 7 +- 3 files changed, 49 insertions(+), 89 deletions(-) diff --git a/data_model/derive/src/lib.rs b/data_model/derive/src/lib.rs index 657a23633b5..e61eff96d8c 100644 --- a/data_model/derive/src/lib.rs +++ b/data_model/derive/src/lib.rs @@ -447,7 +447,7 @@ pub fn filter_derive(input: TokenStream) -> TokenStream { pub fn partially_tagged_serialize_derive(input: TokenStream) -> Result { let input = syn2::parse2(input)?; - Ok(partially_tagged::impl_partially_tagged_serialize(&input)) + partially_tagged::impl_partially_tagged_serialize(&input) } /// Derive `::serde::Deserialize` trait for `enum` with possibility to avoid tags for selected variants @@ -510,7 +510,7 @@ pub fn partially_tagged_serialize_derive(input: TokenStream) -> Result Result { let input = syn2::parse2(input)?; - Ok(partially_tagged::impl_partially_tagged_deserialize(&input)) + partially_tagged::impl_partially_tagged_deserialize(&input) } /// Derive macro for `HasOrigin`. 
diff --git a/data_model/derive/src/partially_tagged.rs b/data_model/derive/src/partially_tagged.rs index 830d4e65c6a..f446f3e1b91 100644 --- a/data_model/derive/src/partially_tagged.rs +++ b/data_model/derive/src/partially_tagged.rs @@ -1,89 +1,52 @@ #![allow(clippy::too_many_lines)] +// darling-generated code triggers this lint +#![allow(clippy::option_if_let_else)] + +use darling::{FromDeriveInput, FromVariant}; +use manyhow::Result; use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn2::{ - parse::{Parse, ParseStream}, - parse_quote, - punctuated::Punctuated, - spanned::Spanned, - Attribute, Generics, Ident, Token, Type, Variant, Visibility, -}; +use syn2::{parse_quote, Attribute, Generics, Ident, Type}; +#[derive(FromDeriveInput)] +#[darling(forward_attrs(serde), supports(enum_newtype))] pub struct PartiallyTaggedEnum { - attrs: Vec, ident: Ident, - variants: Punctuated, generics: Generics, + data: darling::ast::Data, + attrs: Vec, } +#[derive(FromVariant)] +#[darling(forward_attrs(serde), attributes(serde_partially_tagged))] pub struct PartiallyTaggedVariant { - attrs: Vec, ident: Ident, - ty: Type, - is_untagged: bool, -} - -impl Parse for PartiallyTaggedEnum { - fn parse(input: ParseStream) -> syn2::Result { - let mut attrs = input.call(Attribute::parse_outer)?; - let _vis = input.parse::()?; - let _enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let content; - let _brace_token = syn2::braced!(content in input); - let variants = content.parse_terminated(PartiallyTaggedVariant::parse, Token![,])?; - attrs.retain(is_serde_attr); - Ok(PartiallyTaggedEnum { - attrs, - ident, - variants, - generics, - }) - } -} - -impl Parse for PartiallyTaggedVariant { - fn parse(input: ParseStream) -> syn2::Result { - let variant = input.parse::()?; - let Variant { - ident, - fields, - mut attrs, - .. 
- } = variant; - let field = match fields { - syn2::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => fields - .unnamed - .into_iter() - .next() - .expect("Guaranteed to have exactly one field"), - fields => { - return Err(syn2::Error::new( - fields.span(), - "Only supports tuple variants with single field", - )) - } - }; - let ty = field.ty; - let is_untagged = attrs.iter().any(is_untagged_attr); - attrs.retain(is_serde_attr); - Ok(PartiallyTaggedVariant { - attrs, - ident, - ty, - is_untagged, - }) - } + fields: darling::ast::Fields, + attrs: Vec, + #[darling(default)] + untagged: bool, } impl PartiallyTaggedEnum { fn variants(&self) -> impl Iterator { - self.variants.iter() + match &self.data { + darling::ast::Data::Enum(variants) => variants.iter(), + _ => unreachable!( + "Only enums are supported. Enforced by `darling(supports(enum_newtype))`" + ), + } } fn untagged_variants(&self) -> impl Iterator { - self.variants.iter().filter(|variant| variant.is_untagged) + self.variants().filter(|variant| variant.untagged) + } +} + +impl PartiallyTaggedVariant { + fn ty(&self) -> &syn2::Type { + self.fields.fields.first().expect( + "BUG: Only newtype enums are supported. 
Enforced by `darling(supports(enum_newtype))`", + ) } } @@ -95,26 +58,16 @@ fn variants_to_tuple<'lt, I: Iterator>( (Vec::new(), Vec::new(), Vec::new()), |(mut idents, mut types, mut attrs), variant| { idents.push(&variant.ident); - types.push(&variant.ty); + types.push(&variant.ty()); attrs.push(&variant.attrs); (idents, types, attrs) }, ) } -/// Check if enum variant should be treated as untagged -fn is_untagged_attr(attr: &Attribute) -> bool { - attr == &parse_quote!(#[serde_partially_tagged(untagged)]) -} +pub fn impl_partially_tagged_serialize(input: &syn2::DeriveInput) -> Result { + let enum_ = PartiallyTaggedEnum::from_derive_input(input)?; -/// Check if `#[serde...]` attribute -fn is_serde_attr(attr: &Attribute) -> bool { - attr.path() - .get_ident() - .map_or_else(|| false, |ident| ident.to_string().eq("serde")) -} - -pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStream { let enum_ident = &enum_.ident; let enum_attrs = &enum_.attrs; let ref_internal_repr_ident = format_ident!("{}RefInternalRepr", enum_ident); @@ -133,7 +86,7 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre let (ref_internal_impl_generics, ref_internal_type_generics, ref_internal_where_clause) = ref_internal_generics.split_for_impl(); - quote! { + Ok(quote! 
{ impl #impl_generics ::serde::Serialize for #enum_ident #type_generics #where_clause { fn serialize(&self, serializer: S) -> Result where @@ -181,10 +134,12 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre wrapper.serialize(serializer) } } - } + }) } -pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenStream { +pub fn impl_partially_tagged_deserialize(input: &syn2::DeriveInput) -> Result { + let enum_ = PartiallyTaggedEnum::from_derive_input(input)?; + let enum_ident = &enum_.ident; let enum_attrs = &enum_.attrs; let internal_repr_ident = format_ident!("{}InternalRepr", enum_ident); @@ -211,7 +166,7 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt let (internal_repr_impl_generics, internal_repr_type_generics, internal_repr_where_clause) = internal_repr_generics.split_for_impl(); - quote! { + Ok(quote! { impl #impl_generics ::serde::Deserialize<'de> for #enum_ident #type_generics #where_clause { fn deserialize(deserializer: D) -> Result where @@ -346,5 +301,5 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt } } } - } + }) } diff --git a/data_model/derive/tests/partial_tagged_serde.rs b/data_model/derive/tests/partial_tagged_serde.rs index 99e11e06e0e..d04f79868e6 100644 --- a/data_model/derive/tests/partial_tagged_serde.rs +++ b/data_model/derive/tests/partial_tagged_serde.rs @@ -7,6 +7,7 @@ use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; #[derive(Debug, PartialEq, Eq, PartiallyTaggedDeserialize, PartiallyTaggedSerialize)] enum Value { Bool(bool), + #[serde(rename = "StringRenamed")] String(String), #[serde_partially_tagged(untagged)] Numeric(NumericValue), @@ -62,7 +63,11 @@ fn partially_tagged_serde() { Value::String("I am string".to_owned()), Value::Numeric(NumericValue(42)), ]; - let serialized_values = [r#"{"Bool":true}"#, r#"{"String":"I am string"}"#, r#""42""#]; + let serialized_values = [ + 
r#"{"Bool":true}"#, + r#"{"StringRenamed":"I am string"}"#, + r#""42""#, + ]; for (value, serialized_value) in values.iter().zip(serialized_values.iter()) { let serialized = serde_json::to_string(value) From 8a279e3dbfb1dce0386ef7c76e5b9bc72283447d Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Fri, 15 Sep 2023 15:44:25 +0300 Subject: [PATCH 5/9] [feature] #3737: Add support for usage of Self type in serde partially tagged enums Signed-off-by: Nikita Strygin --- data_model/derive/Cargo.toml | 2 +- .../mod.rs} | 35 ++++++++--- .../src/partially_tagged/resolve_self.rs | 63 +++++++++++++++++++ .../derive/tests/partial_tagged_serde_self.rs | 39 ++++++++++++ data_model/src/predicate.rs | 6 +- 5 files changed, 132 insertions(+), 13 deletions(-) rename data_model/derive/src/{partially_tagged.rs => partially_tagged/mod.rs} (92%) create mode 100644 data_model/derive/src/partially_tagged/resolve_self.rs create mode 100644 data_model/derive/tests/partial_tagged_serde_self.rs diff --git a/data_model/derive/Cargo.toml b/data_model/derive/Cargo.toml index c170ebc37e1..5cb877d609f 100644 --- a/data_model/derive/Cargo.toml +++ b/data_model/derive/Cargo.toml @@ -14,7 +14,7 @@ workspace = true proc-macro = true [dependencies] -syn2 = { workspace = true, features = ["default", "full", "extra-traits"] } +syn2 = { workspace = true, features = ["default", "full", "extra-traits", "visit-mut"] } quote = { workspace = true } darling = { workspace = true } proc-macro2 = { workspace = true } diff --git a/data_model/derive/src/partially_tagged.rs b/data_model/derive/src/partially_tagged/mod.rs similarity index 92% rename from data_model/derive/src/partially_tagged.rs rename to data_model/derive/src/partially_tagged/mod.rs index f446f3e1b91..a17fd95aeb0 100644 --- a/data_model/derive/src/partially_tagged.rs +++ b/data_model/derive/src/partially_tagged/mod.rs @@ -2,6 +2,8 @@ // darling-generated code triggers this lint #![allow(clippy::option_if_let_else)] +mod resolve_self; + use 
darling::{FromDeriveInput, FromVariant}; use manyhow::Result; use proc_macro2::TokenStream; @@ -40,25 +42,36 @@ impl PartiallyTaggedEnum { fn untagged_variants(&self) -> impl Iterator { self.variants().filter(|variant| variant.untagged) } + + /// Returns a type that corresponds to `Self`, handling the generics as necessary + fn self_ty(&self) -> syn2::Type { + let ident = &self.ident; + let (_, type_generics, _) = self.generics.split_for_impl(); + + parse_quote!(#ident #type_generics) + } } impl PartiallyTaggedVariant { - fn ty(&self) -> &syn2::Type { - self.fields.fields.first().expect( + fn ty(&self, self_ty: &syn2::Type) -> syn2::Type { + let ty = self.fields.fields.first().expect( "BUG: Only newtype enums are supported. Enforced by `darling(supports(enum_newtype))`", - ) + ).clone(); + + resolve_self::resolve_self(self_ty, ty) } } /// Convert from vector of variants to tuple of vectors consisting of variant's fields fn variants_to_tuple<'lt, I: Iterator>( + self_ty: &syn2::Type, variants: I, -) -> (Vec<&'lt Ident>, Vec<&'lt Type>, Vec<&'lt [Attribute]>) { +) -> (Vec<&'lt Ident>, Vec, Vec<&'lt [Attribute]>) { variants.fold( (Vec::new(), Vec::new(), Vec::new()), |(mut idents, mut types, mut attrs), variant| { idents.push(&variant.ident); - types.push(&variant.ty()); + types.push(variant.ty(self_ty)); attrs.push(&variant.attrs); (idents, types, attrs) }, @@ -72,9 +85,11 @@ pub fn impl_partially_tagged_serialize(input: &syn2::DeriveInput) -> Result Result { + self_ty: &'a syn2::Type, +} + +impl VisitMut for Visitor<'_> { + fn visit_type_mut(&mut self, ty: &mut syn2::Type) { + match ty { + syn2::Type::Path(path_ty) + if path_ty.qself.is_none() && path_ty.path.is_ident("Self") => + { + *ty = self.self_ty.clone(); + } + _ => syn2::visit_mut::visit_type_mut(self, ty), + } + } +} + +/// Transforms the [`resolving_ty`] by replacing `Self` with [`self_ty`]. 
+/// +/// This is required to be able to use `Self` in `PartiallyTaggedSerialize` and `PartiallyTaggedDeserialize`, +/// as they define an additional intermediate type during serialization/deserialization. Using `Self` there would refer to an incorrect type. +pub fn resolve_self(self_ty: &syn2::Type, mut resolving_ty: syn2::Type) -> syn2::Type { + Visitor { self_ty }.visit_type_mut(&mut resolving_ty); + resolving_ty +} + +#[cfg(test)] +mod tests { + use quote::ToTokens; + use syn2::{parse_quote, Type}; + + #[test] + fn test_resolve_self() { + let test_types = [ + parse_quote!(i32), + parse_quote!(Self), + parse_quote!(Vec), + parse_quote!((Self, Self)), + parse_quote!(::Type), + ]; + let expected_types = [ + parse_quote!(i32), + parse_quote!(()), + parse_quote!(Vec<()>), + parse_quote!(((), ())), + parse_quote!(<() as Trait>::Type), + ]; + let _: &Type = &test_types[0]; + let _: &Type = &expected_types[0]; + + for (test_type, expected_type) in test_types.iter().zip(expected_types.iter()) { + let resolved = super::resolve_self(&parse_quote!(()), test_type.clone()); + assert_eq!( + resolved, + *expected_type, + "Failed to resolve `Self` in `{}`", + test_type.to_token_stream() + ); + } + } +} diff --git a/data_model/derive/tests/partial_tagged_serde_self.rs b/data_model/derive/tests/partial_tagged_serde_self.rs new file mode 100644 index 00000000000..e4520e6ee03 --- /dev/null +++ b/data_model/derive/tests/partial_tagged_serde_self.rs @@ -0,0 +1,39 @@ +//! 
A test for `PartiallyTaggedSerialize` and `PartiallyTaggedDeserialize` which uses `Self` as a type + +use iroha_data_model_derive::{PartiallyTaggedDeserialize, PartiallyTaggedSerialize}; + +#[derive(Debug, PartialEq, Eq, PartiallyTaggedSerialize, PartiallyTaggedDeserialize)] +enum Expr { + Negate(Box), + #[serde_partially_tagged(untagged)] + Atom(T), +} + +#[test] +fn partially_tagged_serde() { + use Expr::*; + + let values = [ + Atom(42), + Negate(Box::new(Atom(42))), + Negate(Box::new(Negate(Box::new(Atom(42))))), + ]; + let serialized_values = [r#"42"#, r#"{"Negate":42}"#, r#"{"Negate":{"Negate":42}}"#]; + + for (value, serialized_value) in values.iter().zip(serialized_values.iter()) { + let serialized = serde_json::to_string(value) + .unwrap_or_else(|e| panic!("Failed to serialize `{:?}`: {:?}", value, e)); + assert_eq!( + serialized, *serialized_value, + "Serialized form of `{:?}` does not match the expected value", + value + ); + let deserialized: Expr = serde_json::from_str(serialized_value) + .unwrap_or_else(|e| panic!("Failed to deserialize `{:?}`: {:?}", serialized_value, e)); + assert_eq!( + *value, deserialized, + "Deserialized form of `{:?}` does not match the expected value", + value + ); + } +} diff --git a/data_model/src/predicate.rs b/data_model/src/predicate.rs index aff82bc25de..da9cc267b59 100644 --- a/data_model/src/predicate.rs +++ b/data_model/src/predicate.rs @@ -91,11 +91,11 @@ macro_rules! nontrivial { // references (e.g. &Value). pub enum GenericPredicateBox

{ /// Logically `&&` the results of applying the predicates. - And(NonTrivial>), + And(NonTrivial), /// Logically `||` the results of applying the predicats. - Or(NonTrivial>), + Or(NonTrivial), /// Negate the result of applying the predicate. - Not(Box>), + Not(Box), /// The raw predicate that must be applied. #[serde_partially_tagged(untagged)] Raw(P), From 04df2c61a00340d4d2363d287146ff481b2b606a Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Mon, 18 Sep 2023 16:10:18 +0300 Subject: [PATCH 6/9] [refactor] #2437: Improve naming, simplify repeated .filter_maps & get rid of unnecessary .except in derive(Filter) This addresses the last of the concerns raised in #2437 Signed-off-by: Nikita Strygin --- data_model/derive/src/filter.rs | 157 ++++++++++++++++---------------- 1 file changed, 79 insertions(+), 78 deletions(-) diff --git a/data_model/derive/src/filter.rs b/data_model/derive/src/filter.rs index 3fccf4e73ac..42a79a93270 100644 --- a/data_model/derive/src/filter.rs +++ b/data_model/derive/src/filter.rs @@ -24,7 +24,8 @@ enum EventVariant { /// Delegates all the filterting to the corresponding event's filter. Delegating { variant_name: Ident, - delegated_event_ty_name: Ident, + /// A name of the event this variant delegates to, without the the `Event` suffix + delegated_event_name_base: String, }, /// An actual event. Has either an Id or an identifiable object as a payload /// The presense of the Id field is not required by this macro per se, but will be enfored by `OriginFilter` requiring a `HasOrigin` impl. 
@@ -48,10 +49,14 @@ impl FromVariant for EventVariant { return Err(darling::Error::custom("Only identifiers supported as event types").with_span(first_field_ty)); }; - if first_field_ty_name.to_string().ends_with("Event") { + // What clippy suggests is much less readable in this case + #[allow(clippy::option_if_let_else)] + if let Some(delegated_event_name_base) = + first_field_ty_name.to_string().strip_suffix("Event") + { Ok(EventVariant::Delegating { variant_name: variant.ident.clone(), - delegated_event_ty_name: first_field_ty_name.clone(), + delegated_event_name_base: delegated_event_name_base.to_string(), }) } else { Ok(EventVariant::Direct(variant.ident.clone())) @@ -67,90 +72,86 @@ impl EventEnum { } } + fn filter_map_variants Option>(&self, fun: F) -> Vec { + self.variants().iter().filter_map(fun).collect() + } + /// Used to produce fields like `ByAccount(crate::prelude::FilterOpt)` in `DomainEventFilter`. - fn generate_filter_variants_for_delegating_events(&self) -> Vec { - self.variants() - .iter() - .filter_map(|variant| match variant { - EventVariant::Direct(_) => None, - EventVariant::Delegating { - variant_name, - delegated_event_ty_name, - } => { - // E.g. `Account` field in the event => `ByAccount` in the event filter - let filter_variant_ident = format_ident!("By{}", variant_name); - // E.g. `AccountEvent` inner field from `Account` variant in event => - // `AccountFilter` inside the event filter - let inner_filter_ident = - format_ident!( - "{}Filter", - delegated_event_ty_name.to_string().strip_suffix("Event").expect( - "BUG: Variant name should have suffix `Event` (checked in FromVariant)" - ), - ); - let import_path = quote! {crate::prelude}; - Some(quote! 
{ - #filter_variant_ident(#import_path::FilterOpt<#inner_filter_ident>) }) - } - }) - .collect() + fn generate_filter_variants_for_delegating_events(&self) -> Vec { + self.filter_map_variants(|variant| { + if let EventVariant::Delegating { + variant_name, + delegated_event_name_base, + } = variant + { + // E.g. `Account` field in the event => `ByAccount` in the event filter + let filter_variant_ident = format_ident!("By{}", variant_name); + // E.g. `AccountEvent` inner field from `Account` variant in event => + // `AccountFilter` inside the event filter + let inner_filter_ident = format_ident!("{}Filter", delegated_event_name_base); + let import_path = quote! {crate::prelude}; + Some(quote! { + #filter_variant_ident(#import_path::FilterOpt<#inner_filter_ident>) + }) + } else { + None + } + }) } /// Used to produce fields like `ByCreated` in `DomainEventFilter`. fn generate_filter_variants_for_direct_events(&self) -> Vec { - self.variants() - .iter() - .filter_map(|variant| match variant { - EventVariant::Direct(event_variant_ident) => { - // Event fields such as `MetadataRemoved` get mapped to `ByMetadataRemoved` - let filter_variant_ident = format_ident!("By{}", event_variant_ident); - Some(filter_variant_ident) - } - EventVariant::Delegating { .. } => None, - }) - .collect() + self.filter_map_variants(|variant| { + if let EventVariant::Direct(event_variant_ident) = variant { + // Event fields such as `MetadataRemoved` get mapped to `ByMetadataRemoved` + let filter_variant_ident = format_ident!("By{}", event_variant_ident); + Some(filter_variant_ident) + } else { + None + } + }) } /// Match arms for `Filter` impls of event filters of the form /// `(Self::ByAccount(filter_opt), crate::prelude::DomainEvent::Account(event)) => {filter_opt.matches(event)}`. 
- fn generate_filter_impls_for_delegaring_events(&self) -> Vec { - self.variants() - .iter() - .filter_map(|variant| match variant { - EventVariant::Direct(_) => None, - EventVariant::Delegating { - variant_name, - .. - } => { - let event_ident = &self.ident; - let filter_variant_ident = format_ident!("By{}", variant_name); - let import_path = quote! {crate::prelude}; - Some(quote! { - (Self::#filter_variant_ident(filter_opt), #import_path::#event_ident::#variant_name(event)) => { - filter_opt.matches(event) - }}) - - }}).collect() + fn generate_filter_arms_for_delegating_events(&self) -> Vec { + self.filter_map_variants(|variant| { + if let EventVariant::Delegating { variant_name, .. } = variant { + let event_ident = &self.ident; + let filter_variant_ident = format_ident!("By{}", variant_name); + let import_path = quote! {crate::prelude}; + Some(quote! { + ( + Self::#filter_variant_ident(filter_opt), + #import_path::#event_ident::#variant_name(event) + ) => { + filter_opt.matches(event) + } + }) + } else { + None + } + }) } /// Match arms for `Filter` impls of event filters of the form /// `(Self::ByCreated, crate::prelude::DomainEvent::Created(_))`. - fn generate_filter_impls_for_direct_events(&self) -> Vec { - self.variants() - .iter() - .filter_map(|variant| match variant { - EventVariant::Direct(event_variant_ident) => { - let event_ident = &self.ident; - let filter_variant_ident = format_ident!("By{}", event_variant_ident); - let import_path = quote! {crate::prelude}; - Some( - quote! { - (Self::#filter_variant_ident, #import_path::#event_ident::#event_variant_ident(_)) - }) - }, - EventVariant::Delegating { .. } => None, - }) - .collect() + fn generate_filter_patterns_for_direct_events(&self) -> Vec { + self.filter_map_variants(|variant| { + if let EventVariant::Direct(event_variant_ident) = variant { + let event_ident = &self.ident; + let filter_variant_ident = format_ident!("By{}", event_variant_ident); + let import_path = quote! 
{crate::prelude}; + Some(quote! { + ( + Self::#filter_variant_ident, + #import_path::#event_ident::#event_variant_ident(_) + ) + }) + } else { + None + } + }) } } @@ -167,8 +168,8 @@ fn impl_event_filter(event: &EventEnum) -> proc_macro2::TokenStream { let id_variants = event.generate_filter_variants_for_direct_events(); let event_variants = event.generate_filter_variants_for_delegating_events(); - let id_impls = event.generate_filter_impls_for_direct_events(); - let event_impls = event.generate_filter_impls_for_delegaring_events(); + let id_patterns = event.generate_filter_patterns_for_direct_events(); + let event_arms = event.generate_filter_arms_for_delegating_events(); let event_filter_ident = format_ident!("{}Filter", event_ident); let import_path = quote! { crate::prelude }; @@ -193,8 +194,8 @@ fn impl_event_filter(event: &EventEnum) -> proc_macro2::TokenStream { fn matches(&self, event: &#imp_event) -> bool { match (self, event) { - #(#id_impls)|* => true, - #(#event_impls),* + #(#id_patterns)|* => true, + #(#event_arms),* _ => false, } } From 21ab1e0b0a39507bb8597fe74561ddf0898a6c7f Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Mon, 18 Sep 2023 17:11:21 +0300 Subject: [PATCH 7/9] [refactor] #3882: Add tests for derive(HasOrigin) macro, reduce repetition in derive(IdEqOrdHash), fix error reporting on stable Signed-off-by: Nikita Strygin --- data_model/derive/src/id.rs | 25 +----- data_model/derive/tests/has_origin.rs | 53 +++++++++++ .../ui_fail/has_origin_multiple_attributes.rs | 9 ++ .../has_origin_multiple_attributes.stderr | 6 ++ macro/utils/src/lib.rs | 87 +++++++++++++++---- 5 files changed, 141 insertions(+), 39 deletions(-) create mode 100644 data_model/derive/tests/has_origin.rs create mode 100644 data_model/derive/tests/ui_fail/has_origin_multiple_attributes.rs create mode 100644 data_model/derive/tests/ui_fail/has_origin_multiple_attributes.stderr diff --git a/data_model/derive/src/id.rs b/data_model/derive/src/id.rs index 
18af318dcf7..39983e1ad8e 100644 --- a/data_model/derive/src/id.rs +++ b/data_model/derive/src/id.rs @@ -1,7 +1,7 @@ #![allow(clippy::str_to_string, clippy::mixed_read_write_in_expression)] use darling::{FromAttributes, FromDeriveInput, FromField}; -use iroha_macro_utils::Emitter; +use iroha_macro_utils::{find_single_attr_opt, Emitter}; use manyhow::emit; use proc_macro2::TokenStream; use quote::{quote, ToTokens}; @@ -20,27 +20,8 @@ enum IdAttr { impl FromAttributes for IdAttr { fn from_attributes(attrs: &[syn2::Attribute]) -> darling::Result { let mut accumulator = darling::error::Accumulator::default(); - let attrs = attrs - .iter() - .filter(|v| v.path().is_ident("id")) - .collect::>(); - let attr = match attrs.as_slice() { - [] => { - return accumulator.finish_with(IdAttr::Missing); - } - [attr] => attr, - [attr, ref tail @ ..] => { - accumulator.push( - darling::Error::custom("Only one `#[id]` attribute is allowed!").with_span( - &tail - .iter() - .map(syn2::spanned::Spanned::span) - .reduce(|a, b| a.join(b).unwrap()) - .unwrap(), - ), - ); - attr - } + let Some(attr) = find_single_attr_opt(&mut accumulator, "id", attrs) else { + return accumulator.finish_with(IdAttr::Missing); }; let result = match &attr.meta { diff --git a/data_model/derive/tests/has_origin.rs b/data_model/derive/tests/has_origin.rs new file mode 100644 index 00000000000..8522c4268fd --- /dev/null +++ b/data_model/derive/tests/has_origin.rs @@ -0,0 +1,53 @@ +use iroha_data_model::prelude::{HasOrigin, Identifiable}; +use iroha_data_model_derive::{HasOrigin, IdEqOrdHash}; + +#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct ObjectId(pub i32); + +// fake impl for `#[derive(IdEqOrdHash)]` +impl From for iroha_data_model::IdBox { + fn from(_: ObjectId) -> Self { + unimplemented!("fake impl") + } +} + +#[derive(Debug, IdEqOrdHash)] +struct Object { + id: ObjectId, +} + +impl Object { + fn id(&self) -> &ObjectId { + &self.id + } +} + +#[allow(clippy::enum_variant_names)] // it's a test, 
duh +#[derive(Debug, HasOrigin)] +#[has_origin(origin = Object)] +enum ObjectEvent { + EventWithId(ObjectId), + #[has_origin(event => &event.0)] + EventWithExtractor((ObjectId, i32)), + #[has_origin(obj => obj.id())] + EventWithAnotherExtractor(Object), +} + +#[test] +fn has_origin() { + let events = vec![ + ObjectEvent::EventWithId(ObjectId(1)), + ObjectEvent::EventWithExtractor((ObjectId(2), 2)), + ObjectEvent::EventWithAnotherExtractor(Object { id: ObjectId(3) }), + ]; + let expected_ids = vec![ObjectId(1), ObjectId(2), ObjectId(3)]; + + for (event, expected_id) in events.into_iter().zip(expected_ids) { + assert_eq!( + event.origin_id(), + &expected_id, + "mismatched origin id for event {:?}", + event + ); + } +} diff --git a/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.rs b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.rs new file mode 100644 index 00000000000..ad09416af20 --- /dev/null +++ b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.rs @@ -0,0 +1,9 @@ +use iroha_data_model_derive::HasOrigin; + +#[derive(HasOrigin)] +#[has_origin(origin = Object)] +#[has_origin(origin = Object)] +#[has_origin(origin = Object)] +enum MultipleAttributes {} + +fn main() {} diff --git a/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.stderr b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.stderr new file mode 100644 index 00000000000..35511493350 --- /dev/null +++ b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.stderr @@ -0,0 +1,6 @@ +error: Only one #[has_origin] attribute is allowed! + --> tests/ui_fail/has_origin_multiple_attributes.rs:5:1 + | +5 | / #[has_origin(origin = Object)] +6 | | #[has_origin(origin = Object)] + | |______________________________^ diff --git a/macro/utils/src/lib.rs b/macro/utils/src/lib.rs index 9f19785d07c..09eba6f07e8 100644 --- a/macro/utils/src/lib.rs +++ b/macro/utils/src/lib.rs @@ -70,42 +70,89 @@ macro_rules! 
attr_struct { }; } -/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. /// -/// If no attribute with specified name is found, returns `Ok(None)`. -pub fn parse_single_list_attr_opt( - attr_name: &str, - attrs: &[syn2::Attribute], -) -> darling::Result> { - let mut accumulator = darling::error::Accumulator::default(); +/// Extension trait for [`darling::Error`]. /// -/// If no attribute with specified name is found, returns `Ok(None)`. -pub fn parse_single_list_attr_opt( - attr_name: &str, - attrs: &[syn2::Attribute], -) -> darling::Result> { - let mut accumulator = darling::error::Accumulator::default(); +pub trait DarlingErrorExt: Sized { + /// Attaches a combination of multiple spans to the error. + /// + /// Note that it only attaches the first span on stable rustc, as the `Span::join` method is not yet stabilized (https://github.com/rust-lang/rust/issues/54725#issuecomment-649078500). + fn with_spans(self, spans: impl IntoIterator>) -> Self; +} - // first, ensure there is only one attribute with the requested name - // take the first one if there are multiple +impl DarlingErrorExt for darling::Error { + fn with_spans(self, spans: impl IntoIterator>) -> Self { + // Unfortunately, the story for combining multiple spans in rustc proc macro is not yet complete. + // (see https://github.com/rust-lang/rust/issues/54725#issuecomment-649078500, https://github.com/rust-lang/rust/issues/54725#issuecomment-1547795742) + // syn does some hacks to get error reporting that is a bit better: https://docs.rs/syn/2.0.37/src/syn/error.rs.html#282 + // we can't do that because darling's error type does not let us do that. + + // on nightly, we are fine, as `.join` method works. On stable, we fall back to returning the first span. + + let mut iter = spans.into_iter(); + let Some(first) = iter.next() else { + return self; + }; + let first: proc_macro2::Span = first.into(); + let r = iter + .try_fold(first, |a, b| a.join(b.into())) + .unwrap_or(first); + + self.with_span(&r) + } +} + +/// Finds an optional single attribute with specified name. 
+/// +/// Returns `None` if no attributes with specified name are found. +/// +/// Emits an error into accumulator if multiple attributes with specified name are found. +#[must_use] +pub fn find_single_attr_opt<'a>( + accumulator: &mut darling::error::Accumulator, + attr_name: &str, + attrs: &'a [syn2::Attribute], +) -> Option<&'a syn2::Attribute> { let matching_attrs = attrs .iter() .filter(|a| a.path().is_ident(attr_name)) .collect::>(); let attr = match *matching_attrs.as_slice() { [] => { - return accumulator.finish_with(None); + return None; } [attr] => attr, [attr, ref tail @ ..] => { // allow parsing to proceed further to collect more errors accumulator.push( darling::Error::custom(format!("Only one #[{}] attribute is allowed!", attr_name)) - .with_span( - &tail - .iter() - .map(syn2::spanned::Spanned::span) - .reduce(|a, b| a.join(b).unwrap()) - .unwrap(), - ), + .with_spans(tail.iter().map(syn2::spanned::Spanned::span)), ); attr } }; + Some(attr) +} + +/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. +/// +/// If no attribute with specified name is found, returns `Ok(None)`. +/// +/// # Errors +/// +/// - If multiple attributes with specified name are found +/// - If attribute is not a list +pub fn parse_single_list_attr_opt( + attr_name: &str, + attrs: &[syn2::Attribute], +) -> darling::Result> { + let mut accumulator = darling::error::Accumulator::default(); + + let Some(attr) = find_single_attr_opt(&mut accumulator, attr_name, attrs) else { + return accumulator.finish_with(None); + }; + let mut kind = None; match &attr.meta { @@ -123,6 +170,12 @@ pub fn parse_single_list_attr_opt( /// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. /// /// If no attribute with specified name is found, returns an error. 
+/// +/// # Errors +/// +/// - If multiple attributes with specified name are found +/// - If attribute is not a list +/// - If attribute is not found pub fn parse_single_list_attr( attr_name: &str, attrs: &[syn2::Attribute], From e99f198d4ea950ee07fb04d33b3e27e9d5f66ce8 Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Mon, 18 Sep 2023 17:17:20 +0300 Subject: [PATCH 8/9] [refactor] #3882: Clean up Emitter APIs documentation to make clippy happy Signed-off-by: Nikita Strygin --- macro/utils/src/emitter.rs | 27 ++++++++++++++++++++++++--- macro/utils/src/lib.rs | 3 ++- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/macro/utils/src/emitter.rs b/macro/utils/src/emitter.rs index f5edda28b1b..9509b43930d 100644 --- a/macro/utils/src/emitter.rs +++ b/macro/utils/src/emitter.rs @@ -16,6 +16,7 @@ pub struct Emitter { } impl Emitter { + /// Creates a new emitter. It must be consumed by calling any of the `finish_*` functions before dropping or it will panic. pub fn new() -> Self { Self { inner: manyhow::Emitter::new(), @@ -54,18 +55,32 @@ impl Emitter { } /// Consume the emitter, returning a [`manyhow::Error`] if any errors were emitted. + /// + /// # Errors + /// + /// This function returns an error if the emitter has some errors accumulated. pub fn finish(mut self) -> manyhow::Result<()> { self.bomb.defuse(); self.inner.into_result() } /// Same as [`Emitter::finish`], but returns the given value if no errors were emitted. + /// + /// # Errors + /// + /// This function returns an error if the emitter has some errors accumulated. #[allow(unused)] pub fn finish_with(self, result: T) -> manyhow::Result { self.finish().map(|_| result) } /// Handles the given [`manyhow::Result`] and consumes the emitter. 
+ /// + /// # Errors + /// + /// This function returns an error if: + /// - The given result is `Err` + /// - The emitter has some errors accumulated #[allow(unused)] pub fn finish_and( mut self, @@ -81,7 +96,7 @@ impl Emitter { } /// Consume the emitter, convert all errors into a token stream and append it to the given token stream. - pub fn into_tokens(self, tokens: &mut TokenStream) { + pub fn finish_to_token_stream(self, tokens: &mut TokenStream) { match self.finish() { Ok(()) => {} Err(e) => e.to_tokens(tokens), @@ -91,7 +106,7 @@ impl Emitter { /// Consume the emitter, convert all errors into a token stream. pub fn finish_token_stream(self) -> TokenStream { let mut tokens_stream = TokenStream::new(); - self.into_tokens(&mut tokens_stream); + self.finish_to_token_stream(&mut tokens_stream); tokens_stream } @@ -99,11 +114,17 @@ impl Emitter { /// /// This function is useful when you want to handle errors in a macro, but want to emit some tokens even in case of an error. pub fn finish_token_stream_with(self, mut tokens_stream: TokenStream) -> TokenStream { - self.into_tokens(&mut tokens_stream); + self.finish_to_token_stream(&mut tokens_stream); tokens_stream } } +impl Default for Emitter { + fn default() -> Self { + Self::new() + } +} + impl Extend for Emitter { fn extend>(&mut self, iter: T) { self.inner.extend(iter) diff --git a/macro/utils/src/lib.rs b/macro/utils/src/lib.rs index 09eba6f07e8..1e069d1bfd8 100644 --- a/macro/utils/src/lib.rs +++ b/macro/utils/src/lib.rs @@ -76,7 +76,8 @@ macro_rules! attr_struct { pub trait DarlingErrorExt: Sized { /// Attaches a combination of multiple spans to the error. /// - /// Note that it only attaches the first span on stable rustc, as the `Span::join` method is not yet stabilized (https://github.com/rust-lang/rust/issues/54725#issuecomment-649078500). + /// Note that it only attaches the first span on stable rustc, as the `Span::join` method is not yet stabilized (). 
+ #[must_use] fn with_spans(self, spans: impl IntoIterator>) -> Self; } From 71f92cef363fa71248f9761ff58e97d9b3ac5d86 Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Mon, 25 Sep 2023 14:38:02 +0300 Subject: [PATCH 9/9] [refactor] #3882: Add basic generics support to `derive(HasOrigin)` Signed-off-by: Nikita Strygin --- Cargo.lock | 4 +- data_model/derive/src/has_origin.rs | 29 ++++++---- data_model/derive/src/lib.rs | 12 +++-- .../derive/tests/has_origin_generics.rs | 53 +++++++++++++++++++ 4 files changed, 83 insertions(+), 15 deletions(-) create mode 100644 data_model/derive/tests/has_origin_generics.rs diff --git a/Cargo.lock b/Cargo.lock index 0f31f1171e9..9407cd43944 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3183,7 +3183,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.26", + "syn 2.0.28", "trybuild", ] @@ -3321,7 +3321,7 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] diff --git a/data_model/derive/src/has_origin.rs b/data_model/derive/src/has_origin.rs index 18df35f8d10..31033128b42 100644 --- a/data_model/derive/src/has_origin.rs +++ b/data_model/derive/src/has_origin.rs @@ -5,8 +5,9 @@ )] use darling::{FromDeriveInput, FromVariant}; -use iroha_macro_utils::{attr_struct2, parse_single_list_attr, parse_single_list_attr_opt}; -use manyhow::Result; +use iroha_macro_utils::{ + attr_struct2, parse_single_list_attr, parse_single_list_attr_opt, Emitter, +}; use proc_macro2::TokenStream; use quote::quote; use syn2::{parse_quote, Ident, Token, Type}; @@ -19,6 +20,8 @@ const HAS_ORIGIN_ATTR: &str = "has_origin"; pub struct HasOriginEnum { ident: Ident, + #[allow(unused)] + generics: syn2::Generics, variants: Vec, origin: Type, } @@ -26,6 +29,7 @@ pub struct HasOriginEnum { impl FromDeriveInput for HasOriginEnum { fn from_derive_input(input: &syn2::DeriveInput) -> darling::Result { let ident = input.ident.clone(); + let generics = input.generics.clone(); let Some(variants) = 
darling::ast::Data::::try_from(&input.data)?.take_enum() else { return Err(darling::Error::custom("Expected enum")); @@ -35,6 +39,7 @@ impl FromDeriveInput for HasOriginEnum { Ok(Self { ident, + generics, variants, origin, }) @@ -71,12 +76,14 @@ attr_struct2! { } } -pub fn impl_has_origin(input: &syn2::DeriveInput) -> Result { - let enum_ = HasOriginEnum::from_derive_input(input)?; +pub fn impl_has_origin(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream { + let Some(enum_) = emitter.handle(HasOriginEnum::from_derive_input(input)) else { + return quote!(); + }; - // TODO: verify enum is non-empty (or make it work with empty enums) - // TODO: verify all the enum variants are newtype variants - // TODO: verify there are no generics on the enum + if enum_.variants.is_empty() { + return quote!(); + } let enum_ident = &enum_.ident; let enum_origin = &enum_.origin; @@ -96,8 +103,10 @@ pub fn impl_has_origin(input: &syn2::DeriveInput) -> Result { }) .collect::>(); - Ok(quote! { - impl HasOrigin for #enum_ident { + let (impl_generics, ty_generics, where_clause) = enum_.generics.split_for_impl(); + + quote! 
{ + impl #impl_generics HasOrigin for #enum_ident #ty_generics #where_clause { type Origin = #enum_origin; fn origin_id(&self) -> &::Id { @@ -109,5 +118,5 @@ pub fn impl_has_origin(input: &syn2::DeriveInput) -> Result { } } } - }) + } } diff --git a/data_model/derive/src/lib.rs b/data_model/derive/src/lib.rs index e61eff96d8c..6351fa41329 100644 --- a/data_model/derive/src/lib.rs +++ b/data_model/derive/src/lib.rs @@ -598,8 +598,14 @@ pub fn partially_tagged_deserialize_derive(input: TokenStream) -> Result Result { - let input = syn2::parse2(input)?; +pub fn has_origin_derive(input: TokenStream) -> TokenStream { + let mut emitter = Emitter::new(); - has_origin::impl_has_origin(&input) + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream() + }; + + let result = has_origin::impl_has_origin(&mut emitter, &input); + + emitter.finish_token_stream_with(result) } diff --git a/data_model/derive/tests/has_origin_generics.rs b/data_model/derive/tests/has_origin_generics.rs new file mode 100644 index 00000000000..b344aba802e --- /dev/null +++ b/data_model/derive/tests/has_origin_generics.rs @@ -0,0 +1,53 @@ +use iroha_data_model::prelude::{HasOrigin, Identifiable}; +use iroha_data_model_derive::{HasOrigin, IdEqOrdHash}; + +#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct ObjectId(pub i32); + +// fake impl for `#[derive(IdEqOrdHash)]` +impl From for iroha_data_model::IdBox { + fn from(_: ObjectId) -> Self { + unimplemented!("fake impl") + } +} + +#[derive(Debug, IdEqOrdHash)] +struct Object { + id: ObjectId, +} + +impl Object { + fn id(&self) -> &ObjectId { + &self.id + } +} + +#[allow(clippy::enum_variant_names)] // it's a test, duh +#[derive(Debug, HasOrigin)] +#[has_origin(origin = Object)] +enum ObjectEvent> { + EventWithId(ObjectId), + #[has_origin(event => &event.0)] + EventWithExtractor((ObjectId, i32)), + #[has_origin(obj => obj.id())] + EventWithAnotherExtractor(T), +} + +#[test] +fn has_origin() { + let 
events = vec![ + ObjectEvent::EventWithId(ObjectId(1)), + ObjectEvent::EventWithExtractor((ObjectId(2), 2)), + ObjectEvent::EventWithAnotherExtractor(Object { id: ObjectId(3) }), + ]; + let expected_ids = vec![ObjectId(1), ObjectId(2), ObjectId(3)]; + + for (event, expected_id) in events.into_iter().zip(expected_ids) { + assert_eq!( + event.origin_id(), + &expected_id, + "mismatched origin id for event {:?}", + event + ); + } +}