From 53431d1e138ce0dde7163e77cae3620a6d8ea720 Mon Sep 17 00:00:00 2001 From: Nikita Strygin Date: Tue, 12 Sep 2023 14:42:54 +0300 Subject: [PATCH] [refactor] #3882: Update iroha_data_model_derive to use syn 2.0 Signed-off-by: Nikita Strygin --- Cargo.lock | 12 +- data_model/derive/Cargo.toml | 6 +- data_model/derive/src/filter.rs | 19 ++- data_model/derive/src/has_origin.rs | 117 +++++++----------- data_model/derive/src/id.rs | 80 ++++++------ data_model/derive/src/lib.rs | 73 +++++++---- data_model/derive/src/model.rs | 56 +++++---- data_model/derive/src/partially_tagged.rs | 31 ++--- .../derive/tests/partial_tagged_serde.rs | 83 +++++++++++++ ffi/derive/Cargo.toml | 3 +- ffi/derive/src/convert.rs | 64 ++-------- ffi/derive/src/getset_gen.rs | 2 +- ffi/derive/src/impl_visitor.rs | 3 +- ffi/derive/src/lib.rs | 3 +- ffi/derive/src/wrapper.rs | 2 +- macro/utils/Cargo.toml | 5 +- {ffi/derive => macro/utils}/src/emitter.rs | 3 +- macro/utils/src/lib.rs | 102 +++++++++++++++ 18 files changed, 413 insertions(+), 251 deletions(-) create mode 100644 data_model/derive/tests/partial_tagged_serde.rs rename {ffi/derive => macro/utils}/src/emitter.rs (97%) diff --git a/Cargo.lock b/Cargo.lock index 35701a3bc9e..b26ac45ea5f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3172,14 +3172,15 @@ dependencies = [ name = "iroha_data_model_derive" version = "2.0.0-pre-rc.19" dependencies = [ + "darling", "iroha_data_model", "iroha_macro_utils", - "proc-macro-error", + "manyhow", "proc-macro2", "quote", "serde", "serde_json", - "syn 1.0.109", + "syn 2.0.26", "trybuild", ] @@ -3226,9 +3227,9 @@ name = "iroha_ffi_derive" version = "2.0.0-pre-rc.19" dependencies = [ "darling", - "drop_bomb", "getset", "iroha_ffi", + "iroha_macro_utils", "manyhow", "parse-display", "proc-macro2", @@ -3311,10 +3312,13 @@ dependencies = [ name = "iroha_macro_utils" version = "2.0.0-pre-rc.19" dependencies = [ - "proc-macro-error", + "darling", + "drop_bomb", + "manyhow", "proc-macro2", "quote", "syn 1.0.109", + 
"syn 2.0.26", ] [[package]] diff --git a/data_model/derive/Cargo.toml b/data_model/derive/Cargo.toml index 0fb3f485bbd..bee036dd7ef 100644 --- a/data_model/derive/Cargo.toml +++ b/data_model/derive/Cargo.toml @@ -14,12 +14,12 @@ workspace = true proc-macro = true [dependencies] -syn = { workspace = true, features = ["default", "full", "extra-traits"] } +syn2 = { workspace = true, features = ["default", "full", "extra-traits"] } quote = { workspace = true } +darling = { workspace = true } proc-macro2 = { workspace = true } -proc-macro-error = { workspace = true } +manyhow = { workspace = true } iroha_macro_utils = { workspace = true } -serde_json = { workspace = true, features = ["std"] } [dev-dependencies] iroha_data_model = { workspace = true, features = ["http"] } diff --git a/data_model/derive/src/filter.rs b/data_model/derive/src/filter.rs index f9cbe87c09a..4725f0c45ac 100644 --- a/data_model/derive/src/filter.rs +++ b/data_model/derive/src/filter.rs @@ -4,9 +4,9 @@ clippy::arithmetic_side_effects )] -use proc_macro::TokenStream; +use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn::{ +use syn2::{ parse::{Parse, ParseStream}, punctuated::Punctuated, Attribute, Generics, Ident, Token, Variant, Visibility, @@ -113,15 +113,15 @@ impl EventEnum { } impl Parse for EventEnum { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let _attrs = input.call(Attribute::parse_outer)?; let vis = input.parse()?; let _enum_token = input.parse::()?; let ident = input.parse::()?; let generics = input.parse::()?; let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(EventVariant::parse)?; + let _brace_token = syn2::braced!(content in input); + let variants = content.parse_terminated(EventVariant::parse, Token![,])?; if ident.to_string().ends_with("Event") { Ok(EventEnum { vis, @@ -130,7 +130,7 @@ impl Parse for EventEnum { variants, }) } else { - 
Err(syn::Error::new_spanned( + Err(syn2::Error::new_spanned( ident, "Bad ident: only derivable for `...Event` enums", )) @@ -139,7 +139,7 @@ impl Parse for EventEnum { } impl Parse for EventVariant { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let variant = input.parse::()?; let variant_ident = variant.ident; let field_type = variant @@ -148,7 +148,7 @@ impl Parse for EventVariant { .next() .expect("Variant should have at least one unnamed field") .ty; - if let syn::Type::Path(path) = field_type { + if let syn2::Type::Path(path) = field_type { let field_ident = path .path .get_ident() @@ -163,7 +163,7 @@ impl Parse for EventVariant { Ok(EventVariant::IdField(variant_ident)) } } else { - Err(syn::Error::new_spanned( + Err(syn2::Error::new_spanned( field_type, "Unexpected AST type variant", )) @@ -220,7 +220,6 @@ pub fn impl_filter(event: &EventEnum) -> TokenStream { #event_filter_and_impl } - .into() } /// Generates the event filter for the event. E.g. 
for `AccountEvent`, `AccountEventFilter` diff --git a/data_model/derive/src/has_origin.rs b/data_model/derive/src/has_origin.rs index 85dab5114bb..18df35f8d10 100644 --- a/data_model/derive/src/has_origin.rs +++ b/data_model/derive/src/has_origin.rs @@ -4,102 +4,80 @@ clippy::unwrap_in_result )] -use iroha_macro_utils::{attr_struct, AttrParser}; -use proc_macro::TokenStream; -use proc_macro_error::abort; +use darling::{FromDeriveInput, FromVariant}; +use iroha_macro_utils::{attr_struct2, parse_single_list_attr, parse_single_list_attr_opt}; +use manyhow::Result; +use proc_macro2::TokenStream; use quote::quote; -use syn::{ - parse::{Parse, ParseStream}, - parse_quote, - punctuated::Punctuated, - Attribute, Generics, Ident, Token, Type, Variant, Visibility, -}; +use syn2::{parse_quote, Ident, Token, Type}; mod kw { - syn::custom_keyword!(origin); - syn::custom_keyword!(variant); + syn2::custom_keyword!(origin); } +const HAS_ORIGIN_ATTR: &str = "has_origin"; + pub struct HasOriginEnum { ident: Ident, - variants: Punctuated, + variants: Vec, origin: Type, } +impl FromDeriveInput for HasOriginEnum { + fn from_derive_input(input: &syn2::DeriveInput) -> darling::Result { + let ident = input.ident.clone(); + + let Some(variants) = darling::ast::Data::::try_from(&input.data)?.take_enum() else { + return Err(darling::Error::custom("Expected enum")); + }; + + let origin = parse_single_list_attr::(HAS_ORIGIN_ATTR, &input.attrs)?.ty; + + Ok(Self { + ident, + variants, + origin, + }) + } +} + pub struct HasOriginVariant { ident: Ident, - extractor: Option, + extractor: Option, } -struct HasOriginAttr(core::marker::PhantomData); +impl FromVariant for HasOriginVariant { + fn from_variant(variant: &syn2::Variant) -> darling::Result { + let ident = variant.ident.clone(); + let extractor = parse_single_list_attr_opt(HAS_ORIGIN_ATTR, &variant.attrs)?; -impl AttrParser for HasOriginAttr { - const IDENT: &'static str = "has_origin"; + Ok(Self { ident, extractor }) + } } -attr_struct! 
{ - pub struct Origin { +attr_struct2! { + pub struct OriginAttr { _kw: kw::origin, _eq: Token![=], ty: Type, } } -attr_struct! { - pub struct OriginExtractor { +attr_struct2! { + pub struct OriginExtractorAttr { ident: Ident, _eq: Token![=>], - extractor: syn::Expr, + extractor: syn2::Expr, } } -impl Parse for HasOriginEnum { - fn parse(input: ParseStream) -> syn::Result { - let attrs = input.call(Attribute::parse_outer)?; - let _vis = input.parse::()?; - let _enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - if !generics.params.is_empty() { - abort!(generics, "Generics are not supported"); - } - let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(HasOriginVariant::parse)?; - let origin = attrs - .iter() - .find_map(|attr| HasOriginAttr::::parse(attr).ok()) - .map(|origin| origin.ty) - .expect("Attribute `#[has_origin(origin = Type)]` is required"); - Ok(HasOriginEnum { - ident, - variants, - origin, - }) - } -} +pub fn impl_has_origin(input: &syn2::DeriveInput) -> Result { + let enum_ = HasOriginEnum::from_derive_input(input)?; -impl Parse for HasOriginVariant { - fn parse(input: ParseStream) -> syn::Result { - let variant = input.parse::()?; - let Variant { - ident, - fields, - attrs, - .. 
- } = variant; - match fields { - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {} - fields => abort!(fields, "Only supports tuple variants with single field"), - }; - let extractor = attrs - .iter() - .find_map(|attr| HasOriginAttr::::parse(attr).ok()); - Ok(HasOriginVariant { ident, extractor }) - } -} + // TODO: verify enum is non-empty (or make it work with empty enums) + // TODO: verify all the enum variants are newtype variants + // TODO: verify there are no generics on the enum -pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { let enum_ident = &enum_.ident; let enum_origin = &enum_.origin; let variants_match_arms = &enum_ @@ -116,9 +94,9 @@ pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { }, ) }) - .collect::>(); + .collect::>(); - quote! { + Ok(quote! { impl HasOrigin for #enum_ident { type Origin = #enum_origin; @@ -131,6 +109,5 @@ pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { } } } - } - .into() + }) } diff --git a/data_model/derive/src/id.rs b/data_model/derive/src/id.rs index afb742b2420..ad57dfd789e 100644 --- a/data_model/derive/src/id.rs +++ b/data_model/derive/src/id.rs @@ -1,33 +1,16 @@ #![allow(clippy::str_to_string, clippy::mixed_read_write_in_expression)] +use manyhow::{bail, Result}; use proc_macro2::TokenStream; -use proc_macro_error::abort; use quote::quote; -use syn::parse_quote; +use syn2::parse_quote; -fn derive_identifiable(input: &syn::ItemStruct) -> TokenStream { +pub fn impl_id(input: &syn2::ItemStruct) -> Result { let name = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let (id_type, id_expr) = get_id_type(input); + let identifiable_derive = derive_identifiable(input)?; - quote! 
{ - impl #impl_generics Identifiable for #name #ty_generics #where_clause { - type Id = #id_type; - - #[inline] - fn id(&self) -> &Self::Id { - #id_expr - } - } - } -} - -pub fn impl_id(input: &syn::ItemStruct) -> TokenStream { - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let identifiable_derive = derive_identifiable(input); - - quote! { + Ok(quote! { #identifiable_derive impl #impl_generics ::core::cmp::PartialOrd for #name #ty_generics #where_clause where Self: Identifiable { @@ -55,65 +38,82 @@ pub fn impl_id(input: &syn::ItemStruct) -> TokenStream { self.id().hash(state); } } - } + }) +} + +fn derive_identifiable(input: &syn2::ItemStruct) -> Result { + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let (id_type, id_expr) = get_id_type(input)?; + + Ok(quote! { + impl #impl_generics Identifiable for #name #ty_generics #where_clause { + type Id = #id_type; + + #[inline] + fn id(&self) -> &Self::Id { + #id_expr + } + } + }) } -fn get_id_type(input: &syn::ItemStruct) -> (TokenStream, TokenStream) { +fn get_id_type(input: &syn2::ItemStruct) -> Result<(TokenStream, TokenStream)> { match &input.fields { - syn::Fields::Named(fields) => { + syn2::Fields::Named(fields) => { for field in &fields.named { let (field_name, field_ty) = (&field.ident, &field.ty); if is_identifier(&field.attrs) { - return (quote! {#field_ty}, quote! {&self.#field_name}); + return Ok((quote! {#field_ty}, quote! {&self.#field_name})); } if is_transparent(&field.attrs) { - return ( + return Ok(( quote! {<#field_ty as Identifiable>::Id}, quote! 
{Identifiable::id(&self.#field_name)}, - ); + )); } } } - syn::Fields::Unnamed(fields) => { + syn2::Fields::Unnamed(fields) => { for (i, field) in fields.unnamed.iter().enumerate() { - let (field_id, field_ty): (syn::Index, _) = (i.into(), &field.ty); + let (field_id, field_ty): (syn2::Index, _) = (i.into(), &field.ty); if is_identifier(&field.attrs) { - return (quote! {#field_ty}, quote! {&self.#field_id}); + return Ok((quote! {#field_ty}, quote! {&self.#field_id})); } if is_transparent(&field.attrs) { - return ( + return Ok(( quote! {<#field_ty as Identifiable>::Id}, quote! {Identifiable::id(&self.#field_id)}, - ); + )); } } } - syn::Fields::Unit => {} + syn2::Fields::Unit => {} } match &input.fields { - syn::Fields::Named(named) => { + syn2::Fields::Named(named) => { for field in &named.named { let field_ty = &field.ty; if field.ident.as_ref().expect("Field must be named") == "id" { - return (quote! {#field_ty}, quote! {&self.id}); + return Ok((quote! {#field_ty}, quote! {&self.id})); } } } - syn::Fields::Unnamed(_) | syn::Fields::Unit => {} + syn2::Fields::Unnamed(_) | syn2::Fields::Unit => {} } - abort!(input, "Identifier not found") + bail!(input, "Identifier not found") } -fn is_identifier(attrs: &[syn::Attribute]) -> bool { +fn is_identifier(attrs: &[syn2::Attribute]) -> bool { attrs.iter().any(|attr| attr == &parse_quote! {#[id]}) } -fn is_transparent(attrs: &[syn::Attribute]) -> bool { +fn is_transparent(attrs: &[syn2::Attribute]) -> bool { attrs .iter() .any(|attr| attr == &parse_quote! {#[id(transparent)]}) diff --git a/data_model/derive/src/lib.rs b/data_model/derive/src/lib.rs index 607ff1720e6..61b17a51b09 100644 --- a/data_model/derive/src/lib.rs +++ b/data_model/derive/src/lib.rs @@ -7,8 +7,9 @@ mod id; mod model; mod partially_tagged; -use proc_macro::TokenStream; -use syn::parse_macro_input; +use iroha_macro_utils::Emitter; +use manyhow::{emit, manyhow, Result}; +use proc_macro2::TokenStream; /// Macro which controls how to export item's API. 
The behaviour is controlled with `transparent_api` /// feature flag. If the flag is active, item's public fields will be exposed as public, however, if @@ -80,19 +81,37 @@ use syn::parse_macro_input; /// ``` /// /// It assumes that the derive is imported and referred to by its original name. +#[manyhow] #[proc_macro_attribute] -#[proc_macro_error::proc_macro_error] -pub fn model(_attr: TokenStream, input: TokenStream) -> TokenStream { - model::impl_model(&parse_macro_input!(input)).into() +pub fn model(attr: TokenStream, input: TokenStream) -> TokenStream { + let mut emitter = Emitter::new(); + + if !attr.is_empty() { + emit!(emitter, attr, "This attribute does not take any arguments"); + } + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = model::impl_model(&mut emitter, &input); + + emitter.finish_token_stream_with(result) } /// Same as [`model`] macro, but only processes a single item. /// /// You should prefer using [`model`] macro over this one. 
+#[manyhow] #[proc_macro] -#[proc_macro_error::proc_macro_error] pub fn model_single(input: TokenStream) -> TokenStream { - model::process_item(parse_macro_input!(input)).into() + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + emitter.finish_token_stream_with(model::process_item(input)) } /// Derive macro for `Identifiable` trait which also automatically implements [`Ord`], [`Eq`], @@ -209,10 +228,12 @@ pub fn model_single(input: TokenStream) -> TokenStream { /// } /// ``` /// -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(IdEqOrdHash, attributes(id, opaque))] -pub fn id_eq_ord_hash(input: TokenStream) -> TokenStream { - id::impl_id(&parse_macro_input!(input)).into() +pub fn id_eq_ord_hash(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + id::impl_id(&input) } /// [`Filter`] is used for code generation of `...Filter` structs and `...EventFilter` enums, as well as @@ -377,10 +398,12 @@ pub fn id_eq_ord_hash(input: TokenStream) -> TokenStream { /// ``` /// /// It assumes that the derive is imported and referred to by its original name. 
+#[manyhow] #[proc_macro_derive(Filter)] -pub fn filter_derive(input: TokenStream) -> TokenStream { - let event = parse_macro_input!(input as filter::EventEnum); - filter::impl_filter(&event) +pub fn filter_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(filter::impl_filter(&input)) } /// Derive `::serde::Serialize` trait for `enum` with possibility to avoid tags for selected variants @@ -409,10 +432,12 @@ pub fn filter_derive(input: TokenStream) -> TokenStream { /// &serde_json::to_string(&Outer::A(42)).expect("Failed to serialize"), r#"{"A":42}"# /// ); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(PartiallyTaggedSerialize, attributes(serde_partially_tagged, serde))] -pub fn partially_tagged_serialize_derive(input: TokenStream) -> TokenStream { - partially_tagged::impl_partially_tagged_serialize(&parse_macro_input!(input)) +pub fn partially_tagged_serialize_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(partially_tagged::impl_partially_tagged_serialize(&input)) } /// Derive `::serde::Deserialize` trait for `enum` with possibility to avoid tags for selected variants @@ -470,10 +495,12 @@ pub fn partially_tagged_serialize_derive(input: TokenStream) -> TokenStream { /// serde_json::from_str::(r#"{"B":42}"#).expect("Failed to deserialize"), Outer::Inner1(Inner::B(42)) /// ); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(PartiallyTaggedDeserialize, attributes(serde_partially_tagged, serde))] -pub fn partially_tagged_deserialize_derive(input: TokenStream) -> TokenStream { - partially_tagged::impl_partially_tagged_deserialize(&parse_macro_input!(input)) +pub fn partially_tagged_deserialize_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + Ok(partially_tagged::impl_partially_tagged_deserialize(&input)) } /// Derive macro for `HasOrigin`. 
@@ -559,8 +586,10 @@ pub fn partially_tagged_deserialize_derive(input: TokenStream) -> TokenStream { /// assert_eq!(&layer_id, layer_sub_layer_event.origin_id()); /// assert_eq!(&sub_layer_id, sub_layer_created_event.origin_id()); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(HasOrigin, attributes(has_origin))] -pub fn has_origin_derive(input: TokenStream) -> TokenStream { - has_origin::impl_has_origin(&parse_macro_input!(input)) +pub fn has_origin_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + has_origin::impl_has_origin(&input) } diff --git a/data_model/derive/src/model.rs b/data_model/derive/src/model.rs index 8a7426baca3..73aa757c87c 100644 --- a/data_model/derive/src/model.rs +++ b/data_model/derive/src/model.rs @@ -1,10 +1,11 @@ +use iroha_macro_utils::Emitter; +use manyhow::emit; use proc_macro2::TokenStream; -use proc_macro_error::abort; use quote::{quote, ToTokens}; -use syn::{parse_quote, Attribute}; +use syn2::{parse_quote, Attribute}; -pub fn impl_model(input: &syn::ItemMod) -> TokenStream { - let syn::ItemMod { +pub fn impl_model(emitter: &mut Emitter, input: &syn2::ItemMod) -> TokenStream { + let syn2::ItemMod { attrs, vis, mod_token, @@ -14,14 +15,17 @@ pub fn impl_model(input: &syn::ItemMod) -> TokenStream { .. 
} = input; - let syn::Visibility::Public(vis_public) = vis else { - abort!( + let syn2::Visibility::Public(vis_public) = vis else { + emit!( + emitter, input, "The `model` attribute can only be used on public modules" ); + return quote!(); }; if ident != "model" { - abort!( + emit!( + emitter, input, "The `model` attribute can only be used on the `model` module" ); @@ -40,16 +44,16 @@ pub fn impl_model(input: &syn::ItemMod) -> TokenStream { } } -pub fn process_item(item: syn::Item) -> TokenStream { - let mut input: syn::DeriveInput = match item { - syn::Item::Struct(item_struct) => item_struct.into(), - syn::Item::Enum(item_enum) => item_enum.into(), - syn::Item::Union(item_union) => item_union.into(), +pub fn process_item(item: syn2::Item) -> TokenStream { + let mut input: syn2::DeriveInput = match item { + syn2::Item::Struct(item_struct) => item_struct.into(), + syn2::Item::Enum(item_enum) => item_enum.into(), + syn2::Item::Union(item_union) => item_union.into(), other => return other.into_token_stream(), }; let vis = &input.vis; - if matches!(vis, syn::Visibility::Public(_)) { + if matches!(vis, syn2::Visibility::Public(_)) { return process_pub_item(input); } @@ -70,21 +74,21 @@ pub fn process_item(item: syn::Item) -> TokenStream { } } -fn process_pub_item(input: syn::DeriveInput) -> TokenStream { +fn process_pub_item(input: syn2::DeriveInput) -> TokenStream { let (impl_generics, _, where_clause) = input.generics.split_for_impl(); let attrs = input.attrs; let ident = input.ident; match input.data { - syn::Data::Struct(item) => match &item.fields { - syn::Fields::Named(fields) => { + syn2::Data::Struct(item) => match &item.fields { + syn2::Fields::Named(fields) => { let fields = fields.named.iter().map(|field| { let field_attrs = &field.attrs; let field_name = &field.ident; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! 
{#field,}; } @@ -107,12 +111,12 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } - syn::Fields::Unnamed(fields) => { + syn2::Fields::Unnamed(fields) => { let fields = fields.unnamed.iter().map(|field| { let field_attrs = &field.attrs; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! {#field,}; } @@ -133,7 +137,7 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } - syn::Fields::Unit => { + syn2::Fields::Unit => { let item = quote! { pub struct #ident #impl_generics #where_clause; }; @@ -141,7 +145,7 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } }, - syn::Data::Enum(item) => { + syn2::Data::Enum(item) => { let variants = &item.variants; let item = quote! { @@ -154,13 +158,13 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { } // Triggers in `quote!` side, see https://github.com/rust-lang/rust-clippy/issues/10417 #[allow(clippy::arithmetic_side_effects)] - syn::Data::Union(item) => { + syn2::Data::Union(item) => { let fields = item.fields.named.iter().map(|field| { let field_attrs = &field.attrs; let field_name = &field.ident; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! {#field,}; } @@ -189,7 +193,9 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { } fn expose_ffi(mut attrs: Vec, item: &TokenStream) -> TokenStream { - let mut ffi_attrs = attrs.iter().filter(|&attr| attr.path.is_ident("ffi_type")); + let mut ffi_attrs = attrs + .iter() + .filter(|&attr| attr.path().is_ident("ffi_type")); if ffi_attrs.next().is_none() { return quote! { @@ -201,7 +207,7 @@ fn expose_ffi(mut attrs: Vec, item: &TokenStream) -> TokenStream { attrs.retain(|attr| *attr != parse_quote! 
(#[ffi_type])); let no_ffi_attrs: Vec<_> = attrs .iter() - .filter(|&attr| !attr.path.is_ident("ffi_type")) + .filter(|&attr| !attr.path().is_ident("ffi_type")) .collect(); quote! { diff --git a/data_model/derive/src/partially_tagged.rs b/data_model/derive/src/partially_tagged.rs index 845520f0670..830d4e65c6a 100644 --- a/data_model/derive/src/partially_tagged.rs +++ b/data_model/derive/src/partially_tagged.rs @@ -1,11 +1,11 @@ #![allow(clippy::too_many_lines)] -use proc_macro::TokenStream; -use proc_macro_error::abort; +use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn::{ +use syn2::{ parse::{Parse, ParseStream}, parse_quote, punctuated::Punctuated, + spanned::Spanned, Attribute, Generics, Ident, Token, Type, Variant, Visibility, }; @@ -24,15 +24,15 @@ pub struct PartiallyTaggedVariant { } impl Parse for PartiallyTaggedEnum { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let mut attrs = input.call(Attribute::parse_outer)?; let _vis = input.parse::()?; let _enum_token = input.parse::()?; let ident = input.parse::()?; let generics = input.parse::()?; let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(PartiallyTaggedVariant::parse)?; + let _brace_token = syn2::braced!(content in input); + let variants = content.parse_terminated(PartiallyTaggedVariant::parse, Token![,])?; attrs.retain(is_serde_attr); Ok(PartiallyTaggedEnum { attrs, @@ -44,7 +44,7 @@ impl Parse for PartiallyTaggedEnum { } impl Parse for PartiallyTaggedVariant { - fn parse(input: ParseStream) -> syn::Result { + fn parse(input: ParseStream) -> syn2::Result { let variant = input.parse::()?; let Variant { ident, @@ -53,12 +53,17 @@ impl Parse for PartiallyTaggedVariant { .. 
} = variant; let field = match fields { - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => fields + syn2::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => fields .unnamed .into_iter() .next() .expect("Guaranteed to have exactly one field"), - fields => abort!(fields, "Only supports tuple variants with single field"), + fields => { + return Err(syn2::Error::new( + fields.span(), + "Only supports tuple variants with single field", + )) + } }; let ty = field.ty; let is_untagged = attrs.iter().any(is_untagged_attr); @@ -104,7 +109,7 @@ fn is_untagged_attr(attr: &Attribute) -> bool { /// Check if `#[serde...]` attribute fn is_serde_attr(attr: &Attribute) -> bool { - attr.path + attr.path() .get_ident() .map_or_else(|| false, |ident| ident.to_string().eq("serde")) } @@ -117,7 +122,7 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre let (variants_ident, variants_ty, variants_attrs) = variants_to_tuple(enum_.variants()); let (untagged_variants_ident, untagged_variants_ty, untagged_variants_attrs) = variants_to_tuple(enum_.untagged_variants()); - let serialize_trait_bound: syn::TypeParamBound = parse_quote!(::serde::Serialize); + let serialize_trait_bound: syn2::TypeParamBound = parse_quote!(::serde::Serialize); let mut generics = enum_.generics.clone(); generics .type_params_mut() @@ -177,7 +182,6 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre } } } - .into() } pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenStream { @@ -190,7 +194,7 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt let (variants_ident, variants_ty, variants_attrs) = variants_to_tuple(enum_.variants()); let (untagged_variants_ident, untagged_variants_ty, untagged_variants_attrs) = variants_to_tuple(enum_.untagged_variants()); - let deserialize_trait_bound: syn::TypeParamBound = parse_quote!(::serde::de::DeserializeOwned); + let deserialize_trait_bound: 
syn2::TypeParamBound = parse_quote!(::serde::de::DeserializeOwned); let variants_ty_deserialize_bound = variants_ty .iter() .map(|ty| quote!(#ty: #deserialize_trait_bound).to_string()) @@ -343,5 +347,4 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt } } } - .into() } diff --git a/data_model/derive/tests/partial_tagged_serde.rs b/data_model/derive/tests/partial_tagged_serde.rs new file mode 100644 index 00000000000..99e11e06e0e --- /dev/null +++ b/data_model/derive/tests/partial_tagged_serde.rs @@ -0,0 +1,83 @@ +use std::fmt::Formatter; + +use iroha_data_model_derive::{PartiallyTaggedDeserialize, PartiallyTaggedSerialize}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; + +#[allow(variant_size_differences)] // it's a test, duh +#[derive(Debug, PartialEq, Eq, PartiallyTaggedDeserialize, PartiallyTaggedSerialize)] +enum Value { + Bool(bool), + String(String), + #[serde_partially_tagged(untagged)] + Numeric(NumericValue), +} + +// a simpler version of NumericValue than used in data_model +// this one is always i32, but is still serialized as a string literal +// NOTE: debug is actually required for `PartiallyTaggedDeserialize`! 
+#[derive(Debug, PartialEq, Eq)] +struct NumericValue(i32); + +impl Serialize for NumericValue { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.0.to_string()) + } +} + +struct NumericValueVisitor; + +impl de::Visitor<'_> for NumericValueVisitor { + type Value = NumericValue; + + fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + formatter.write_str("a string literal containing a number") + } + + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + let parsed = v.parse::().map_err(|e| E::custom(e))?; + + Ok(NumericValue(parsed)) + } +} + +impl<'de> Deserialize<'de> for NumericValue { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_str(NumericValueVisitor) + } +} + +#[test] +fn partially_tagged_serde() { + let values = [ + Value::Bool(true), + Value::String("I am string".to_owned()), + Value::Numeric(NumericValue(42)), + ]; + let serialized_values = [r#"{"Bool":true}"#, r#"{"String":"I am string"}"#, r#""42""#]; + + for (value, serialized_value) in values.iter().zip(serialized_values.iter()) { + let serialized = serde_json::to_string(value) + .unwrap_or_else(|e| panic!("Failed to serialize `{:?}`: {:?}", value, e)); + assert_eq!( + serialized, *serialized_value, + "Serialized form of `{:?}` does not match the expected value", + value + ); + let deserialized: Value = serde_json::from_str(serialized_value) + .unwrap_or_else(|e| panic!("Failed to deserialize `{:?}`: {:?}", serialized_value, e)); + assert_eq!( + *value, deserialized, + "Deserialized form of `{:?}` does not match the expected value", + value + ); + } +} diff --git a/ffi/derive/Cargo.toml b/ffi/derive/Cargo.toml index d8c320bfa11..5004ea9a52c 100644 --- a/ffi/derive/Cargo.toml +++ b/ffi/derive/Cargo.toml @@ -15,6 +15,8 @@ workspace = true proc-macro = true [dependencies] +iroha_macro_utils = { workspace = true } + syn2 = { workspace = true, features = 
["full", "visit", "visit-mut", "extra-traits"] } quote = { workspace = true } proc-macro2 = { workspace = true } @@ -22,7 +24,6 @@ manyhow = { workspace = true } darling = { workspace = true } rustc-hash = { workspace = true } -drop_bomb = "0.1.5" parse-display = "0.8.2" [dev-dependencies] diff --git a/ffi/derive/src/convert.rs b/ffi/derive/src/convert.rs index cb409c622af..d8995fedc4f 100644 --- a/ffi/derive/src/convert.rs +++ b/ffi/derive/src/convert.rs @@ -4,21 +4,17 @@ use std::fmt::{Display, Formatter}; use darling::{ ast::Style, util::SpannedValue, FromAttributes, FromDeriveInput, FromField, FromVariant, }; +use iroha_macro_utils::{parse_single_list_attr_opt, Emitter}; use manyhow::{emit, error_message}; use proc_macro2::{Delimiter, Span, TokenStream}; use quote::quote; -use syn2::{ - parse::ParseStream, spanned::Spanned as _, visit::Visit as _, Attribute, Field, Ident, Meta, -}; +use syn2::{parse::ParseStream, spanned::Spanned as _, visit::Visit as _, Attribute, Field, Ident}; -use crate::{ - attr_parse::{ - derive::DeriveAttrs, - doc::DocAttrs, - getset::{GetSetFieldAttrs, GetSetStructAttrs}, - repr::{Repr, ReprKind, ReprPrimitive}, - }, - emitter::Emitter, +use crate::attr_parse::{ + derive::DeriveAttrs, + doc::DocAttrs, + getset::{GetSetFieldAttrs, GetSetStructAttrs}, + repr::{Repr, ReprKind, ReprPrimitive}, }; #[derive(Debug)] @@ -135,47 +131,7 @@ impl syn2::parse::Parse for FfiTypeKindFieldAttribute { } } -fn parse_ffi_type_attr(attrs: &[Attribute]) -> darling::Result> { - let mut accumulator = darling::error::Accumulator::default(); - - // first, ensure there is only one "ffi_type" attribute (we don't support multiple) - let ffi_type_attrs = attrs - .iter() - .filter(|a| a.path().is_ident("ffi_type")) - .collect::>(); - let attr = match *ffi_type_attrs.as_slice() { - [] => { - return accumulator.finish_with(None); - } - [attr] => attr, - [attr, ref tail @ ..] 
=> {
-            // allow parsing to proceed further to collect more errors
-            accumulator.push(
-                darling::Error::custom("Only one #[ffi_type] attribute is allowed!").with_span(
-                    &tail
-                        .iter()
-                        .map(syn2::spanned::Spanned::span)
-                        .reduce(|a, b| a.join(b).unwrap())
-                        .unwrap(),
-                ),
-            );
-            attr
-        }
-    };
-
-    let mut kind = None;
-
-    match &attr.meta {
-        Meta::Path(_) | Meta::NameValue(_) => accumulator.push(darling::Error::custom(
-            "Expected #[ffi_type(...)] attribute to be a list",
-        )),
-        Meta::List(list) => {
-            kind = accumulator.handle(syn2::parse2(list.tokens.clone()).map_err(Into::into));
-        }
-    }
-
-    accumulator.finish_with(kind)
-}
+const FFI_TYPE_ATTR: &str = "ffi_type";
 
 pub struct FfiTypeAttr {
     pub kind: Option<FfiTypeKindAttribute>,
 }
@@ -183,7 +139,7 @@ pub struct FfiTypeAttr {
 impl FromAttributes for FfiTypeAttr {
     fn from_attributes(attrs: &[Attribute]) -> darling::Result<Self> {
-        parse_ffi_type_attr(attrs).map(|kind| Self { kind })
+        parse_single_list_attr_opt(FFI_TYPE_ATTR, attrs).map(|kind| Self { kind })
     }
 }
@@ -193,7 +149,7 @@ pub struct FfiTypeFieldAttr {
 impl FromAttributes for FfiTypeFieldAttr {
     fn from_attributes(attrs: &[Attribute]) -> darling::Result<Self> {
-        parse_ffi_type_attr(attrs).map(|kind| Self { kind })
+        parse_single_list_attr_opt(FFI_TYPE_ATTR, attrs).map(|kind| Self { kind })
     }
 }
 
diff --git a/ffi/derive/src/getset_gen.rs b/ffi/derive/src/getset_gen.rs
index 6458c04a030..89c628de71b 100644
--- a/ffi/derive/src/getset_gen.rs
+++ b/ffi/derive/src/getset_gen.rs
@@ -1,6 +1,7 @@
 use std::default::Default;
 
 use darling::ast::Style;
+use iroha_macro_utils::Emitter;
 use manyhow::emit;
 use proc_macro2::TokenStream;
 use quote::quote;
@@ -13,7 +14,6 @@ use crate::{
         getset::{GetSetGenMode, GetSetStructAttrs},
     },
     convert::{FfiTypeField, FfiTypeFields},
-    emitter::Emitter,
     impl_visitor::{unwrap_result_type, Arg, FnDescriptor},
 };
 
diff --git a/ffi/derive/src/impl_visitor.rs b/ffi/derive/src/impl_visitor.rs
index 6c547b10020..c5bd408b01b 100644
--- a/ffi/derive/src/impl_visitor.rs
+++
b/ffi/derive/src/impl_visitor.rs
@@ -2,6 +2,7 @@
 //!
 //! It also defines descriptors - types that are used for the codegen step
 
+use iroha_macro_utils::Emitter;
 use manyhow::emit;
 use proc_macro2::Span;
 use syn2::{
@@ -11,8 +12,6 @@
     Attribute, Ident, Path, Type, Visibility,
 };
 
-use crate::emitter::Emitter;
-
 pub struct Arg {
     self_ty: Option<Path>,
     name: Ident,
diff --git a/ffi/derive/src/lib.rs b/ffi/derive/src/lib.rs
index 2bbd93b6489..fdd0673192b 100644
--- a/ffi/derive/src/lib.rs
+++ b/ffi/derive/src/lib.rs
@@ -3,6 +3,7 @@
 use darling::FromDeriveInput;
 use impl_visitor::{FnDescriptor, ImplDescriptor};
+use iroha_macro_utils::Emitter;
 use manyhow::{emit, manyhow};
 use proc_macro2::TokenStream;
 use quote::quote;
@@ -12,12 +13,10 @@ use wrapper::wrap_method;
 
 use crate::{
     attr_parse::derive::Derive,
     convert::{derive_ffi_type, FfiTypeData, FfiTypeInput},
-    emitter::Emitter,
 };
 
 mod attr_parse;
 mod convert;
-mod emitter;
 mod ffi_fn;
 mod getset_gen;
 mod impl_visitor;
diff --git a/ffi/derive/src/wrapper.rs b/ffi/derive/src/wrapper.rs
index 17fcb77e083..8ea1286eeb4 100644
--- a/ffi/derive/src/wrapper.rs
+++ b/ffi/derive/src/wrapper.rs
@@ -1,3 +1,4 @@
+use iroha_macro_utils::Emitter;
 use manyhow::emit;
 use proc_macro2::{Span, TokenStream};
 use quote::quote;
@@ -6,7 +7,6 @@ use syn2::{parse_quote, visit_mut::VisitMut, Attribute, Ident, Type};
 
 use crate::{
     attr_parse::derive::{Derive, RustcDerive},
     convert::FfiTypeInput,
-    emitter::Emitter,
     ffi_fn,
     getset_gen::{gen_resolve_type, gen_store_name},
     impl_visitor::{unwrap_result_type, Arg, FnDescriptor, ImplDescriptor, TypeImplTraitResolver},
diff --git a/macro/utils/Cargo.toml b/macro/utils/Cargo.toml
index 74e9e6faf51..08c1dce1270 100644
--- a/macro/utils/Cargo.toml
+++ b/macro/utils/Cargo.toml
@@ -14,6 +14,9 @@ maintenance = { status = "actively-developed" }
 
 [dependencies]
 syn = { workspace = true, features = ["default", "parsing", "printing"] }
+syn2 = { workspace = true, features = ["default", "parsing", "printing"] }
+darling = { workspace = true } quote = { workspace = true } proc-macro2 = { workspace = true } -proc-macro-error = { workspace = true } +manyhow = { workspace = true } +drop_bomb = "0.1.5" diff --git a/ffi/derive/src/emitter.rs b/macro/utils/src/emitter.rs similarity index 97% rename from ffi/derive/src/emitter.rs rename to macro/utils/src/emitter.rs index 193d961c663..f5edda28b1b 100644 --- a/ffi/derive/src/emitter.rs +++ b/macro/utils/src/emitter.rs @@ -1,8 +1,9 @@ +//! A wrapper type around [`manyhow::Emitter`] that provides a more ergonomic API. + use drop_bomb::DropBomb; use manyhow::ToTokensError; use proc_macro2::TokenStream; -// TODO: move this type to `derive-primitives` crate /// A wrapper type around [`manyhow::Emitter`] that provides a more ergonomic API. /// /// This type is used to accumulate errors during parsing and code generation. diff --git a/macro/utils/src/lib.rs b/macro/utils/src/lib.rs index 2d6d6ef3e70..9f19785d07c 100644 --- a/macro/utils/src/lib.rs +++ b/macro/utils/src/lib.rs @@ -1,5 +1,9 @@ //! Module for various functions and structs to build macros in iroha. +mod emitter; + +pub use emitter::Emitter; + /// Trait for attribute parsing generalization pub trait AttrParser { /// Attribute identifier `#[IDENT...]` @@ -65,3 +69,101 @@ macro_rules! attr_struct { } }; } + +/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. +/// +/// If no attribute with specified name is found, returns `Ok(None)`. 
+pub fn parse_single_list_attr_opt<T: syn2::parse::Parse>(
+    attr_name: &str,
+    attrs: &[syn2::Attribute],
+) -> darling::Result<Option<T>> {
+    let mut accumulator = darling::error::Accumulator::default();
+
+    // first, ensure there is only one attribute with the requested name
+    // take the first one if there are multiple
+    let matching_attrs = attrs
+        .iter()
+        .filter(|a| a.path().is_ident(attr_name))
+        .collect::<Vec<_>>();
+    let attr = match *matching_attrs.as_slice() {
+        [] => {
+            return accumulator.finish_with(None);
+        }
+        [attr] => attr,
+        [attr, ref tail @ ..] => {
+            // allow parsing to proceed further to collect more errors
+            accumulator.push(
+                darling::Error::custom(format!("Only one #[{}] attribute is allowed!", attr_name))
+                    .with_span(
+                        &tail
+                            .iter()
+                            .map(syn2::spanned::Spanned::span)
+                            .reduce(|a, b| a.join(b).unwrap())
+                            .unwrap(),
+                    ),
+            );
+            attr
+        }
+    };
+
+    let mut kind = None;
+
+    match &attr.meta {
+        syn2::Meta::Path(_) | syn2::Meta::NameValue(_) => accumulator.push(darling::Error::custom(
+            format!("Expected #[{}(...)] attribute to be a list", attr_name),
+        )),
+        syn2::Meta::List(list) => {
+            kind = accumulator.handle(syn2::parse2(list.tokens.clone()).map_err(Into::into));
+        }
+    }
+
+    accumulator.finish_with(kind)
+}
+
+/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation.
+///
+/// If no attribute with specified name is found, returns an error.
+pub fn parse_single_list_attr<T: syn2::parse::Parse>(
+    attr_name: &str,
+    attrs: &[syn2::Attribute],
+) -> darling::Result<T> {
+    parse_single_list_attr_opt(attr_name, attrs)?
+        .ok_or_else(|| darling::Error::custom(format!("Missing `#[{}(...)]` attribute", attr_name)))
+}
+
+/// Macro for automatic [`syn::parse::Parse`] impl generation for keyword
+/// attribute structs in derive macros.
+#[macro_export]
+macro_rules!
attr_struct2 {
+    // Matching struct with named fields
+    (
+        $( #[$meta:meta] )*
+        // ^~~~attributes~~~~^
+        $vis:vis struct $name:ident {
+            $(
+                $( #[$field_meta:meta] )*
+                // ^~~~field attributes~~~!^
+                $field_vis:vis $field_name:ident : $field_ty:ty
+                // ^~~~~~~~~~~~~~~~~a single field~~~~~~~~~~~~~~~^
+            ),*
+        $(,)? }
+    ) => {
+        $( #[$meta] )*
+        $vis struct $name {
+            $(
+                $( #[$field_meta] )*
+                $field_vis $field_name : $field_ty
+            ),*
+        }
+
+        impl syn2::parse::Parse for $name {
+            fn parse(input: syn2::parse::ParseStream) -> syn2::Result<Self> {
+                Ok(Self {
+                    $(
+                        $field_name: input.parse()?,
+                    )*
+                })
+            }
+        }
+    };
+}