Skip to content

Commit

Permalink
[refactor]: update iroha_torii_macro to use syn 2.0 (hyperledger-iroha#4118)
Browse files Browse the repository at this point in the history

Signed-off-by: VAmuzing <[email protected]>
Signed-off-by: Asem-Abdelhady <[email protected]>
  • Loading branch information
VAmuzing authored and Asem-Abdelhady committed Jan 22, 2024
1 parent b377679 commit 8684a71
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 32 deletions.
3 changes: 2 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion torii/macro/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,10 @@ workspace = true
proc-macro = true

[dependencies]
syn = { workspace = true, features = ["default", "parsing", "printing", "extra-traits"] }
syn2 = { workspace = true }
quote = { workspace = true }
proc-macro2 = { workspace = true }
manyhow = { workspace = true }

[dev-dependencies]
warp = { workspace = true }
53 changes: 23 additions & 30 deletions torii/macro/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
//! Crate with a proc macro for torii endpoint generation
use proc_macro::TokenStream;
use proc_macro2::Span;
use manyhow::{manyhow, Result};
use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::{
use syn2::{
parse::{Parse, ParseStream},
parse_macro_input,
punctuated::Punctuated,
Ident, LitInt, Result as SynResult, Token,
Ident, LitInt, Token,
};

/// Generate warp filters for endpoints, accepting functions
Expand Down Expand Up @@ -49,16 +48,17 @@ use syn::{
/// // defaults, such as `endpoint3`.
/// generate_endpoints!(3, my_endpoint: 2, 4, anotherOne: 5, );
/// ```
#[manyhow]
#[proc_macro]
pub fn generate_endpoints(input: TokenStream) -> TokenStream {
let EndpointList(list) = parse_macro_input!(input as EndpointList);
let arg_names = (1_u8..).map(|count| {
pub fn generate_endpoints(input: TokenStream) -> Result<TokenStream> {
let EndpointList(list) = syn2::parse2(input)?;
let lazy_arg_names = (1_u8..).map(|count| {
Ident::new(
format!("__endpoint_arg_{count}").as_str(),
Span::call_site(),
)
});
let arg_types = (1_u8..).map(|count| {
let lazy_arg_types = (1_u8..).map(|count| {
Ident::new(
format!("__Endpoint_Arg_{count}").as_str(),
Span::call_site(),
Expand All @@ -67,29 +67,23 @@ pub fn generate_endpoints(input: TokenStream) -> TokenStream {
let mut endpoints = Vec::new();

for item in list {
let (fun_name, arg_names, arg_types) = match item {
let (fun_name, arg_count) = match item {
EndpointItem::ArgCount(arg_count) => {
let fun_name = Ident::new(&format!("endpoint{arg_count}"), Span::call_site());
let count = arg_count
.base10_parse::<usize>()
.expect("Already checked at parse stage");
let arg_names = arg_names.clone().take(count).collect::<Vec<_>>();
let arg_types = arg_types.clone().take(count).collect::<Vec<_>>();
(fun_name, arg_names, arg_types)
(fun_name, arg_count)
}
EndpointItem::NameAndArgCount {
name: fun_name,
arg_count,
} => {
let count = arg_count
.base10_parse::<usize>()
.expect("Already checked at parse stage");
let arg_names = arg_names.clone().take(count).collect::<Vec<_>>();
let arg_types = arg_types.clone().take(count).collect::<Vec<_>>();
(*fun_name, arg_names, arg_types)
}
} => (*fun_name, arg_count),
};

let count = arg_count
.base10_parse::<usize>()
.expect("Already checked at parse stage");
let arg_names = lazy_arg_names.clone().take(count).collect::<Vec<_>>();
let arg_types = lazy_arg_types.clone().take(count).collect::<Vec<_>>();

let expanded = quote! {
#[inline]
#[allow(clippy::redundant_pub_crate)]
Expand All @@ -112,10 +106,9 @@ pub fn generate_endpoints(input: TokenStream) -> TokenStream {
endpoints.push(expanded);
}

quote! {
Ok(quote! {
#( #endpoints )*
}
.into()
})
}

#[derive(Debug)]
Expand All @@ -128,7 +121,7 @@ enum EndpointItem {
}

impl Parse for EndpointList {
fn parse(input: ParseStream) -> SynResult<Self> {
fn parse(input: ParseStream) -> syn2::Result<Self> {
let items = Punctuated::<EndpointItem, Token![,]>::parse_terminated(input)?;
let mut seen_arg_counts = Vec::new();
for item in &items {
Expand All @@ -137,7 +130,7 @@ impl Parse for EndpointList {
| EndpointItem::ArgCount(arg_count) => {
let curr_count = arg_count.base10_parse::<u8>()?;
if seen_arg_counts.contains(&curr_count) {
return Err(syn::Error::new_spanned(
return Err(syn2::Error::new_spanned(
arg_count.token(),
"argument counts for all endpoints should be distinct",
));
Expand All @@ -152,7 +145,7 @@ impl Parse for EndpointList {
}

impl Parse for EndpointItem {
fn parse(input: ParseStream) -> SynResult<Self> {
fn parse(input: ParseStream) -> syn2::Result<Self> {
let lookahead = input.lookahead1();
if lookahead.peek(LitInt) {
input.parse().map(EndpointItem::ArgCount)
Expand Down

0 comments on commit 8684a71

Please sign in to comment.