diff --git a/Cargo.lock b/Cargo.lock index ae48df55a1..879b2d06ce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3223,7 +3223,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 1.0.107", + "syn 2.0.22", ] [[package]] @@ -3245,7 +3245,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 1.0.107", + "syn 2.0.22", "tempfile", "tokio", "url", diff --git a/sqlx-macros-core/Cargo.toml b/sqlx-macros-core/Cargo.toml index 48e8d26491..adfc788bda 100644 --- a/sqlx-macros-core/Cargo.toml +++ b/sqlx-macros-core/Cargo.toml @@ -56,8 +56,7 @@ proc-macro2 = { version = "1.0.36", default-features = false } serde = { version = "1.0.132", features = ["derive"] } serde_json = { version = "1.0.73" } sha2 = { version = "0.10.0" } -syn = { version = "1.0.84", default-features = false, features = ["full", "derive", "parsing", "printing", "clone-impls"] } +syn = { version = "2.0", default-features = false, features = ["full", "derive", "parsing", "printing", "clone-impls"] } tempfile = { version = "3.3.0" } -quote = { version = "1.0.14", default-features = false } +quote = { version = "1.0.26", default-features = false } url = { version = "2.2.2", default-features = false } - diff --git a/sqlx-macros-core/src/derives/attributes.rs b/sqlx-macros-core/src/derives/attributes.rs index 7d32e0037f..059d06882f 100644 --- a/sqlx-macros-core/src/derives/attributes.rs +++ b/sqlx-macros-core/src/derives/attributes.rs @@ -1,9 +1,8 @@ use proc_macro2::{Ident, Span, TokenStream}; use quote::quote; use syn::punctuated::Punctuated; -use syn::spanned::Spanned; use syn::token::Comma; -use syn::{Attribute, DeriveInput, Field, Lit, Meta, MetaNameValue, NestedMeta, Type, Variant}; +use syn::{Attribute, DeriveInput, Field, LitStr, Meta, Token, Type, Variant}; macro_rules! 
assert_attribute { ($e:expr, $err:expr, $input:expr) => { @@ -72,74 +71,49 @@ pub fn parse_container_attributes(input: &[Attribute]) -> syn::Result { - for value in list.nested.iter() { - match value { - NestedMeta::Meta(meta) => match meta { - Meta::Path(p) if p.is_ident("transparent") => { - try_set!(transparent, true, value) - } - - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. - }) if path.is_ident("rename_all") => { - let val = match &*val.value() { - "lowercase" => RenameAll::LowerCase, - "snake_case" => RenameAll::SnakeCase, - "UPPERCASE" => RenameAll::UpperCase, - "SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase, - "kebab-case" => RenameAll::KebabCase, - "camelCase" => RenameAll::CamelCase, - "PascalCase" => RenameAll::PascalCase, - _ => fail!(meta, "unexpected value for rename_all"), - }; - - try_set!(rename_all, val, value) - } - - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. - }) if path.is_ident("type_name") => { - try_set!( - type_name, - TypeName { - val: val.value(), - span: value.span(), - }, - value - ) - } - - u => fail!(u, "unexpected attribute"), - }, - u => fail!(u, "unexpected attribute"), - } - } - } - Meta::List(list) if list.path.is_ident("repr") => { - if list.nested.len() != 1 { - fail!(&list.nested, "expected one value") - } - match list.nested.first().unwrap() { - NestedMeta::Meta(Meta::Path(p)) if p.get_ident().is_some() => { - try_set!(repr, p.get_ident().unwrap().clone(), list); - } - u => fail!(u, "unexpected value"), + for attr in input { + if attr.path().is_ident("sqlx") { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("transparent") { + try_set!(transparent, true, attr); + } else if meta.path.is_ident("rename_all") { + meta.input.parse::()?; + let lit: LitStr = meta.input.parse()?; + + let val = match lit.value().as_str() { + "lowercase" => RenameAll::LowerCase, + "snake_case" => RenameAll::SnakeCase, + "UPPERCASE" => RenameAll::UpperCase, + "SCREAMING_SNAKE_CASE" => 
RenameAll::ScreamingSnakeCase, + "kebab-case" => RenameAll::KebabCase, + "camelCase" => RenameAll::CamelCase, + "PascalCase" => RenameAll::PascalCase, + _ => fail!(lit, "unexpected value for rename_all"), + }; + + try_set!(rename_all, val, lit) + } else if meta.path.is_ident("type_name") { + meta.input.parse::<Token![=]>()?; + let lit: LitStr = meta.input.parse()?; + let name = TypeName { + val: lit.value(), + span: lit.span(), + }; + + try_set!(type_name, name, lit) + } else { + fail!(meta.path, "unexpected attribute") } + + Ok(()) + })?; + } else if attr.path().is_ident("repr") { + let list: Punctuated<Meta, Token![,]> = + attr.parse_args_with(<Punctuated<Meta, Token![,]>>::parse_terminated)?; + + if let Some(path) = list.iter().find_map(|f| f.require_path_only().ok()) { + try_set!(repr, path.get_ident().unwrap().clone(), list); } - _ => {} } } @@ -158,34 +132,31 @@ pub fn parse_child_attributes(input: &[Attribute]) -> syn::Result<SqlxChildAttributes> match meta { - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. 
- }) if path.is_ident("try_from") => try_set!(try_from, val.parse()?, value), - Meta::Path(path) if path.is_ident("default") => default = true, - Meta::Path(path) if path.is_ident("flatten") => flatten = true, - Meta::Path(path) if path.is_ident("skip") => skip = true, - u => fail!(u, "unexpected attribute"), - }, - u => fail!(u, "unexpected attribute"), - } + for attr in input.iter().filter(|a| a.path().is_ident("sqlx")) { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("rename") { + meta.input.parse::<Token![=]>()?; + let val: LitStr = meta.input.parse()?; + try_set!(rename, val.value(), val); + return Ok(()); + } else if meta.path.is_ident("try_from") { + meta.input.parse::<Token![=]>()?; + let val: LitStr = meta.input.parse()?; + let val = val.parse()?; + try_set!(try_from, val, val); + return Ok(()); } - } + + if meta.path.is_ident("default") { + default = true; + } else if meta.path.is_ident("flatten") { + flatten = true; + } else if meta.path.is_ident("skip") { + skip = true; + } + + Ok(()) + })?; } Ok(SqlxChildAttributes { diff --git a/sqlx-macros-core/src/derives/encode.rs b/sqlx-macros-core/src/derives/encode.rs index 7bb568210f..823af65ad3 100644 --- a/sqlx-macros-core/src/derives/encode.rs +++ b/sqlx-macros-core/src/derives/encode.rs @@ -9,7 +9,7 @@ use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{ parse_quote, Data, DataEnum, DataStruct, DeriveInput, Expr, Field, Fields, FieldsNamed, - FieldsUnnamed, Lifetime, LifetimeDef, Stmt, Variant, + FieldsUnnamed, Lifetime, LifetimeParam, Stmt, Variant, }; pub fn expand_derive_encode(input: &DeriveInput) -> syn::Result<TokenStream> { @@ -66,7 +66,7 @@ fn expand_derive_encode_transparent( let mut generics = generics.clone(); generics .params - .insert(0, LifetimeDef::new(lifetime.clone()).into()); + .insert(0, LifetimeParam::new(lifetime.clone()).into()); generics .params diff --git a/sqlx-macros-core/src/query/args.rs b/sqlx-macros-core/src/query/args.rs index 1a5b061e8d..764af5f2cd 100--- 
a/sqlx-macros-core/src/query/args.rs +++ b/sqlx-macros-core/src/query/args.rs @@ -5,7 +5,7 @@ use proc_macro2::{Ident, TokenStream}; use quote::{format_ident, quote, quote_spanned}; use sqlx_core::describe::Describe; use syn::spanned::Spanned; -use syn::{Expr, ExprCast, ExprGroup, ExprType, Type}; +use syn::{Expr, ExprCast, ExprGroup, Type}; /// Returns a tokenstream which typechecks the arguments passed to the macro /// and binds them to `DB::Arguments` with the ident `query_args`. @@ -150,8 +150,7 @@ fn create_warning(name: Ident, ty: &Type, expr: &Expr) -> TokenStream { fn get_type_override(expr: &Expr) -> Option<(&Type, bool)> { match expr { Expr::Group(group) => get_type_override(&group.expr), - Expr::Cast(cast) => Some((&cast.ty, false)), - Expr::Type(ascription) => Some((&ascription.ty, true)), + Expr::Cast(cast) => Some((&cast.ty, false)), _ => None, } } @@ -167,8 +166,6 @@ fn strip_wildcard(expr: Expr) -> Expr { group_token, expr: Box::new(strip_wildcard(*expr)), }), - // type ascription syntax is experimental so we always strip it - Expr::Type(ExprType { expr, .. 
}) => *expr, // we want to retain casts if they semantically matter Expr::Cast(ExprCast { attrs, diff --git a/sqlx-macros-core/src/test_attr.rs b/sqlx-macros-core/src/test_attr.rs index 9f25d5661f..558d7af080 100644 --- a/sqlx-macros-core/src/test_attr.rs +++ b/sqlx-macros-core/src/test_attr.rs @@ -1,12 +1,20 @@ -use proc_macro2::{Span, TokenStream}; +use proc_macro2::TokenStream; use quote::quote; -use syn::LitStr; +use syn::{parse::Parser, Meta}; +#[cfg(feature = "migrate")] +use proc_macro2::Span; +#[cfg(feature = "migrate")] +use syn::{punctuated::Punctuated, Expr, ExprLit, Lit, LitStr, MetaNameValue, Token}; + +#[cfg(feature = "migrate")] struct Args { fixtures: Vec<LitStr>, + #[cfg(feature = "migrate")] migrations: MigrationsOpt, } +#[cfg(feature = "migrate")] enum MigrationsOpt { InferredPath, ExplicitPath(LitStr), @@ -14,7 +22,12 @@ enum MigrationsOpt { Disabled, } -pub fn expand(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> { +type AttributeArgs = syn::punctuated::Punctuated<Meta, syn::Token![,]>; + +pub fn expand(args: TokenStream, input: syn::ItemFn) -> crate::Result<TokenStream> { + let parser = AttributeArgs::parse_terminated; + let args = parser.parse2(args)?; + if input.sig.inputs.is_empty() { if !args.is_empty() { if cfg!(feature = "migrate") { @@ -61,7 +74,7 @@ fn expand_simple(input: syn::ItemFn) -> TokenStream { } #[cfg(feature = "migrate")] -fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> { +fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> { let ret = &input.sig.output; let name = &input.sig.ident; let inputs = &input.sig.inputs; @@ -127,87 +140,72 @@ fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Resul } #[cfg(feature = "migrate")] -fn parse_args(attr_args: syn::AttributeArgs) -> syn::Result<Args> { +fn parse_args(args: AttributeArgs) -> Result<Args, syn::Error> { let mut fixtures = vec![]; let mut migrations = MigrationsOpt::InferredPath; - for arg in attr_args { + for arg in args { + let path = 
arg.path().clone(); + match arg { - syn::NestedMeta::Meta(syn::Meta::List(list)) if list.path.is_ident("fixtures") => { + Meta::List(list) if path.is_ident("fixtures") => { if !fixtures.is_empty() { - return Err(syn::Error::new_spanned(list, "duplicate `fixtures` arg")); + return Err(syn::Error::new_spanned(path, "duplicate `fixtures` arg")); } - for nested in list.nested { - match nested { - syn::NestedMeta::Lit(syn::Lit::Str(litstr)) => fixtures.push(litstr), - other => { - return Err(syn::Error::new_spanned(other, "expected string literal")) - } - } - } + let parser = <Punctuated<LitStr, Token![,]>>::parse_terminated; + let list = parser.parse2(list.tokens)?; + fixtures.extend(list); } - syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue)) - if namevalue.path.is_ident("migrations") => - { + Meta::NameValue(MetaNameValue { value, .. }) if path.is_ident("migrations") => { if !matches!(migrations, MigrationsOpt::InferredPath) { return Err(syn::Error::new_spanned( - namevalue, + path, "cannot have more than one `migrations` or `migrator` arg", )); } - migrations = match namevalue.lit { - syn::Lit::Bool(litbool) => { - if !litbool.value { - // migrations = false - MigrationsOpt::Disabled - } else { - // migrations = true - return Err(syn::Error::new_spanned( - litbool, - "`migrations = true` is redundant", - )); - } - } - // migrations = "<path>" - syn::Lit::Str(litstr) => MigrationsOpt::ExplicitPath(litstr), - _ => { - return Err(syn::Error::new_spanned( - namevalue, - "expected string or `false`", - )) - } + let Expr::Lit(ExprLit { lit, .. 
}) = value else { + return Err(syn::Error::new_spanned(path, "expected string or `false`")) }; - } - syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue)) - if namevalue.path.is_ident("migrator") => - { - if !matches!(migrations, MigrationsOpt::InferredPath) { + + migrations = match lit { + // migrations = false + Lit::Bool(b) if !b.value => MigrationsOpt::Disabled, + // migrations = true + Lit::Bool(b) => { return Err(syn::Error::new_spanned( - namevalue, - "cannot have more than one `migrations` or `migrator` arg", + b, + "`migrations = true` is redundant", )); } - - migrations = match namevalue.lit { - // migrator = "<rust path>" - syn::Lit::Str(litstr) => MigrationsOpt::ExplicitMigrator(litstr.parse()?), - _ => { - return Err(syn::Error::new_spanned( - namevalue, - "expected string", - )) - } - }; + // migrations = "path" + Lit::Str(s) => MigrationsOpt::ExplicitPath(s), + lit => return Err(syn::Error::new_spanned(lit, "expected string or `false`")), + }; + } + // migrator = "path" + Meta::NameValue(MetaNameValue { value, .. }) if path.is_ident("migrator") => { + if !matches!(migrations, MigrationsOpt::InferredPath) { + return Err(syn::Error::new_spanned( + path, + "cannot have more than one `migrations` or `migrator` arg", )); } - + + let Expr::Lit(ExprLit { lit: Lit::Str(lit), .. 
}) = value else { + return Err(syn::Error::new_spanned(path, "expected string")) }; + + migrations = MigrationsOpt::ExplicitMigrator(lit.parse()?); + } + arg => { return Err(syn::Error::new_spanned( - other, - "expected `fixtures(\"<filename>\", ...)` or `migrations = \"<path>\" | false` or `migrator = \"<rust path>\"`", - )) + arg, + r#"expected `fixtures("<filename>", ...)` or `migrations = "<path>" | false` or `migrator = "<rust path>"`"#, + )); } - } + }; } Ok(Args { diff --git a/sqlx-macros/Cargo.toml b/sqlx-macros/Cargo.toml index 82fecab730..4632437824 100644 --- a/sqlx-macros/Cargo.toml +++ b/sqlx-macros/Cargo.toml @@ -44,5 +44,5 @@ sqlx-core = { workspace = true, features = ["any"] } sqlx-macros-core = { workspace = true } proc-macro2 = { version = "1.0.36", default-features = false } -syn = { version = "1.0.84", default-features = false, features = ["parsing", "proc-macro"] } -quote = { version = "1.0.14", default-features = false } +syn = { version = "2.0", default-features = false, features = ["parsing", "proc-macro"] } +quote = { version = "1.0.26", default-features = false } diff --git a/sqlx-macros/src/lib.rs b/sqlx-macros/src/lib.rs index 229c1030b1..8d1a66abdc 100644 --- a/sqlx-macros/src/lib.rs +++ b/sqlx-macros/src/lib.rs @@ -51,7 +51,6 @@ pub fn derive_type(tokenstream: TokenStream) -> TokenStream { #[proc_macro_derive(FromRow, attributes(sqlx))] pub fn derive_from_row(input: TokenStream) -> TokenStream { let input = syn::parse_macro_input!(input as syn::DeriveInput); - match derives::expand_derive_from_row(&input) { Ok(ts) => ts.into(), Err(e) => e.to_compile_error().into(), @@ -79,10 +78,9 @@ pub fn migrate(input: TokenStream) -> TokenStream { #[proc_macro_attribute] pub fn test(args: TokenStream, input: TokenStream) -> TokenStream { - let args = syn::parse_macro_input!(args as syn::AttributeArgs); let input = syn::parse_macro_input!(input as syn::ItemFn); - match test_attr::expand(args, input) { + match test_attr::expand(args.into(), input) { Ok(ts) => ts.into(), Err(e) => { if let 
Some(parse_err) = e.downcast_ref::<syn::Error>() {