From 1792a2a0d4fc9a3ef308716e11830a419ea67e3f Mon Sep 17 00:00:00 2001 From: masato Date: Sun, 5 Oct 2025 04:27:03 +0900 Subject: [PATCH 1/4] Support for specifying multiple databases in test macros. --- .github/workflows/sqlx.yml | 54 ++++++ Cargo.toml | 8 + sqlx-core/src/testing/mod.rs | 245 +++++++++++++++++++++++- sqlx-macros-core/src/test_attr.rs | 190 +++++++++++++++--- sqlx-mysql/src/testing/mod.rs | 67 +++++-- sqlx-postgres/src/testing/mod.rs | 67 +++++-- sqlx-sqlite/src/testing/mod.rs | 9 +- tests/mixed/fixtures/mysql/comments.sql | 16 ++ tests/mixed/fixtures/mysql/posts.sql | 9 + tests/mixed/fixtures/postgres/users.sql | 2 + tests/mixed/test-attr.rs | 73 +++++++ tests/mysql/test-attr.rs | 53 +++++ tests/postgres/test-attr.rs | 46 ++++- tests/sqlite/test-attr.rs | 44 +++++ 14 files changed, 813 insertions(+), 70 deletions(-) create mode 100644 tests/mixed/fixtures/mysql/comments.sql create mode 100644 tests/mixed/fixtures/mysql/posts.sql create mode 100644 tests/mixed/fixtures/postgres/users.sql create mode 100644 tests/mixed/test-attr.rs diff --git a/.github/workflows/sqlx.yml b/.github/workflows/sqlx.yml index b2f81b75ad..5da68f1276 100644 --- a/.github/workflows/sqlx.yml +++ b/.github/workflows/sqlx.yml @@ -514,3 +514,57 @@ jobs: env: DATABASE_URL: mysql://root@localhost:3306/sqlx?sslmode=verify_ca&ssl-ca=.%2Ftests%2Fcerts%2Fca.crt&ssl-key=.%2Ftests%2Fcerts%2Fkeys%2Fclient.key&ssl-cert=.%2Ftests%2Fcerts%2Fclient.crt RUSTFLAGS: --cfg mariadb="${{ matrix.mariadb }}" + + mixed: + name: Mixed in Postgres and MySQL + runs-on: ubuntu-24.04 + strategy: + matrix: + postgres: [ 17, 13 ] + mysql: [ 8 ] + runtime: [ async-global-executor, smol, tokio ] + steps: + - uses: actions/checkout@v4 + + - name: Setup Rust + run: rustup show active-toolchain || rustup toolchain install + + - uses: Swatinem/rust-cache@v2 + + - run: cargo build --features any,postgres,mysql,macros,migrate,_unstable-all-types,runtime-${{ matrix.runtime }} + + - run: | + docker compose -f 
tests/docker-compose.yml run -d -p 5432:5432 --name postgres_${{ matrix.postgres }} postgres_${{ matrix.postgres }} + docker exec postgres_${{ matrix.postgres }} bash -c "until pg_isready; do sleep 1; done" + + - run: | + docker compose -f tests/docker-compose.yml run -d -p 5433:5432 --name postgres_users_${{ matrix.postgres }} postgres_${{ matrix.postgres }} + docker exec postgres_users_${{ matrix.postgres }} bash -c "until pg_isready; do sleep 1; done" + + - run: | + docker compose -f tests/docker-compose.yml run -d -p 3306:3306 --name mysql_posts_${{ matrix.mysql }} mysql_${{ matrix.mysql }} + docker exec mysql_posts_${{ matrix.mysql }} bash -c "until mysqladmin ping; do sleep 1; done" + + - run: | + docker compose -f tests/docker-compose.yml run -d -p 3307:3306 --name mysql_comments_${{ matrix.mysql }} mysql_${{ matrix.mysql }} + docker exec mysql_comments_${{ matrix.mysql }} bash -c "until mysqladmin ping; do sleep 1; done" + + # Create data dir for offline mode + - run: mkdir .sqlx + + # Run the `test-attr` test again to cover cleanup. 
+ - run: > + cargo test + --test mixed-test-attr + --no-default-features + --features any,postgres,mysql,macros,migrate,_unstable-all-types,runtime-${{ matrix.runtime }} + env: + DATABASE_URL: postgres://postgres:password@localhost:5432/sqlx + PG_USERS_DATABASE_URL: postgres://postgres:password@localhost:5433/sqlx + MYSQL_POSTS_DATABASE_URL: mysql://root:password@localhost:3306/sqlx + MYSQL_COMMENTS_DATABASE_URL: mysql://root:password@localhost:3307/sqlx + SQLX_OFFLINE_DIR: .sqlx + RUSTFLAGS: -D warnings --cfg postgres="${{ matrix.postgres }}" --cfg mysql_${{ matrix.mysql }} + + # Remove test artifacts + - run: cargo clean -p sqlx diff --git a/Cargo.toml b/Cargo.toml index b24b59cfa0..fd3c05a85a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -453,3 +453,11 @@ required-features = ["postgres"] name = "postgres-rustsec" path = "tests/postgres/rustsec.rs" required-features = ["postgres", "macros", "migrate"] + +# +# Mixed in Postgres and MySQL +# +[[test]] +name = "mixed-test-attr" +path = "tests/mixed/test-attr.rs" +required-features = ["postgres", "mysql", "macros", "migrate"] diff --git a/sqlx-core/src/testing/mod.rs b/sqlx-core/src/testing/mod.rs index 17022b4652..436d4029f7 100644 --- a/sqlx-core/src/testing/mod.rs +++ b/sqlx-core/src/testing/mod.rs @@ -28,7 +28,7 @@ pub trait TestSupport: Database { args: &TestArgs, ) -> impl Future, Error>> + Send + '_; - fn cleanup_test(db_name: &str) -> impl Future> + Send + '_; + fn cleanup_test(args: &TestArgs) -> impl Future> + Send + '_; /// Cleanup any test databases that are no longer in-use. /// @@ -38,6 +38,13 @@ pub trait TestSupport: Database { /// The user credentials it contains must have the privilege to create and drop databases. fn cleanup_test_dbs() -> impl Future, Error>> + Send + 'static; + /// Cleanup any test databases that are no longer in-use. + /// + /// Returns a count of the databases deleted, if possible. 
+ fn cleanup_test_dbs_by_url( + url: &str, + ) -> impl Future, Error>> + Send + '_; + /// Take a snapshot of the current state of the database (data only). /// /// This snapshot can then be used to generate test fixtures. @@ -66,6 +73,7 @@ pub struct TestArgs { pub test_path: &'static str, pub migrator: Option<&'static Migrator>, pub fixtures: &'static [TestFixture], + pub database_url_var: &'static str, } pub trait TestFn { @@ -158,6 +166,7 @@ impl TestArgs { test_path, migrator: None, fixtures: &[], + database_url_var: "DATABASE_URL", } } @@ -165,9 +174,17 @@ impl TestArgs { self.migrator = Some(migrator); } + pub fn no_migrator(&mut self) { + self.migrator = None; + } + pub fn fixtures(&mut self, fixtures: &'static [TestFixture]) { self.fixtures = fixtures; } + + pub fn database_url_var(&mut self, database_url_var: &'static str) { + self.database_url_var = database_url_var; + } } impl TestTermination for () { @@ -231,7 +248,7 @@ where let res = test_fn(test_context.pool_opts, test_context.connect_opts).await; if res.is_success() { - if let Err(e) = DB::cleanup_test(&DB::db_name(&args)).await { + if let Err(e) = DB::cleanup_test(&args).await { eprintln!( "failed to delete database {:?}: {}", test_context.db_name, e @@ -273,3 +290,227 @@ async fn setup_test_db( .await .expect("failed to close setup connection"); } + +macro_rules! 
impl_test_fn { + ( + $name:ident; + $run_fn:ident; + $run_with_pool_fn:ident; + $( + ( + $lt:lifetime $db:ident, + $args:ident, $testctx:ident, $testpath:ident, + $poolopts:ident, $connopts:ident, + $pool:ident, $conn:ident + ), + )*; + ) => { + pub trait $name { + type Output; + + fn run_test(self, $($args: TestArgs,)*) -> Self::Output; + } + + impl<$($db,)* Fut> $name for fn($(Pool<$db>,)*) -> Fut + where + $( + $db: TestSupport + Database, + $db::Connection: Migrate, + for<$lt> &$lt mut $db::Connection: Executor<$lt, Database = $db>, + )* + Fut: Future, + Fut::Output: TestTermination, + { + type Output = Fut::Output; + + fn run_test(self, $($args: TestArgs,)*) -> Self::Output { + $run_with_pool_fn($($args,)* self) + } + } + + impl<$($db,)* Fut> $name for fn($(PoolConnection<$db>,)*) -> Fut + where + $( + $db: TestSupport + Database, + $db::Connection: Migrate, + for<$lt> &$lt mut $db::Connection: Executor<$lt, Database = $db>, + )* + Fut: Future, + Fut::Output: TestTermination, + { + type Output = Fut::Output; + + fn run_test(self, $($args: TestArgs,)*) -> Self::Output { + $run_with_pool_fn($($args,)* |$($pool,)*| async move { + $( + let $conn = $pool + .acquire() + .await + .expect("failed to acquire test pool connection"); + )* + + + let res = (self)($($conn,)*).await; + + $( + $pool.close().await; + )* + + res + }) + } + } + + impl<$($db,)* Fut> $name + for fn( + $( + (PoolOptions<$db>, <$db::Connection as Connection>::Options), + )* + ) -> Fut + where + $( + $db: TestSupport + Database, + $db::Connection: Migrate, + for<$lt> &$lt mut $db::Connection: Executor<$lt, Database = $db>, + )* + Fut: Future, + Fut::Output: TestTermination, + { + type Output = Fut::Output; + + fn run_test(self, $($args: TestArgs,)*) -> Self::Output { + $run_fn($($args,)* self) + } + } + + impl $name for fn() -> Fut + where + Fut: Future, + { + type Output = Fut::Output; + + fn run_test(self, $($args: TestArgs,)*) -> Self::Output { + $( + assert!( + $args.fixtures.is_empty(), + 
"fixtures cannot be applied for a bare function", + ); + )* + crate::rt::test_block_on(self()) + } + } + + fn $run_with_pool_fn<$($db,)* F, Fut>($($args: TestArgs,)* test_fn: F) -> Fut::Output + where + $( + $db: TestSupport, + $db::Connection: Migrate, + for<$lt> &$lt mut $db::Connection: Executor<$lt, Database = $db>, + )* + F: FnOnce($(Pool<$db>,)*) -> Fut, + Fut: Future, + Fut::Output: TestTermination, + { + $( + let $testpath: &'static str = $args.test_path; + )* + + $run_fn::<$($db,)* _, _>( + $($args,)* + |$(($poolopts, $connopts),)*| async move { + $( + let $pool = $poolopts + .connect_with($connopts) + .await + .expect("failed to connect test pool"); + )* + + let res = test_fn($($pool.clone(),)*).await; + + $( + let close_timed_out = crate::rt::timeout(Duration::from_secs(10), $pool.close()) + .await + .is_err(); + if close_timed_out { + eprintln!("test {} held onto Pool after exiting", $testpath); + } + )* + + res + }, + ) + } + + fn $run_fn<$($db,)* F, Fut>($($args: TestArgs,)* test_fn: F) -> Fut::Output + where + $( + $db: TestSupport, + $db::Connection: Migrate, + for<$lt> &$lt mut $db::Connection: Executor<$lt, Database = $db>, + )* + F: FnOnce( + $((PoolOptions<$db>, <$db::Connection as Connection>::Options),)* + ) -> Fut, + Fut: Future, + Fut::Output: TestTermination, + { + crate::rt::test_block_on(async move { + $( + let $testctx = $db::test_context(&$args) + .await + .expect("failed to connect to setup test database"); + setup_test_db::<$db>(&$testctx.connect_opts, &$args).await; + )* + + let res = test_fn( + $(($testctx.pool_opts, $testctx.connect_opts),)* + ) + .await; + + if res.is_success() { + $( + if let Err(e) = $db::cleanup_test(&$args).await { + eprintln!( + "failed to delete database {:?}: {}", + $testctx.db_name, e + ); + } + )* + } + + res + }) + } + + }; +} + +impl_test_fn!( + TestFn2; + run_test2; + run_test_with_pool2; + ('c DB1, args1, tc1, tp1, po1, co1, p1, c1), + ('d DB2, args2, tc2, tp2, po2, co2, p2, c2), + ; +); + 
+impl_test_fn!( + TestFn3; + run_test3; + run_test_with_pool3; + ('c DB1, args1, tc1, tp1, po1, co1, p1, c1), + ('d DB2, args2, tc2, tp2, po2, co2, p2, c2), + ('d DB3, args3, tc3, tp3, po3, co3, p3, c3), + ; +); + +impl_test_fn!( + TestFn4; + run_test4; + run_test_with_pool4; + ('c DB1, args1, tc1, tp1, po1, co1, p1, c1), + ('d DB2, args2, tc2, tp2, po2, co2, p2, c2), + ('d DB3, args3, tc3, tp3, po3, co3, p3, c3), + ('e DB4, args4, tc4, tp4, po4, co4, p4, c4), + ; +); diff --git a/sqlx-macros-core/src/test_attr.rs b/sqlx-macros-core/src/test_attr.rs index 046ff5c2fb..aa08589c51 100644 --- a/sqlx-macros-core/src/test_attr.rs +++ b/sqlx-macros-core/src/test_attr.rs @@ -6,6 +6,7 @@ use syn::parse::Parser; struct Args { fixtures: Vec<(FixturesType, Vec)>, migrations: MigrationsOpt, + database_url_var: DatabaseUrlOpt, } #[cfg(feature = "migrate")] @@ -24,6 +25,19 @@ enum MigrationsOpt { Disabled, } +#[cfg(feature = "migrate")] +enum DatabaseUrlOpt { + EnvironmentVariable(syn::LitStr), + ExplicitVariable(syn::Path), +} + +#[cfg(feature = "migrate")] +struct ParsedArgs { + fixtures: Vec, + migrations: TokenStream, + database_url_var: TokenStream, +} + type AttributeArgs = syn::punctuated::Punctuated; pub fn expand(args: TokenStream, input: syn::ItemFn) -> crate::Result { @@ -85,10 +99,122 @@ fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { + return Ok(quote! { + #(#attrs)* + #[::core::prelude::v1::test] + fn #name() #ret { + async fn #name(#inputs) #ret { + #body + } + + let mut args = ::sqlx::testing::TestArgs::new(concat!(module_path!(), "::", stringify!(#name))); + + // We need to give a coercion site or else we get "unimplemented trait" errors. 
+ let f: fn(#(#fn_arg_types),*) -> _ = #name; + + ::sqlx::testing::TestFn::run_test(f, args) + } + }); + } + args_num @ 1..=4 => { + use proc_macro2::Span; + use syn::Ident; + + let mut run_fn_args = Vec::with_capacity(args_num); + for i in 1..=args_num { + run_fn_args.push(Ident::new(&format!("args{i}"), Span::call_site())); + } + let run_fn = if args_num == 1 { + quote! { ::sqlx::testing::TestFn::run_test(f, #(#run_fn_args,)*) } + } else { + let test_fn = Ident::new(&format!("TestFn{args_num}"), Span::call_site()); + quote! { ::sqlx::testing::#test_fn::run_test(f, #(#run_fn_args,)*) } + }; + + let mut args = Vec::new(); + for (i, parsed_args) in parsed_args_list.iter().enumerate() { + let args_name = Ident::new(&format!("args{}", 1 + i), Span::call_site()); + let database_url_var = &parsed_args.database_url_var; + let migrations = &parsed_args.migrations; + let fixtures = parsed_args.fixtures.as_slice(); + + args.push(quote! { + let mut #args_name = ::sqlx::testing::TestArgs::new(concat!(module_path!(), "::", stringify!(#name), "#{", #i, "}")); + #args_name.#migrations + #args_name.fixtures(&[#(#fixtures),*]); + #args_name.database_url_var(#database_url_var); + }); + } + + return Ok(quote! { + #(#attrs)* + #[::core::prelude::v1::test] + fn #name() #ret { + async fn #name(#inputs) #ret { + #body + } + + #(#args)* + + // We need to give a coercion site or else we get "unimplemented trait" errors. 
+ let f: fn(#(#fn_arg_types),*) -> _ = #name; + + #run_fn + } + }); + } + _ => { + return Err(Box::new(syn::Error::new_spanned( + args.first().unwrap(), + "expect to no more than 4 env attributes.", + ))); + } + } +} + +#[cfg(feature = "migrate")] +fn parse_attr_args( + args: &AttributeArgs, + input: &syn::ItemFn, + config: &sqlx_core::config::Config, +) -> crate::Result> { + let parser = AttributeArgs::parse_terminated; + + let mut parsed_args: Vec = Vec::new(); + + for arg in args { + match arg { + syn::Meta::List(list) if list.path.is_ident("env") => { + let args = parser.parse2(list.tokens.clone())?; + let parsed = parse_one_attr_args(args, input, config)?; + parsed_args.push(parsed); + } + _ => { + let parsed = parse_one_attr_args(args.clone(), input, config)?; + return Ok(vec![parsed]); + } + } + } + + Ok(parsed_args) +} + +#[cfg(feature = "migrate")] +fn parse_one_attr_args( + args: AttributeArgs, + input: &syn::ItemFn, + config: &sqlx_core::config::Config, +) -> crate::Result { + let inputs = &input.sig.inputs; + + let args = parse_args(args)?; + let mut fixtures = Vec::new(); for (fixture_type, fixtures_local) in args.fixtures { @@ -146,7 +272,7 @@ fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { let migrator = crate::migrate::expand(Some(path))?; - quote! { args.migrator(&#migrator); } + quote! { migrator(&#migrator); } } MigrationsOpt::InferredPath if !inputs.is_empty() => { let path = crate::migrate::default_path(&config); @@ -155,41 +281,34 @@ fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { - quote! { args.migrator(&#path); } + quote! { migrator(&#path); } } - _ => quote! {}, + _ => quote! { no_migrator(); }, }; - Ok(quote! 
{ - #(#attrs)* - #[::core::prelude::v1::test] - fn #name() #ret { - async fn #name(#inputs) #ret { - #body - } - - let mut args = ::sqlx::testing::TestArgs::new(concat!(module_path!(), "::", stringify!(#name))); - - #migrations - - args.fixtures(&[#(#fixtures),*]); - - // We need to give a coercion site or else we get "unimplemented trait" errors. - let f: fn(#(#fn_arg_types),*) -> _ = #name; - - ::sqlx::testing::TestFn::run_test(f, args) + let database_url_var = match args.database_url_var { + DatabaseUrlOpt::EnvironmentVariable(name) => { + quote! { #name } } + DatabaseUrlOpt::ExplicitVariable(path) => quote! { #path }, + }; + + Ok(ParsedArgs { + fixtures, + migrations, + database_url_var, }) } #[cfg(feature = "migrate")] fn parse_args(attr_args: AttributeArgs) -> syn::Result { + use proc_macro2::Span; use syn::{ parenthesized, parse::Parse, punctuated::Punctuated, token::Comma, Expr, Lit, LitStr, Meta, MetaNameValue, Token, @@ -197,6 +316,8 @@ fn parse_args(attr_args: AttributeArgs) -> syn::Result { let mut fixtures = Vec::new(); let mut migrations = MigrationsOpt::InferredPath; + let mut database_url_var = + DatabaseUrlOpt::EnvironmentVariable(LitStr::new("DATABASE_URL", Span::call_site())); for arg in attr_args { let path = arg.path().clone(); @@ -292,11 +413,27 @@ fn parse_args(attr_args: AttributeArgs) -> syn::Result { migrations = MigrationsOpt::ExplicitMigrator(lit.parse()?); } + // var = "" + Meta::NameValue(MetaNameValue { value, .. }) if path.is_ident("var") => { + let Expr::Lit(syn::ExprLit { + lit: Lit::Str(lit), .. 
+ }) = value + else { + return Err(syn::Error::new_spanned(path, "expected string")); + }; + + database_url_var = DatabaseUrlOpt::ExplicitVariable(lit.parse()?); + } + // var("") + Meta::List(list) if path.is_ident("var") => { + let s: LitStr = list.parse_args()?; + database_url_var = DatabaseUrlOpt::EnvironmentVariable(s); + } arg => { return Err(syn::Error::new_spanned( arg, - r#"expected `fixtures("", ...)` or `migrations = "" | false` or `migrator = ""`"#, - )) + r#"expected `fixtures("", ...)` or `migrations = "" | false` or `migrator = ""` or `var("DATABASE_URL")` or `var = ""`"#, + )); } } } @@ -304,6 +441,7 @@ fn parse_args(attr_args: AttributeArgs) -> syn::Result { Ok(Args { fixtures, migrations, + database_url_var, }) } diff --git a/sqlx-mysql/src/testing/mod.rs b/sqlx-mysql/src/testing/mod.rs index f509f9da45..bfaf8b6707 100644 --- a/sqlx-mysql/src/testing/mod.rs +++ b/sqlx-mysql/src/testing/mod.rs @@ -1,7 +1,7 @@ use std::future::Future; use std::ops::Deref; use std::str::FromStr; -use std::sync::OnceLock; +use std::sync::{LazyLock, Mutex}; use std::time::Duration; use crate::error::Error; @@ -13,11 +13,12 @@ use sqlx_core::connection::Connection; use sqlx_core::query_builder::QueryBuilder; use sqlx_core::query_scalar::query_scalar; use sqlx_core::sql_str::AssertSqlSafe; +use sqlx_core::HashMap; pub(crate) use sqlx_core::testing::*; -// Using a blocking `OnceLock` here because the critical sections are short. 
-static MASTER_POOL: OnceLock> = OnceLock::new(); +static MASTER_POOLS: LazyLock>>> = + LazyLock::new(|| Mutex::new(HashMap::new())); impl TestSupport for MySql { fn test_context( @@ -26,20 +27,25 @@ impl TestSupport for MySql { test_context(args) } - async fn cleanup_test(db_name: &str) -> Result<(), Error> { - let mut conn = MASTER_POOL - .get() - .expect("cleanup_test() invoked outside `#[sqlx::test]`") - .acquire() - .await?; + async fn cleanup_test(args: &TestArgs) -> Result<(), Error> { + let db_name = Self::db_name(args); + + let master_pool = get_master_pool(args.database_url_var); + let mut conn = master_pool.acquire().await?; - do_cleanup(&mut conn, db_name).await + do_cleanup(&mut conn, &db_name).await } async fn cleanup_test_dbs() -> Result, Error> { let url = dotenvy::var("DATABASE_URL").expect("DATABASE_URL must be set"); - let mut conn = MySqlConnection::connect(&url).await?; + let count = Self::cleanup_test_dbs_by_url(&url).await?; + + Ok(count) + } + + async fn cleanup_test_dbs_by_url(url: &str) -> Result, Error> { + let mut conn = MySqlConnection::connect(url).await?; let delete_db_names: Vec = query_scalar("select db_name from _sqlx_test_databases") .fetch_all(&mut conn) @@ -97,7 +103,9 @@ impl TestSupport for MySql { } async fn test_context(args: &TestArgs) -> Result, Error> { - let url = dotenvy::var("DATABASE_URL").expect("DATABASE_URL must be set"); + let database_url_var = args.database_url_var; + + let url = dotenvy::var(database_url_var).expect("DATABASE_URL must be set"); let master_opts = MySqlConnectOptions::from_str(&url).expect("failed to parse DATABASE_URL"); @@ -110,7 +118,7 @@ async fn test_context(args: &TestArgs) -> Result, Error> { .after_release(|_conn, _| Box::pin(async move { Ok(false) })) .connect_lazy_with(master_opts); - let master_pool = match once_lock_try_insert_polyfill(&MASTER_POOL, pool) { + let master_pool = match try_insert_polyfill(database_url_var, pool) { Ok(inserted) => inserted, Err((existing, pool)) => { // 
Sanity checks. @@ -144,7 +152,7 @@ async fn test_context(args: &TestArgs) -> Result, Error> { -- BLOB/TEXT columns can only be used as index keys with a prefix length: -- https://dev.mysql.com/doc/refman/8.4/en/column-indexes.html#column-indexes-prefix primary key(db_name(63)) - ); + ); "#, ) .await?; @@ -243,11 +251,30 @@ async fn cleanup_old_dbs(conn: &mut MySqlConnection) -> Result<(), Error> { Ok(()) } -fn once_lock_try_insert_polyfill(this: &OnceLock, value: T) -> Result<&T, (&T, T)> { - let mut value = Some(value); - let res = this.get_or_init(|| value.take().unwrap()); - match value { - None => Ok(res), - Some(value) => Err((res, value)), +fn get_master_pool(database_url_var: &'static str) -> Pool { + let guard = MASTER_POOLS + .lock() + .expect("failed to acquire lock of master pools"); + guard + .get(database_url_var) + .expect("cleanup_test() invoked outside `#[sqlx::test]`") + .clone() +} + +fn try_insert_polyfill( + database_url_var: &'static str, + pool: Pool, +) -> Result, (Pool, Pool)> { + let mut guard = MASTER_POOLS + .lock() + .expect("failed to acquire lock of master pools"); + let master_pool = guard.get(database_url_var); + + match master_pool { + None => { + guard.insert(database_url_var, pool.clone()); + Ok(pool) + } + Some(master_pool) => Err((master_pool.clone(), pool)), } } diff --git a/sqlx-postgres/src/testing/mod.rs b/sqlx-postgres/src/testing/mod.rs index 3e1cf0ddf7..e3c9742e6b 100644 --- a/sqlx-postgres/src/testing/mod.rs +++ b/sqlx-postgres/src/testing/mod.rs @@ -1,13 +1,14 @@ use std::future::Future; use std::ops::Deref; use std::str::FromStr; -use std::sync::OnceLock; +use std::sync::{LazyLock, Mutex}; use std::time::Duration; use sqlx_core::connection::Connection; use sqlx_core::query_builder::QueryBuilder; use sqlx_core::query_scalar::query_scalar; use sqlx_core::sql_str::AssertSqlSafe; +use sqlx_core::HashMap; use crate::error::Error; use crate::executor::Executor; @@ -17,8 +18,8 @@ use crate::{PgConnectOptions, PgConnection, 
Postgres}; pub(crate) use sqlx_core::testing::*; -// Using a blocking `OnceLock` here because the critical sections are short. -static MASTER_POOL: OnceLock> = OnceLock::new(); +static MASTER_POOLS: LazyLock>>> = + LazyLock::new(|| Mutex::new(HashMap::new())); // Automatically delete any databases created before the start of the test binary. impl TestSupport for Postgres { @@ -28,19 +29,24 @@ impl TestSupport for Postgres { test_context(args) } - async fn cleanup_test(db_name: &str) -> Result<(), Error> { - let mut conn = MASTER_POOL - .get() - .expect("cleanup_test() invoked outside `#[sqlx::test]`") - .acquire() - .await?; + async fn cleanup_test(args: &TestArgs) -> Result<(), Error> { + let db_name = Self::db_name(args); + + let master_pool = get_master_pool(args.database_url_var); + let mut conn = master_pool.acquire().await?; - do_cleanup(&mut conn, db_name).await + do_cleanup(&mut conn, &db_name).await } async fn cleanup_test_dbs() -> Result, Error> { let url = dotenvy::var("DATABASE_URL").expect("DATABASE_URL must be set"); + let count = Self::cleanup_test_dbs_by_url(&url).await?; + + Ok(count) + } + + async fn cleanup_test_dbs_by_url(url: &str) -> Result, Error> { let mut conn = PgConnection::connect(&url).await?; let delete_db_names: Vec = query_scalar("select db_name from _sqlx_test.databases") @@ -90,7 +96,9 @@ impl TestSupport for Postgres { } async fn test_context(args: &TestArgs) -> Result, Error> { - let url = dotenvy::var("DATABASE_URL").expect("DATABASE_URL must be set"); + let database_url_var = args.database_url_var; + + let url = dotenvy::var(database_url_var).expect("DATABASE_URL must be set"); let master_opts = PgConnectOptions::from_str(&url).expect("failed to parse DATABASE_URL"); @@ -103,7 +111,7 @@ async fn test_context(args: &TestArgs) -> Result, Error> { .after_release(|_conn, _| Box::pin(async move { Ok(false) })) .connect_lazy_with(master_opts); - let master_pool = match once_lock_try_insert_polyfill(&MASTER_POOL, pool) { + let 
master_pool = match try_insert_polyfill(database_url_var, pool) { Ok(inserted) => inserted, Err((existing, pool)) => { // Sanity checks. @@ -143,7 +151,7 @@ async fn test_context(args: &TestArgs) -> Result, Error> { created_at timestamptz not null default now() ); - create index if not exists databases_created_at + create index if not exists databases_created_at on _sqlx_test.databases(created_at); create sequence if not exists _sqlx_test.database_ids; @@ -168,6 +176,8 @@ async fn test_context(args: &TestArgs) -> Result, Error> { debug_assert!(create_command.starts_with("create database \"")); conn.execute(AssertSqlSafe(create_command)).await?; + eprintln!("created database {db_name}"); + Ok(TestContext { pool_opts: PoolOptions::new() // Don't allow a single test to take all the connections. @@ -197,11 +207,30 @@ async fn do_cleanup(conn: &mut PgConnection, db_name: &str) -> Result<(), Error> Ok(()) } -fn once_lock_try_insert_polyfill(this: &OnceLock, value: T) -> Result<&T, (&T, T)> { - let mut value = Some(value); - let res = this.get_or_init(|| value.take().unwrap()); - match value { - None => Ok(res), - Some(value) => Err((res, value)), +fn get_master_pool(database_url_var: &'static str) -> Pool { + let guard = MASTER_POOLS + .lock() + .expect("failed to acquire lock of master pools"); + guard + .get(database_url_var) + .expect("cleanup_test() invoked outside `#[sqlx::test]`") + .clone() +} + +fn try_insert_polyfill( + database_url_var: &'static str, + pool: Pool, +) -> Result, (Pool, Pool)> { + let mut guard = MASTER_POOLS + .lock() + .expect("failed to acquire lock of master pools"); + let master_pool = guard.get(database_url_var); + + match master_pool { + None => { + guard.insert(database_url_var, pool.clone()); + Ok(pool) + } + Some(master_pool) => Err((master_pool.clone(), pool)), } } diff --git a/sqlx-sqlite/src/testing/mod.rs b/sqlx-sqlite/src/testing/mod.rs index 058a24c52b..fb8fcc7a7c 100644 --- a/sqlx-sqlite/src/testing/mod.rs +++ 
b/sqlx-sqlite/src/testing/mod.rs @@ -16,8 +16,9 @@ impl TestSupport for Sqlite { test_context(args) } - async fn cleanup_test(db_name: &str) -> Result<(), Error> { - crate::fs::remove_file(db_name).await?; + async fn cleanup_test(args: &TestArgs) -> Result<(), Error> { + let db_name = Self::db_name(args); + crate::fs::remove_file(&db_name).await?; Ok(()) } @@ -26,6 +27,10 @@ impl TestSupport for Sqlite { Ok(None) } + async fn cleanup_test_dbs_by_url(_: &str) -> Result, Error> { + unimplemented!() + } + async fn snapshot(_conn: &mut Self::Connection) -> Result, Error> { todo!() } diff --git a/tests/mixed/fixtures/mysql/comments.sql b/tests/mixed/fixtures/mysql/comments.sql new file mode 100644 index 0000000000..7255de5e25 --- /dev/null +++ b/tests/mixed/fixtures/mysql/comments.sql @@ -0,0 +1,16 @@ +insert into comment(comment_id, post_id, user_id, content, created_at) +values (1, + 1, + 2, + 'lol bet ur still bad, 1v1 me', + timestamp(now(), '-0:50:00')), + (2, + 1, + 1, + 'you''re on!', + timestamp(now(), '-0:45:00')), + (3, + 2, + 1, + 'lol you''re just mad you lost :P', + timestamp(now(), '-0:15:00')); diff --git a/tests/mixed/fixtures/mysql/posts.sql b/tests/mixed/fixtures/mysql/posts.sql new file mode 100644 index 0000000000..d692f3a1bd --- /dev/null +++ b/tests/mixed/fixtures/mysql/posts.sql @@ -0,0 +1,9 @@ +insert into post(post_id, user_id, content, created_at) +values (1, + 1, + 'This new computer is lightning-fast!', + timestamp(now(), '-1:00:00')), + (2, + 2, + '@alice is a haxxor :(', + timestamp(now(), '-0:30:00')); diff --git a/tests/mixed/fixtures/postgres/users.sql b/tests/mixed/fixtures/postgres/users.sql new file mode 100644 index 0000000000..571fb829ed --- /dev/null +++ b/tests/mixed/fixtures/postgres/users.sql @@ -0,0 +1,2 @@ +insert into "user"(user_id, username) +values ('6592b7c0-b531-4613-ace5-94246b7ce0c3', 'alice'), ('297923c5-a83c-4052-bab0-030887154e52', 'bob'); diff --git a/tests/mixed/test-attr.rs b/tests/mixed/test-attr.rs new file 
mode 100644 index 0000000000..d851d02c51 --- /dev/null +++ b/tests/mixed/test-attr.rs @@ -0,0 +1,73 @@ +// The no-arg variant is covered by other tests already. + +use sqlx::{MySqlPool, PgPool}; + +const PG_MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!("tests/postgres/migrations"); +const MYSQL_MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!("tests/mysql/migrations"); + +const PG_USERS_DATABASE_URL: &'static str = "PG_USERS_DATABASE_URL"; +const MYSQL_COMMENTS_DATABASE_URL: &'static str = "MYSQL_COMMENTS_DATABASE_URL"; + +#[sqlx::test( + env(migrations = "tests/postgres/migrations"), // no database url var + env( + migrator = "PG_MIGRATOR", + fixtures(path = "fixtures/postgres", scripts("users")), + var = "PG_USERS_DATABASE_URL", // rust path + ), + env( + migrations = "tests/mysql/migrations", + fixtures(path = "fixtures/mysql", scripts("posts")), + var("MYSQL_POSTS_DATABASE_URL"), // string literal + ), + env( + migrator = "MYSQL_MIGRATOR", + fixtures(path = "fixtures/mysql", scripts("comments")), + var = "MYSQL_COMMENTS_DATABASE_URL", // rust path + ), +)] +async fn it_gets_from_invidual_environments( + pg_pool_1: PgPool, + pg_pool_2: PgPool, + mysql_pool_1: MySqlPool, + mysql_pool_2: MySqlPool, +) -> sqlx::Result<()> { + let db_name: String = sqlx::query_scalar("SELECT current_database()") + .fetch_one(&pg_pool_1) + .await?; + + assert!(db_name.starts_with("_sqlx_test"), "dbname: {db_name:?}"); + + let pg_pool_2_usernames: Vec = + sqlx::query_scalar(r#"SELECT username FROM "user" ORDER BY username"#) + .fetch_all(&pg_pool_2) + .await?; + + assert_eq!(pg_pool_2_usernames, ["alice", "bob"]); + + let mysql_pool_1_posts: Vec = + sqlx::query_scalar("SELECT content FROM post ORDER BY post_id") + .fetch_all(&mysql_pool_1) + .await?; + + assert_eq!( + mysql_pool_1_posts, + [ + "This new computer is lightning-fast!", + "@alice is a haxxor :(" + ] + ); + + let mysql_pool_2_comments: Vec = + sqlx::query_scalar("SELECT content FROM comment WHERE post_id = ? 
ORDER BY created_at")
+            .bind(1)
+            .fetch_all(&mysql_pool_2)
+            .await?;
+
+    assert_eq!(
+        mysql_pool_2_comments,
+        ["lol bet ur still bad, 1v1 me", "you're on!"]
+    );
+
+    Ok(())
+}
diff --git a/tests/mysql/test-attr.rs b/tests/mysql/test-attr.rs
index 75ca668602..68f028a3e1 100644
--- a/tests/mysql/test-attr.rs
+++ b/tests/mysql/test-attr.rs
@@ -172,3 +172,56 @@ async fn it_gets_comments(pool: MySqlPool) -> sqlx::Result<()> {
 
     Ok(())
 }
+
+#[sqlx::test(
+    env(migrations = "tests/mysql/migrations", fixtures("users")),
+    env(migrator = "MIGRATOR", fixtures("users", "posts", "comments"))
+)]
+async fn it_gets_from_individual_environments(
+    pool_1: MySqlPool,
+    pool_2: MySqlPool,
+) -> sqlx::Result<()> {
+    let pool_1_usernames: Vec<String> =
+        sqlx::query_scalar(r#"SELECT username FROM user ORDER BY username"#)
+            .fetch_all(&pool_1)
+            .await?;
+
+    assert_eq!(pool_1_usernames, ["alice", "bob"]);
+
+    let pool_2_usernames: Vec<String> =
+        sqlx::query_scalar(r#"SELECT username FROM user ORDER BY username"#)
+            .fetch_all(&pool_2)
+            .await?;
+
+    assert_eq!(pool_2_usernames, ["alice", "bob"]);
+
+    let pool_1_post_comments: Vec<String> =
+        sqlx::query_scalar("SELECT content FROM comment WHERE post_id = ? ORDER BY created_at")
+            .bind(&1)
+            .fetch_all(&pool_1)
+            .await?;
+
+    assert_eq!(pool_1_post_comments.len(), 0);
+
+    let pool_2_post_comments: Vec<String> =
+        sqlx::query_scalar("SELECT content FROM comment WHERE post_id = ? ORDER BY created_at")
+            .bind(&1)
+            .fetch_all(&pool_2)
+            .await?;
+
+    assert_eq!(
+        pool_2_post_comments,
+        ["lol bet ur still bad, 1v1 me", "you're on!"]
+    );
+
+    Ok(())
+}
+
+#[sqlx::test(
+    migrations = "tests/mysql/migrations",
+    fixtures(path = "../fixtures/mysql", scripts("users", "posts")),
+    var("DATABASE_URL")
+)]
+async fn this_should_compile(_pool: MySqlPool) -> sqlx::Result<()> {
+    Ok(())
+}
diff --git a/tests/postgres/test-attr.rs b/tests/postgres/test-attr.rs
index 78a8b1f59a..2b26e93581 100644
--- a/tests/postgres/test-attr.rs
+++ b/tests/postgres/test-attr.rs
@@ -179,9 +179,53 @@ async fn it_gets_comments(pool: PgPool) -> sqlx::Result<()> {
     Ok(())
 }
 
+#[sqlx::test(
+    env(migrations = "tests/postgres/migrations", fixtures("users")),
+    env(migrator = "MIGRATOR", fixtures("users", "posts", "comments"))
+)]
+async fn it_gets_from_individual_environments(pool_1: PgPool, pool_2: PgPool) -> sqlx::Result<()> {
+    let pool_1_usernames: Vec<String> =
+        sqlx::query_scalar(r#"SELECT username FROM "user" ORDER BY username"#)
+            .fetch_all(&pool_1)
+            .await?;
+
+    assert_eq!(pool_1_usernames, ["alice", "bob"]);
+
+    let pool_2_usernames: Vec<String> =
+        sqlx::query_scalar(r#"SELECT username FROM "user" ORDER BY username"#)
+            .fetch_all(&pool_2)
+            .await?;
+
+    assert_eq!(pool_2_usernames, ["alice", "bob"]);
+
+    let pool1_comments: Vec<String> = sqlx::query_scalar(
+        "SELECT content FROM comment WHERE post_id = $1::uuid ORDER BY created_at",
+    )
+    .bind("252c1d98-a9b0-4f18-8298-e59058bdfe16")
+    .fetch_all(&pool_1)
+    .await?;
+
+    assert_eq!(pool1_comments.len(), 0);
+
+    let pool_2_comments: Vec<String> = sqlx::query_scalar(
+        "SELECT content FROM comment WHERE post_id = $1::uuid ORDER BY created_at",
+    )
+    .bind("252c1d98-a9b0-4f18-8298-e59058bdfe16")
+    .fetch_all(&pool_2)
+    .await?;
+
+    assert_eq!(
+        pool_2_comments,
+        ["lol bet ur still bad, 1v1 me", "you're on!"]
+    );
+
+    Ok(())
+}
+
 #[sqlx::test(
     migrations = "tests/postgres/migrations",
-    fixtures(path = "../fixtures/postgres", scripts("users", "posts"))
+    fixtures(path = "../fixtures/postgres", scripts("users", "posts")),
+    var("DATABASE_URL")
 )]
 async fn this_should_compile(_pool: PgPool) -> sqlx::Result<()> {
     Ok(())
diff --git a/tests/sqlite/test-attr.rs b/tests/sqlite/test-attr.rs
index 09957d764a..87918c0250 100644
--- a/tests/sqlite/test-attr.rs
+++ b/tests/sqlite/test-attr.rs
@@ -98,6 +98,50 @@ async fn it_gets_comments(pool: SqlitePool) -> sqlx::Result<()> {
     Ok(())
 }
 
+#[sqlx::test(
+    env(migrations = "tests/sqlite/migrations", fixtures("users")),
+    env(migrator = "MIGRATOR", fixtures("users", "posts", "comments"))
+)]
+async fn it_gets_from_individual_environments(
+    pool_1: SqlitePool,
+    pool_2: SqlitePool,
+) -> sqlx::Result<()> {
+    let pool_1_usernames: Vec<String> =
+        sqlx::query_scalar(r#"SELECT username FROM "user" ORDER BY username"#)
+            .fetch_all(&pool_1)
+            .await?;
+
+    assert_eq!(pool_1_usernames, ["alice", "bob"]);
+
+    let pool_2_usernames: Vec<String> =
+        sqlx::query_scalar(r#"SELECT username FROM "user" ORDER BY username"#)
+            .fetch_all(&pool_2)
+            .await?;
+
+    assert_eq!(pool_2_usernames, ["alice", "bob"]);
+
+    let pool_1_post_comments: Vec<String> =
+        sqlx::query_scalar("SELECT content FROM comment WHERE post_id = ? ORDER BY created_at")
+            .bind(&1)
+            .fetch_all(&pool_1)
+            .await?;
+
+    assert_eq!(pool_1_post_comments.len(), 0);
+
+    let pool_2_post_comments: Vec<String> =
+        sqlx::query_scalar("SELECT content FROM comment WHERE post_id = ? ORDER BY created_at")
+            .bind(&1)
+            .fetch_all(&pool_2)
+            .await?;
+
+    assert_eq!(
+        pool_2_post_comments,
+        ["lol bet ur still bad, 1v1 me", "you're on!"]
+    );
+
+    Ok(())
+}
+
 #[sqlx::test(
     migrations = "tests/sqlite/migrations",
     fixtures(path = "./fixtures", scripts("users", "posts"))
From 0c1087055927bca354680ebdb4fe0bf14fe66f47 Mon Sep 17 00:00:00 2001
From: masato
Date: Sun, 5 Oct 2025 05:17:23 +0900
Subject: [PATCH 2/4] Fixed environment variable names depending on test implementation.
--- .github/workflows/sqlx.yml | 23 ++++++++++++----------- tests/mixed/test-attr.rs | 10 +++++----- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/.github/workflows/sqlx.yml b/.github/workflows/sqlx.yml index 5da68f1276..5d952b7ef1 100644 --- a/.github/workflows/sqlx.yml +++ b/.github/workflows/sqlx.yml @@ -534,20 +534,20 @@ jobs: - run: cargo build --features any,postgres,mysql,macros,migrate,_unstable-all-types,runtime-${{ matrix.runtime }} - run: | - docker compose -f tests/docker-compose.yml run -d -p 5432:5432 --name postgres_${{ matrix.postgres }} postgres_${{ matrix.postgres }} - docker exec postgres_${{ matrix.postgres }} bash -c "until pg_isready; do sleep 1; done" + docker compose -f tests/docker-compose.yml run -d -p 5432:5432 --name postgres_1_${{ matrix.postgres }} postgres_${{ matrix.postgres }} + docker exec postgres_1_${{ matrix.postgres }} bash -c "until pg_isready; do sleep 1; done" - run: | - docker compose -f tests/docker-compose.yml run -d -p 5433:5432 --name postgres_users_${{ matrix.postgres }} postgres_${{ matrix.postgres }} - docker exec postgres_users_${{ matrix.postgres }} bash -c "until pg_isready; do sleep 1; done" + docker compose -f tests/docker-compose.yml run -d -p 5433:5432 --name postgres_2_${{ matrix.postgres }} postgres_${{ matrix.postgres }} + docker exec postgres_2_${{ matrix.postgres }} bash -c "until pg_isready; do sleep 1; done" - run: | - docker compose -f tests/docker-compose.yml run -d -p 3306:3306 --name mysql_posts_${{ matrix.mysql }} mysql_${{ matrix.mysql }} - docker exec mysql_posts_${{ matrix.mysql }} bash -c "until mysqladmin ping; do sleep 1; done" + docker compose -f tests/docker-compose.yml run -d -p 3306:3306 --name mysql_1_${{ matrix.mysql }} mysql_${{ matrix.mysql }} + docker exec mysql_1_${{ matrix.mysql }} bash -c "until mysqladmin ping; do sleep 1; done" - run: | - docker compose -f tests/docker-compose.yml run -d -p 3307:3306 --name mysql_comments_${{ matrix.mysql }} mysql_${{ 
matrix.mysql }} - docker exec mysql_comments_${{ matrix.mysql }} bash -c "until mysqladmin ping; do sleep 1; done" + docker compose -f tests/docker-compose.yml run -d -p 3307:3306 --name mysql_2_${{ matrix.mysql }} mysql_${{ matrix.mysql }} + docker exec mysql_2_${{ matrix.mysql }} bash -c "until mysqladmin ping; do sleep 1; done" # Create data dir for offline mode - run: mkdir .sqlx @@ -560,9 +560,10 @@ jobs: --features any,postgres,mysql,macros,migrate,_unstable-all-types,runtime-${{ matrix.runtime }} env: DATABASE_URL: postgres://postgres:password@localhost:5432/sqlx - PG_USERS_DATABASE_URL: postgres://postgres:password@localhost:5433/sqlx - MYSQL_POSTS_DATABASE_URL: mysql://root:password@localhost:3306/sqlx - MYSQL_COMMENTS_DATABASE_URL: mysql://root:password@localhost:3307/sqlx + PG_1_DATABASE_URL: postgres://postgres:password@localhost:5432/sqlx + PG_2_DATABASE_URL: postgres://postgres:password@localhost:5433/sqlx + MYSQL_1_DATABASE_URL: mysql://root:password@localhost:3306/sqlx + MYSQL_2_DATABASE_URL: mysql://root:password@localhost:3307/sqlx SQLX_OFFLINE_DIR: .sqlx RUSTFLAGS: -D warnings --cfg postgres="${{ matrix.postgres }}" --cfg mysql_${{ matrix.mysql }} diff --git a/tests/mixed/test-attr.rs b/tests/mixed/test-attr.rs index d851d02c51..4c76ba130d 100644 --- a/tests/mixed/test-attr.rs +++ b/tests/mixed/test-attr.rs @@ -5,25 +5,25 @@ use sqlx::{MySqlPool, PgPool}; const PG_MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!("tests/postgres/migrations"); const MYSQL_MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!("tests/mysql/migrations"); -const PG_USERS_DATABASE_URL: &'static str = "PG_USERS_DATABASE_URL"; -const MYSQL_COMMENTS_DATABASE_URL: &'static str = "MYSQL_COMMENTS_DATABASE_URL"; +const PG_2_DATABASE_URL: &'static str = "PG_2_DATABASE_URL"; +const MYSQL_2_DATABASE_URL: &'static str = "MYSQL_2_DATABASE_URL"; #[sqlx::test( env(migrations = "tests/postgres/migrations"), // no database url var env( migrator = "PG_MIGRATOR", fixtures(path = 
"fixtures/postgres", scripts("users")), - var = "PG_USERS_DATABASE_URL", // rust path + var = "PG_2_DATABASE_URL", // rust path ), env( migrations = "tests/mysql/migrations", fixtures(path = "fixtures/mysql", scripts("posts")), - var("MYSQL_POSTS_DATABASE_URL"), // string literal + var("MYSQL_1_DATABASE_URL"), // string literal ), env( migrator = "MYSQL_MIGRATOR", fixtures(path = "fixtures/mysql", scripts("comments")), - var = "MYSQL_COMMENTS_DATABASE_URL", // rust path + var = "MYSQL_2_DATABASE_URL", // rust path ), )] async fn it_gets_from_invidual_environments( From 09a539d7edb6bc9d63e4f90008a8defaa72bc260 Mon Sep 17 00:00:00 2001 From: masato Date: Sun, 5 Oct 2025 05:29:40 +0900 Subject: [PATCH 3/4] fix clippy checks. --- sqlx-postgres/src/testing/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sqlx-postgres/src/testing/mod.rs b/sqlx-postgres/src/testing/mod.rs index e3c9742e6b..e3431d9945 100644 --- a/sqlx-postgres/src/testing/mod.rs +++ b/sqlx-postgres/src/testing/mod.rs @@ -47,7 +47,7 @@ impl TestSupport for Postgres { } async fn cleanup_test_dbs_by_url(url: &str) -> Result, Error> { - let mut conn = PgConnection::connect(&url).await?; + let mut conn = PgConnection::connect(url).await?; let delete_db_names: Vec = query_scalar("select db_name from _sqlx_test.databases") .fetch_all(&mut conn) From 6269ff968abc5b4ba12b7c46892b8e60be443213 Mon Sep 17 00:00:00 2001 From: masato Date: Sun, 5 Oct 2025 05:56:38 +0900 Subject: [PATCH 4/4] Fixed a bug where fixtures weren't executed when sqlx::test had no arguments. 
--- sqlx-macros-core/src/test_attr.rs | 44 +++++++++++++++---------------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/sqlx-macros-core/src/test_attr.rs b/sqlx-macros-core/src/test_attr.rs index aa08589c51..9af94903f0 100644 --- a/sqlx-macros-core/src/test_attr.rs +++ b/sqlx-macros-core/src/test_attr.rs @@ -105,22 +105,10 @@ fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { - return Ok(quote! { - #(#attrs)* - #[::core::prelude::v1::test] - fn #name() #ret { - async fn #name(#inputs) #ret { - #body - } - - let mut args = ::sqlx::testing::TestArgs::new(concat!(module_path!(), "::", stringify!(#name))); - - // We need to give a coercion site or else we get "unimplemented trait" errors. - let f: fn(#(#fn_arg_types),*) -> _ = #name; - - ::sqlx::testing::TestFn::run_test(f, args) - } - }); + return Err(Box::new(syn::Error::new_spanned( + args.first().unwrap(), + "BUG: unexpected args.", + ))); } args_num @ 1..=4 => { use proc_macro2::Span; @@ -139,13 +127,18 @@ fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result = Vec::new(); + if args.is_empty() { + let parsed = parse_one_attr_args(args, input, config)?; + return Ok(vec![parsed]); + } + for arg in args { match arg { syn::Meta::List(list) if list.path.is_ident("env") => { let args = parser.parse2(list.tokens.clone())?; - let parsed = parse_one_attr_args(args, input, config)?; + let parsed = parse_one_attr_args(&args, input, config)?; parsed_args.push(parsed); } _ => { - let parsed = parse_one_attr_args(args.clone(), input, config)?; + let parsed = parse_one_attr_args(args, input, config)?; return Ok(vec![parsed]); } } @@ -207,7 +205,7 @@ fn parse_attr_args( #[cfg(feature = "migrate")] fn parse_one_attr_args( - args: AttributeArgs, + args: &AttributeArgs, input: &syn::ItemFn, config: &sqlx_core::config::Config, ) -> crate::Result { @@ -307,7 +305,7 @@ fn parse_one_attr_args( } #[cfg(feature = "migrate")] -fn parse_args(attr_args: AttributeArgs) -> 
syn::Result { +fn parse_args(attr_args: &AttributeArgs) -> syn::Result { use proc_macro2::Span; use syn::{ parenthesized, parse::Parse, punctuated::Punctuated, token::Comma, Expr, Lit, LitStr, Meta, @@ -376,7 +374,7 @@ fn parse_args(attr_args: AttributeArgs) -> syn::Result { } } - let Some(lit) = recurse_lit_lookup(value.value) else { + let Some(lit) = recurse_lit_lookup(value.value.clone()) else { return Err(syn::Error::new_spanned(path, "expected string or `false`")); };