diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs
index 5dd4aeefc6..8cee1cff57 100644
--- a/sqlx-cli/src/lib.rs
+++ b/sqlx-cli/src/lib.rs
@@ -36,6 +36,7 @@ pub async fn run(opt: Opt) -> anyhow::Result<()> {
                 ignore_missing,
             } => migrate::revert(&migrate.source, &database_url, dry_run, ignore_missing).await?,
             MigrateCommand::Info => migrate::info(&migrate.source, &database_url).await?,
+            MigrateCommand::BuildScript { force } => migrate::build_script(&migrate.source, force)?,
         },
 
         Command::Database(database) => match database.command {
diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs
index 20d61f1985..523cf83fa4 100644
--- a/sqlx-cli/src/migrate.rs
+++ b/sqlx-cli/src/migrate.rs
@@ -42,6 +42,11 @@ pub async fn add(
 ) -> anyhow::Result<()> {
     fs::create_dir_all(migration_source).context("Unable to create migrations directory")?;
 
+    // if the migrations directory is empty
+    let has_existing_migrations = fs::read_dir(migration_source)
+        .map(|mut dir| dir.next().is_some())
+        .unwrap_or(false);
+
     let migrator = Migrator::new(Path::new(migration_source)).await?;
     // This checks if all existing migrations are of the same type as the reverisble flag passed
     for migration in migrator.iter() {
@@ -74,6 +79,31 @@ pub async fn add(
     )?;
     }
 
+    if !has_existing_migrations {
+        let quoted_source = if migration_source != "migrations" {
+            format!("{:?}", migration_source)
+        } else {
+            "".to_string()
+        };
+
+        print!(
+            r#"
+Congratulations on creating your first migration!
+
+Did you know you can embed your migrations in your application binary?
+On startup, after creating your database connection or pool, add:
+
+sqlx::migrate!({}).run(<&your_pool OR &mut your_connection>).await?;
+
+Note that the compiler won't pick up new migrations if no Rust source files have changed.
+You can create a Cargo build script to work around this with `sqlx migrate build-script`.
+
+See: https://docs.rs/sqlx/0.5/sqlx/macro.migrate.html
+"#,
+            quoted_source
+        );
+    }
+
     Ok(())
 }
@@ -245,3 +275,30 @@ pub async fn revert(
 
     Ok(())
 }
+
+pub fn build_script(migration_source: &str, force: bool) -> anyhow::Result<()> {
+    anyhow::ensure!(
+        Path::new("Cargo.toml").exists(),
+        "must be run in a Cargo project root"
+    );
+
+    anyhow::ensure!(
+        (force || !Path::new("build.rs").exists()),
+        "build.rs already exists; use --force to overwrite"
+    );
+
+    let contents = format!(
+        r#"// generated by `sqlx migrate build-script`
+fn main() {{
+    // trigger recompilation when a new migration is added
+    println!("cargo:rerun-if-changed={}");
+}}"#,
+        migration_source
+    );
+
+    fs::write("build.rs", contents)?;
+
+    println!("Created `build.rs`; be sure to check it into version control!");
+
+    Ok(())
+}
diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs
index 8d912668bf..410fd9d707 100644
--- a/sqlx-cli/src/opt.rs
+++ b/sqlx-cli/src/opt.rs
@@ -130,4 +130,13 @@ pub enum MigrateCommand {
 
     /// List all available migrations.
     Info,
+
+    /// Generate a `build.rs` to trigger recompilation when a new migration is added.
+    ///
+    /// Must be run in a Cargo project root.
+    BuildScript {
+        /// Overwrite the build script if it already exists.
+        #[clap(long)]
+        force: bool,
+    },
 }
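To make the hint printed by `sqlx migrate add` concrete, here is a minimal sketch of the startup flow it suggests. The Tokio runtime, Postgres pool settings, and `DATABASE_URL` lookup are illustrative assumptions, not something this diff prescribes:

```rust
use sqlx::postgres::PgPoolOptions;

// Minimal sketch: embed `./migrations` at compile time and apply any pending
// migrations on startup, as suggested by the first-migration hint above.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect(&std::env::var("DATABASE_URL")?)
        .await?;

    // `migrate!()` with no argument embeds the `migrations/` directory
    // relative to the project root (the directory containing `Cargo.toml`).
    sqlx::migrate!().run(&pool).await?;

    Ok(())
}
```

Pairing this with the `build.rs` generated by `sqlx migrate build-script` (or the nightly `--cfg sqlx_macros_unstable` flag documented further down) ensures that newly added migration files also trigger recompilation.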
diff --git a/sqlx-macros/src/lib.rs b/sqlx-macros/src/lib.rs
index 8956aa745b..c1f173e655 100644
--- a/sqlx-macros/src/lib.rs
+++ b/sqlx-macros/src/lib.rs
@@ -2,6 +2,10 @@
     not(any(feature = "postgres", feature = "mysql", feature = "offline")),
     allow(dead_code, unused_macros, unused_imports)
 )]
+#![cfg_attr(
+    any(sqlx_macros_unstable, procmacro2_semver_exempt),
+    feature(track_path, proc_macro_tracked_env)
+)]
 extern crate proc_macro;
 
 use proc_macro::TokenStream;
diff --git a/sqlx-macros/src/migrate.rs b/sqlx-macros/src/migrate.rs
index f10bc22318..018ba1b41e 100644
--- a/sqlx-macros/src/migrate.rs
+++ b/sqlx-macros/src/migrate.rs
@@ -24,7 +24,7 @@ struct QuotedMigration {
     version: i64,
     description: String,
     migration_type: QuotedMigrationType,
-    sql: String,
+    path: String,
     checksum: Vec<u8>,
 }
@@ -34,7 +34,7 @@ impl ToTokens for QuotedMigration {
             version,
             description,
             migration_type,
-            sql,
+            path,
             checksum,
         } = &self;
@@ -43,7 +43,8 @@ impl ToTokens for QuotedMigration {
                 version: #version,
                 description: ::std::borrow::Cow::Borrowed(#description),
                 migration_type: #migration_type,
-                sql: ::std::borrow::Cow::Borrowed(#sql),
+                // this tells the compiler to watch this path for changes
+                sql: ::std::borrow::Cow::Borrowed(include_str!(#path)),
                 checksum: ::std::borrow::Cow::Borrowed(&[
                     #(#checksum),*
                 ]),
@@ -59,7 +60,7 @@ pub(crate) fn expand_migrator_from_dir(dir: LitStr) -> crate::Result<TokenStream>
diff --git a/sqlx-macros/src/query/data.rs b/sqlx-macros/src/query/data.rs
--- a/sqlx-macros/src/query/data.rs
+++ b/sqlx-macros/src/query/data.rs
     pub fn from_data_file(path: impl AsRef<Path>, query: &str) -> crate::Result<Self> {
-        serde_json::Deserializer::from_reader(BufReader::new(
+        let this = serde_json::Deserializer::from_reader(BufReader::new(
             File::open(path.as_ref()).map_err(|e| {
                 format!("failed to open path {}: {}", path.as_ref().display(), e)
             })?,
@@ -69,8 +69,22 @@ pub mod offline {
         .deserialize_map(DataFileVisitor {
             query,
             hash: hash_string(query),
-        })
-        .map_err(Into::into)
+        })?;
+
+        #[cfg(procmacro2_semver_exempt)]
+        {
+            let path = path.as_ref().canonicalize()?;
+            let path = path.to_str().ok_or_else(|| {
+                format!(
+                    "sqlx-data.json path cannot be represented as a string: {:?}",
+                    path
+                )
+            })?;
+
+            proc_macro::tracked_path::path(path);
+        }
+
+        Ok(this)
     }
 }
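For illustration, the `ToTokens` impl above now makes each embedded migration expand to roughly the following. This is a sketch of macro output, not code from this diff: the absolute path, version, description, checksum bytes, and the `MigrationType::Simple` variant are all hypothetical placeholders.

```rust
// Approximate expansion of one embedded migration (illustrative only).
::sqlx::migrate::Migration {
    version: 20210617000000,
    description: ::std::borrow::Cow::Borrowed("create users table"),
    migration_type: ::sqlx::migrate::MigrationType::Simple,
    // `include_str!()` registers the file with the compiler, so editing an
    // existing migration now triggers re-expansion of `migrate!()`.
    sql: ::std::borrow::Cow::Borrowed(include_str!(
        "/home/user/my-app/migrations/20210617000000_create_users_table.sql"
    )),
    checksum: ::std::borrow::Cow::Borrowed(&[/* SHA-384 digest bytes */]),
}
```

Watching the whole directory for *new* files still requires the build script or the nightly flag documented in `src/macros.rs` below.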
diff --git a/sqlx-macros/src/query/input.rs b/sqlx-macros/src/query/input.rs
index 86627d60b1..ecfc3e643d 100644
--- a/sqlx-macros/src/query/input.rs
+++ b/sqlx-macros/src/query/input.rs
@@ -8,7 +8,7 @@ use syn::{ExprArray, Type};
 
 /// Macro input shared by `query!()` and `query_file!()`
 pub struct QueryMacroInput {
-    pub(super) src: String,
+    pub(super) sql: String,
 
     #[cfg_attr(not(feature = "offline"), allow(dead_code))]
     pub(super) src_span: Span,
@@ -18,6 +18,8 @@ pub struct QueryMacroInput {
     pub(super) arg_exprs: Vec<Expr>,
 
     pub(super) checked: bool,
+
+    pub(super) file_path: Option<String>,
 }
 
 enum QuerySrc {
@@ -94,12 +96,15 @@ impl Parse for QueryMacroInput {
 
         let arg_exprs = args.unwrap_or_default();
 
+        let file_path = src.file_path(src_span)?;
+
         Ok(QueryMacroInput {
-            src: src.resolve(src_span)?,
+            sql: src.resolve(src_span)?,
             src_span,
             record_type,
             arg_exprs,
             checked,
+            file_path,
         })
     }
 }
@@ -112,6 +117,27 @@ impl QuerySrc {
             QuerySrc::File(file) => read_file_src(&file, source_span),
         }
     }
+
+    fn file_path(&self, source_span: Span) -> syn::Result<Option<String>> {
+        if let QuerySrc::File(ref file) = *self {
+            let path = std::path::Path::new(file)
+                .canonicalize()
+                .map_err(|e| syn::Error::new(source_span, e))?;
+
+            Ok(Some(
+                path.to_str()
+                    .ok_or_else(|| {
+                        syn::Error::new(
+                            source_span,
+                            "query file path cannot be represented as a string",
+                        )
+                    })?
+                    .to_string(),
+            ))
+        } else {
+            Ok(None)
+        }
+    }
 }
 
 fn read_file_src(source: &str, source_span: Span) -> syn::Result<String> {
diff --git a/sqlx-macros/src/query/mod.rs b/sqlx-macros/src/query/mod.rs
index 91c706dc42..357c4f3524 100644
--- a/sqlx-macros/src/query/mod.rs
+++ b/sqlx-macros/src/query/mod.rs
@@ -23,6 +23,7 @@ mod input;
 mod output;
 
 struct Metadata {
+    #[allow(unused)]
     manifest_dir: PathBuf,
     offline: bool,
     database_url: Option<String>,
@@ -35,42 +36,47 @@ struct Metadata {
 // If we are in a workspace, lookup `workspace_root` since `CARGO_MANIFEST_DIR` won't
 // reflect the workspace dir: https://github.com/rust-lang/cargo/issues/3946
 static METADATA: Lazy<Metadata> = Lazy::new(|| {
-    use std::env;
-
-    let manifest_dir: PathBuf = env::var("CARGO_MANIFEST_DIR")
+    let manifest_dir: PathBuf = env("CARGO_MANIFEST_DIR")
         .expect("`CARGO_MANIFEST_DIR` must be set")
         .into();
 
     #[cfg(feature = "offline")]
-    let target_dir =
-        env::var_os("CARGO_TARGET_DIR").map_or_else(|| "target".into(), |dir| dir.into());
+    let target_dir = env("CARGO_TARGET_DIR").map_or_else(|_| "target".into(), |dir| dir.into());
 
     // If a .env file exists at CARGO_MANIFEST_DIR, load environment variables from this,
     // otherwise fallback to default dotenv behaviour.
     let env_path = manifest_dir.join(".env");
-    if env_path.exists() {
+
+    #[cfg_attr(not(procmacro2_semver_exempt), allow(unused_variables))]
+    let env_path = if env_path.exists() {
         let res = dotenv::from_path(&env_path);
         if let Err(e) = res {
             panic!("failed to load environment from {:?}, {}", env_path, e);
         }
+
+        Some(env_path)
     } else {
-        let _ = dotenv::dotenv();
+        dotenv::dotenv().ok()
+    };
+
+    // tell the compiler to watch the `.env` for changes, if applicable
+    #[cfg(procmacro2_semver_exempt)]
+    if let Some(env_path) = env_path.as_ref().and_then(|path| path.to_str()) {
+        proc_macro::tracked_path::path(env_path);
     }
 
-    // TODO: Switch to `var_os` after feature(osstring_ascii) is stable.
-    // Stabilization PR: https://github.com/rust-lang/rust/pull/80193
-    let offline = env::var("SQLX_OFFLINE")
+    let offline = env("SQLX_OFFLINE")
        .map(|s| s.eq_ignore_ascii_case("true") || s == "1")
         .unwrap_or(false);
 
-    let database_url = env::var("DATABASE_URL").ok();
+    let database_url = env("DATABASE_URL").ok();
 
     #[cfg(feature = "offline")]
     let workspace_root = {
         use serde::Deserialize;
         use std::process::Command;
 
-        let cargo = env::var_os("CARGO").expect("`CARGO` must be set");
+        let cargo = env("CARGO").expect("`CARGO` must be set");
 
         let output = Command::new(&cargo)
             .args(&["metadata", "--format-version=1"])
@@ -151,7 +157,7 @@ fn expand_from_db(input: QueryMacroInput, db_url: &str) -> crate::Result<TokenStream> {
             let data = block_on(async {
                 let mut conn = sqlx_core::postgres::PgConnection::connect(db_url.as_str()).await?;
-                QueryData::from_db(&mut conn, &input.src).await
+                QueryData::from_db(&mut conn, &input.sql).await
             })?;
 
             expand_with_data(input, data, false)
@@ -164,7 +170,7 @@ fn expand_from_db(input: QueryMacroInput, db_url: &str) -> crate::Result<TokenStream> {
             let data = block_on(async {
                 let mut conn = sqlx_core::mssql::MssqlConnection::connect(db_url.as_str()).await?;
-                QueryData::from_db(&mut conn, &input.src).await
+                QueryData::from_db(&mut conn, &input.sql).await
             })?;
 
             expand_with_data(input, data, false)
@@ -177,7 +183,7 @@ fn expand_from_db(input: QueryMacroInput, db_url: &str) -> crate::Result<TokenStream> {
             let data = block_on(async {
                 let mut conn = sqlx_core::mysql::MySqlConnection::connect(db_url.as_str()).await?;
-                QueryData::from_db(&mut conn, &input.src).await
+                QueryData::from_db(&mut conn, &input.sql).await
             })?;
 
             expand_with_data(input, data, false)
@@ -190,7 +196,7 @@ fn expand_from_db(input: QueryMacroInput, db_url: &str) -> crate::Result<TokenStream> {
             let data = block_on(async {
                 let mut conn = sqlx_core::sqlite::SqliteConnection::connect(db_url.as_str()).await?;
-                QueryData::from_db(&mut conn, &input.src).await
+                QueryData::from_db(&mut conn, &input.sql).await
             })?;
 
             expand_with_data(input, data, false)
@@ -207,7 +213,7 @@ fn expand_from_db(input: QueryMacroInput, db_url: &str) -> crate::Result<TokenStream> {
@@ -227,7 +233,7 @@ pub fn expand_from_file(input: QueryMacroInput, file: PathBuf) -> crate::Result<TokenStream> {
     use data::offline::DynQueryData;
 
-    let query_data = DynQueryData::from_data_file(file, &input.src)?;
+    let query_data = DynQueryData::from_data_file(file, &input.sql)?;
     assert!(!query_data.db_name.is_empty());
 
     match &*query_data.db_name {
@@ -288,7 +294,7 @@ where
         .all(|it| it.type_info().is_void())
     {
         let db_path = DB::db_path();
-        let sql = &input.src;
+        let sql = &input.sql;
 
         quote! {
             ::sqlx::query_with::<#db_path, _>(#sql, #query_args)
@@ -368,3 +374,16 @@
 
     Ok(ret_tokens)
 }
+
+/// Get the value of an environment variable, telling the compiler about it if applicable.
+fn env(name: &str) -> Result<String, std::env::VarError> {
+    #[cfg(procmacro2_semver_exempt)]
+    {
+        proc_macro::tracked_env::var(name)
+    }
+
+    #[cfg(not(procmacro2_semver_exempt))]
+    {
+        std::env::var(name)
+    }
+}
diff --git a/sqlx-macros/src/query/output.rs b/sqlx-macros/src/query/output.rs
index e7b482e044..2938f8846d 100644
--- a/sqlx-macros/src/query/output.rs
+++ b/sqlx-macros/src/query/output.rs
@@ -157,7 +157,14 @@ pub fn quote_query_as(
 
     let db_path = DB::db_path();
     let row_path = DB::row_path();
-    let sql = &input.src;
+
+    // if this query came from a file, use `include_str!()` to tell the compiler where it came from
+    let sql = if let Some(ref path) = &input.file_path {
+        quote::quote_spanned! { input.src_span => include_str!(#path) }
+    } else {
+        let sql = &input.sql;
+        quote! { #sql }
+    };
 
     quote! {
         ::sqlx::query_with::<#db_path, _>(#sql, #bind_args).try_map(|row: #row_path| {
@@ -200,7 +207,7 @@ pub fn quote_query_scalar(
     };
 
     let db = DB::db_path();
-    let query = &input.src;
+    let query = &input.sql;
 
     Ok(quote! {
         ::sqlx::query_scalar_with::<#db, #ty, _>(#query, #bind_args)
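As a usage note for the `quote_query_as` change above: when a query comes from a file, the generated code now embeds that file via `include_str!()`, so editing only the SQL file is enough to trigger recompilation. A hypothetical example follows; the file name, query contents, column types, and `anyhow`/`DATABASE_URL` setup are assumptions, not part of this diff.

```rust
use sqlx::{postgres::PgConnection, Connection};

// Hypothetical usage; assumes `queries/list_accounts.sql` contains something like
// `SELECT id, name FROM accounts` and that DATABASE_URL points at a matching schema.
async fn list_accounts() -> anyhow::Result<()> {
    let mut conn = PgConnection::connect(&std::env::var("DATABASE_URL")?).await?;

    // The macro output now wraps the file's SQL in `include_str!()` (see output.rs above),
    // so rustc re-runs the macro whenever the file changes.
    let accounts = sqlx::query_file!("queries/list_accounts.sql")
        .fetch_all(&mut conn)
        .await?;

    for account in accounts {
        println!("{}: {:?}", account.id, account.name);
    }

    Ok(())
}
```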
diff --git a/src/macros.rs b/src/macros.rs
index fdabc1e1ef..78264f3573 100644
--- a/src/macros.rs
+++ b/src/macros.rs
@@ -715,6 +715,68 @@ macro_rules! query_file_scalar_unchecked (
 /// The directory must be relative to the project root (the directory containing `Cargo.toml`),
 /// unlike `include_str!()` which uses compiler internals to get the path of the file where it
 /// was invoked.
+///
+/// ## Triggering Recompilation on Migration Changes
+/// In some cases when making changes to embedded migrations, such as adding a new migration without
+/// changing any Rust source files, you might find that `cargo build` doesn't actually do anything,
+/// or when you do `cargo run` your application isn't applying new migrations on startup.
+///
+/// This is because our ability to tell the compiler to watch external files for changes
+/// from a proc-macro is very limited. By default, the compiler only re-runs proc macros when
+/// one or more source files have changed, because normally it shouldn't have to otherwise. SQLx is
+/// just weird in that external factors can change the output of proc macros, much to the chagrin of
+/// the compiler team and IDE plugin authors.
+///
+/// As of 0.5.6, we emit `include_str!()` with an absolute path for each migration, but that
+/// only works to get the compiler to watch _existing_ migration files for changes.
+///
+/// Our only options for telling it to watch the whole `migrations/` directory are either to have
+/// the user create a Cargo build script in their project, or to use an unstable API on nightly
+/// governed by a `cfg` flag.
+///
+/// #### Stable Rust: Cargo Build Script
+/// The only solution on stable Rust right now is to create a Cargo build script in your project
+/// and have it print `cargo:rerun-if-changed=migrations`:
+///
+/// `build.rs`
+/// ```
+/// fn main() {
+///     println!("cargo:rerun-if-changed=migrations");
+/// }
+/// ```
+///
+/// You can run `sqlx migrate build-script` to generate this file automatically.
+///
+/// See: [The Cargo Book: 3.8 Build Scripts; Outputs of the Build Script](https://doc.rust-lang.org/stable/cargo/reference/build-scripts.html#outputs-of-the-build-script)
+///
+/// #### Nightly Rust: `cfg` Flag
+/// The `migrate!()` macro also listens to `--cfg sqlx_macros_unstable`, which will enable
+/// the `track_path` feature to directly tell the compiler to watch the `migrations/` directory:
+///
+/// ```sh,ignore
+/// $ env RUSTFLAGS='--cfg sqlx_macros_unstable' cargo build
+/// ```
+///
+/// Note that this will unfortunately trigger a full recompile of your dependency tree, at least
+/// the first time you use it. It also, of course, requires using a nightly compiler.
+///
+/// You can also set it in `build.rustflags` in `.cargo/config.toml`:
+/// ```toml,ignore
+/// [build]
+/// rustflags = ["--cfg sqlx_macros_unstable"]
+/// ```
+///
+/// And then continue building and running your project normally.
+///
+/// If you're building on nightly anyway, it would be extremely helpful if you could test
+/// this feature and help us find any bugs in it.
+///
+/// Subscribe to [the `track_path` tracking issue](https://github.com/rust-lang/rust/issues/73921)
+/// for discussion and the future stabilization of this feature.
+///
+/// For brevity, and because it involves the same commitment to unstable features in `proc_macro`,
+/// using `--cfg procmacro2_semver_exempt` will also enable this feature
+/// (see [`proc-macro2` docs / Unstable Features](https://docs.rs/proc-macro2/1.0.27/proc_macro2/#unstable-features)).
 #[cfg(feature = "migrate")]
 #[macro_export]
 macro_rules! migrate {
diff --git a/tests/migrate/macro.rs b/tests/migrate/macro.rs
index 9a3c16150e..7215046bef 100644
--- a/tests/migrate/macro.rs
+++ b/tests/migrate/macro.rs
@@ -7,6 +7,8 @@ static EMBEDDED: Migrator = sqlx::migrate!("tests/migrate/migrations");
 async fn same_output() -> anyhow::Result<()> {
     let runtime = Migrator::new(Path::new("tests/migrate/migrations")).await?;
 
+    assert_eq!(runtime.migrations.len(), EMBEDDED.migrations.len());
+
     for (e, r) in EMBEDDED.iter().zip(runtime.iter()) {
         assert_eq!(e.version, r.version);
         assert_eq!(e.description, r.description);
diff --git a/tests/postgres/macros.rs b/tests/postgres/macros.rs
index bc770e050f..51d1f89bb8 100644
--- a/tests/postgres/macros.rs
+++ b/tests/postgres/macros.rs
@@ -105,7 +105,8 @@ async fn test_query_file() -> anyhow::Result<()> {
         .fetch_one(&mut conn)
         .await?;
 
-    println!("{:?}", account);
+    assert_eq!(account.id, 1);
+    assert_eq!(account.name, Option::<String>::None);
 
     Ok(())
 }