diff --git a/.changepacks/changepack_log_mqohNf_DQdPeRP_BxXvFx.json b/.changepacks/changepack_log_mqohNf_DQdPeRP_BxXvFx.json new file mode 100644 index 0000000..0c00632 --- /dev/null +++ b/.changepacks/changepack_log_mqohNf_DQdPeRP_BxXvFx.json @@ -0,0 +1 @@ +{"changes":{"crates/vespertide-query/Cargo.toml":"Patch","crates/vespertide-exporter/Cargo.toml":"Patch","crates/vespertide-planner/Cargo.toml":"Patch","crates/vespertide-cli/Cargo.toml":"Patch","crates/vespertide-config/Cargo.toml":"Patch","crates/vespertide-naming/Cargo.toml":"Patch","crates/vespertide-loader/Cargo.toml":"Patch","crates/vespertide-macro/Cargo.toml":"Patch","crates/vespertide-core/Cargo.toml":"Patch","crates/vespertide/Cargo.toml":"Patch"},"note":"Add table descr","date":"2026-01-02T12:12:36.575837700Z"} \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index d2258e8..7d7b132 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2995,7 +2995,7 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "vespertide" -version = "0.1.20" +version = "0.1.21" dependencies = [ "vespertide-core", "vespertide-macro", @@ -3003,7 +3003,7 @@ dependencies = [ [[package]] name = "vespertide-cli" -version = "0.1.20" +version = "0.1.21" dependencies = [ "anyhow", "assert_cmd", @@ -3028,7 +3028,7 @@ dependencies = [ [[package]] name = "vespertide-config" -version = "0.1.20" +version = "0.1.21" dependencies = [ "clap", "serde", @@ -3037,7 +3037,7 @@ dependencies = [ [[package]] name = "vespertide-core" -version = "0.1.20" +version = "0.1.21" dependencies = [ "rstest", "schemars", @@ -3048,7 +3048,7 @@ dependencies = [ [[package]] name = "vespertide-exporter" -version = "0.1.20" +version = "0.1.21" dependencies = [ "insta", "rstest", @@ -3059,7 +3059,7 @@ dependencies = [ [[package]] name = "vespertide-loader" -version = "0.1.20" +version = "0.1.21" dependencies = [ "anyhow", "rstest", @@ -3074,7 +3074,7 @@ dependencies = [ [[package]] name = "vespertide-macro" -version = "0.1.20" +version = "0.1.21" dependencies = [ "proc-macro2", "quote", @@ -3091,11 +3091,11 @@ dependencies = [ [[package]] name = "vespertide-naming" -version = "0.1.20" +version = "0.1.21" [[package]] name = "vespertide-planner" -version = "0.1.20" +version = "0.1.21" dependencies = [ "insta", "rstest", @@ -3106,7 +3106,7 @@ dependencies = [ [[package]] name = "vespertide-query" -version = "0.1.20" +version = "0.1.21" dependencies = [ "insta", "rstest", diff --git a/crates/vespertide-cli/src/commands/diff.rs b/crates/vespertide-cli/src/commands/diff.rs index 32f78e4..058232e 100644 --- a/crates/vespertide-cli/src/commands/diff.rs +++ b/crates/vespertide-cli/src/commands/diff.rs @@ -255,6 +255,7 @@ mod tests { fs::create_dir_all(&models_dir).unwrap(); let table = TableDef { name: name.to_string(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), diff --git a/crates/vespertide-cli/src/commands/export.rs b/crates/vespertide-cli/src/commands/export.rs index d15f28e..9a7cd72 100644 --- a/crates/vespertide-cli/src/commands/export.rs +++ b/crates/vespertide-cli/src/commands/export.rs @@ -1,506 +1,507 @@ -use std::{ - fs, - path::{Path, PathBuf}, -}; - -use anyhow::{Context, Result}; -use clap::ValueEnum; -use vespertide_config::VespertideConfig; -use vespertide_core::TableDef; -use vespertide_exporter::{Orm, render_entity_with_schema, seaorm::SeaOrmExporterWithConfig}; - -use crate::utils::load_config; - -#[derive(Copy, Clone, Debug, ValueEnum)] -pub enum 
OrmArg { - Seaorm, - Sqlalchemy, - Sqlmodel, -} - -impl From for Orm { - fn from(value: OrmArg) -> Self { - match value { - OrmArg::Seaorm => Orm::SeaOrm, - OrmArg::Sqlalchemy => Orm::SqlAlchemy, - OrmArg::Sqlmodel => Orm::SqlModel, - } - } -} - -pub fn cmd_export(orm: OrmArg, export_dir: Option) -> Result<()> { - let config = load_config()?; - let models = load_models_recursive(config.models_dir()).context("load models recursively")?; - - // Normalize tables to convert inline constraints (primary_key, foreign_key, etc.) to table-level constraints - let normalized_models: Vec<(TableDef, PathBuf)> = models - .into_iter() - .map(|(table, rel_path)| { - table - .normalize() - .map_err(|e| anyhow::anyhow!("Failed to normalize table '{}': {}", table.name, e)) - .map(|normalized| (normalized, rel_path)) - }) - .collect::, _>>()?; - - let target_root = resolve_export_dir(export_dir, &config); - if !target_root.exists() { - fs::create_dir_all(&target_root) - .with_context(|| format!("create export dir {}", target_root.display()))?; - } - - let orm_kind: Orm = orm.into(); - - // Extract all tables for schema context (used for FK chain resolution) - let all_tables: Vec = normalized_models.iter().map(|(t, _)| t.clone()).collect(); - - // Create SeaORM exporter with config if needed - let seaorm_exporter = SeaOrmExporterWithConfig::new(config.seaorm()); - - for (table, rel_path) in &normalized_models { - let code = match orm_kind { - Orm::SeaOrm => seaorm_exporter - .render_entity_with_schema(table, &all_tables) - .map_err(|e| anyhow::anyhow!(e))?, - _ => render_entity_with_schema(orm_kind, table, &all_tables) - .map_err(|e| anyhow::anyhow!(e))?, - }; - let out_path = build_output_path(&target_root, rel_path, orm_kind); - if let Some(parent) = out_path.parent() { - fs::create_dir_all(parent) - .with_context(|| format!("create parent dir {}", parent.display()))?; - } - fs::write(&out_path, code).with_context(|| format!("write {}", out_path.display()))?; - if matches!(orm_kind, Orm::SeaOrm) { - ensure_mod_chain(&target_root, rel_path) - .with_context(|| format!("ensure mod chain for {}", out_path.display()))?; - } - println!("Exported {} -> {}", table.name, out_path.display()); - } - - Ok(()) -} - -fn resolve_export_dir(export_dir: Option, config: &VespertideConfig) -> PathBuf { - if let Some(dir) = export_dir { - return dir; - } - // Prefer explicit model_export_dir from config, fallback to default inside config. 
- config.model_export_dir().to_path_buf() -} - -fn build_output_path(root: &Path, rel_path: &Path, orm: Orm) -> PathBuf { - // Sanitize file name: replace spaces with underscores - let mut out = root.to_path_buf(); - - // Reconstruct path with sanitized file name - for component in rel_path.components() { - if let std::path::Component::Normal(name) = component { - out.push(name); - } else { - out.push(component.as_os_str()); - } - } - - // Sanitize the file name (last component) - if let Some(file_name) = out.file_name().and_then(|n| n.to_str()) { - // Remove extension, sanitize, then add new extension - let (stem, _ext) = if let Some(dot_idx) = file_name.rfind('.') { - file_name.split_at(dot_idx) - } else { - (file_name, "") - }; - - let sanitized = sanitize_filename(stem); - let ext = match orm { - Orm::SeaOrm => "rs", - Orm::SqlAlchemy | Orm::SqlModel => "py", - }; - out.set_file_name(format!("{}.{}", sanitized, ext)); - } - - out -} - -fn sanitize_filename(name: &str) -> String { - name.chars() - .map(|ch| { - if ch.is_alphanumeric() || ch == '_' || ch == '-' { - ch - } else { - '_' - } - }) - .collect::() -} - -fn load_models_recursive(base: &Path) -> Result> { - let mut out = Vec::new(); - if !base.exists() { - return Ok(out); - } - walk_models(base, base, &mut out)?; - Ok(out) -} - -fn ensure_mod_chain(root: &Path, rel_path: &Path) -> Result<()> { - // Only needed for SeaORM (Rust) exports to wire modules. - let mut comps: Vec = rel_path - .with_extension("") - .components() - .filter_map(|c| { - c.as_os_str() - .to_str() - .map(|s| sanitize_filename(s).to_string()) - }) - .collect(); - if comps.is_empty() { - return Ok(()); - } - // Build from deepest file up to root: dir/mod.rs should include child module. - while let Some(child) = comps.pop() { - let dir = root.join(comps.join(std::path::MAIN_SEPARATOR_STR)); - let mod_path = dir.join("mod.rs"); - if let Some(parent) = mod_path.parent() - && !parent.exists() - { - fs::create_dir_all(parent)?; - } - let mut content = if mod_path.exists() { - fs::read_to_string(&mod_path)? - } else { - String::new() - }; - let decl = format!("pub mod {};", child); - if !content.lines().any(|l| l.trim() == decl) { - if !content.is_empty() && !content.ends_with('\n') { - content.push('\n'); - } - content.push_str(&decl); - content.push('\n'); - fs::write(mod_path, content)?; - } - } - Ok(()) -} - -fn walk_models(root: &Path, current: &Path, acc: &mut Vec<(TableDef, PathBuf)>) -> Result<()> { - for entry in fs::read_dir(current).with_context(|| format!("read {}", current.display()))? { - let entry = entry?; - let path = entry.path(); - if path.is_dir() { - walk_models(root, &path, acc)?; - continue; - } - let ext = path.extension().and_then(|s| s.to_str()); - if !matches!(ext, Some("json") | Some("yaml") | Some("yml")) { - continue; - } - let content = fs::read_to_string(&path) - .with_context(|| format!("read model file: {}", path.display()))?; - let table: TableDef = if ext == Some("json") { - serde_json::from_str(&content) - .with_context(|| format!("parse JSON model: {}", path.display()))? - } else { - serde_yaml::from_str(&content) - .with_context(|| format!("parse YAML model: {}", path.display()))? 
- }; - let rel = path.strip_prefix(root).unwrap_or(&path).to_path_buf(); - acc.push((table, rel)); - } - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - use rstest::rstest; - use serial_test::serial; - use std::fs; - use tempfile::tempdir; - use vespertide_core::{ColumnDef, ColumnType, SimpleColumnType, TableConstraint}; - - struct CwdGuard { - original: PathBuf, - } - - impl CwdGuard { - fn new(dir: &PathBuf) -> Self { - let original = std::env::current_dir().unwrap(); - std::env::set_current_dir(dir).unwrap(); - Self { original } - } - } - - impl Drop for CwdGuard { - fn drop(&mut self) { - let _ = std::env::set_current_dir(&self.original); - } - } - - fn write_config() { - let cfg = VespertideConfig::default(); - let text = serde_json::to_string_pretty(&cfg).unwrap(); - fs::write("vespertide.json", text).unwrap(); - } - - fn write_model(path: &Path, table: &TableDef) { - if let Some(parent) = path.parent() { - fs::create_dir_all(parent).unwrap(); - } - fs::write(path, serde_json::to_string_pretty(table).unwrap()).unwrap(); - } - - fn sample_table(name: &str) -> TableDef { - TableDef { - name: name.to_string(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - } - } - - #[test] - #[serial] - fn export_writes_seaorm_files_to_default_dir() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - let model = sample_table("users"); - write_model(Path::new("models/users.json"), &model); - - cmd_export(OrmArg::Seaorm, None).unwrap(); - - let out = PathBuf::from("src/models/users.rs"); - assert!(out.exists()); - let content = fs::read_to_string(out).unwrap(); - assert!(content.contains("#[sea_orm(table_name = \"users\")]")); - - // mod.rs wiring at root - let root_mod = PathBuf::from("src/models/mod.rs"); - assert!(root_mod.exists()); - let root_mod_content = fs::read_to_string(root_mod).unwrap(); - assert!(root_mod_content.contains("pub mod users;")); - } - - #[test] - #[serial] - fn export_respects_custom_output_dir() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - let model = sample_table("posts"); - write_model(Path::new("models/blog/posts.json"), &model); - - let custom = PathBuf::from("out_dir"); - cmd_export(OrmArg::Seaorm, Some(custom.clone())).unwrap(); - - let out = custom.join("blog/posts.rs"); - assert!(out.exists()); - let content = fs::read_to_string(out).unwrap(); - assert!(content.contains("#[sea_orm(table_name = \"posts\")]")); - - // mod.rs wiring - let root_mod = custom.join("mod.rs"); - let blog_mod = custom.join("blog/mod.rs"); - assert!(root_mod.exists()); - assert!(blog_mod.exists()); - let root_mod_content = fs::read_to_string(root_mod).unwrap(); - let blog_mod_content = fs::read_to_string(blog_mod).unwrap(); - assert!(root_mod_content.contains("pub mod blog;")); - assert!(blog_mod_content.contains("pub mod posts;")); - } - - #[test] - #[serial] - fn export_with_sqlalchemy_sets_py_extension() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - let model = sample_table("items"); - write_model(Path::new("models/items.json"), &model); - - cmd_export(OrmArg::Sqlalchemy, None).unwrap(); - - 
let out = PathBuf::from("src/models/items.py"); - assert!(out.exists()); - let content = fs::read_to_string(out).unwrap(); - assert!(content.contains("items")); - } - - #[test] - #[serial] - fn export_with_sqlmodel_sets_py_extension() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - let model = sample_table("orders"); - write_model(Path::new("models/orders.json"), &model); - - cmd_export(OrmArg::Sqlmodel, None).unwrap(); - - let out = PathBuf::from("src/models/orders.py"); - assert!(out.exists()); - let content = fs::read_to_string(out).unwrap(); - assert!(content.contains("orders")); - } - - #[test] - #[serial] - fn load_models_recursive_returns_empty_when_absent() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - let models = load_models_recursive(Path::new("no_models")).unwrap(); - assert!(models.is_empty()); - } - - #[test] - #[serial] - fn load_models_recursive_ignores_non_model_files() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - fs::create_dir_all("models").unwrap(); - fs::write("models/ignore.txt", "hello").unwrap(); - write_model(Path::new("models/valid.json"), &sample_table("valid")); - - let models = load_models_recursive(Path::new("models")).unwrap(); - assert_eq!(models.len(), 1); - assert_eq!(models[0].0.name, "valid"); - } - - #[test] - #[serial] - fn load_models_recursive_parses_yaml_branch() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - fs::create_dir_all("models").unwrap(); - let table = sample_table("yaml_table"); - let yaml = serde_yaml::to_string(&table).unwrap(); - fs::write("models/yaml_table.yaml", yaml).unwrap(); - - let models = load_models_recursive(Path::new("models")).unwrap(); - assert_eq!(models.len(), 1); - assert_eq!(models[0].0.name, "yaml_table"); - } - - #[test] - #[serial] - fn ensure_mod_chain_adds_to_existing_file_without_trailing_newline() { - let tmp = tempdir().unwrap(); - let root = tmp.path().join("src/models"); - fs::create_dir_all(&root).unwrap(); - fs::write(root.join("mod.rs"), "pub mod existing;").unwrap(); - - ensure_mod_chain(&root, Path::new("blog/posts.rs")).unwrap(); - - let root_mod = fs::read_to_string(root.join("mod.rs")).unwrap(); - let blog_mod = fs::read_to_string(root.join("blog/mod.rs")).unwrap(); - assert!(root_mod.contains("pub mod existing;")); - assert!(root_mod.contains("pub mod blog;")); - assert!(blog_mod.contains("pub mod posts;")); - // ensure newline appended if missing - assert!(root_mod.ends_with('\n')); - } - - #[test] - fn ensure_mod_chain_no_components_is_noop() { - let tmp = tempdir().unwrap(); - let root = tmp.path().join("src/models"); - fs::create_dir_all(&root).unwrap(); - // empty path should not error - assert!(ensure_mod_chain(&root, Path::new("")).is_ok()); - } - - #[test] - #[serial] - fn resolve_export_dir_prefers_override() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - let cfg = VespertideConfig::default(); - let override_dir = PathBuf::from("custom_out"); - let resolved = super::resolve_export_dir(Some(override_dir.clone()), &cfg); - assert_eq!(resolved, override_dir); - } - - #[rstest] - #[case(OrmArg::Seaorm, Orm::SeaOrm)] - #[case(OrmArg::Sqlalchemy, Orm::SqlAlchemy)] - #[case(OrmArg::Sqlmodel, Orm::SqlModel)] - fn orm_arg_maps_to_enum(#[case] arg: OrmArg, #[case] expected: Orm) { - 
assert_eq!(Orm::from(arg), expected); - } - - #[rstest] - #[case("normal_name", "normal_name")] - #[case("user copy", "user_copy")] - #[case("user copy", "user__copy")] - #[case("user-copy", "user-copy")] - #[case("user.copy", "user_copy")] - #[case("user copy.json", "user_copy_json")] - fn test_sanitize_filename(#[case] input: &str, #[case] expected: &str) { - assert_eq!(sanitize_filename(input), expected); - } - - #[test] - fn build_output_path_sanitizes_spaces() { - use std::path::Path; - let root = Path::new("src/models"); - let rel_path = Path::new("user copy.json"); - let out = build_output_path(root, rel_path, Orm::SeaOrm); - assert_eq!(out, Path::new("src/models/user_copy.rs")); - - let rel_path2 = Path::new("blog/post name.yaml"); - let out2 = build_output_path(root, rel_path2, Orm::SeaOrm); - assert_eq!(out2, Path::new("src/models/blog/post_name.rs")); - } - - #[test] - fn build_output_path_handles_file_without_extension() { - use std::path::Path; - let root = Path::new("src/models"); - // File without extension - covers line 88 (else branch) - let rel_path = Path::new("users"); - let out = build_output_path(root, rel_path, Orm::SeaOrm); - assert_eq!(out, Path::new("src/models/users.rs")); - - let out_py = build_output_path(root, rel_path, Orm::SqlAlchemy); - assert_eq!(out_py, Path::new("src/models/users.py")); - } - - #[test] - fn build_output_path_handles_special_path_components() { - use std::path::Path; - let root = Path::new("src/models"); - // Path with CurDir component (.) - covers line 78 (non-Normal component branch) - let rel_path = Path::new("./blog/posts.json"); - let out = build_output_path(root, rel_path, Orm::SeaOrm); - // The . component gets pushed via the else branch - assert!(out.to_string_lossy().contains("posts")); - - // Path with ParentDir component (..) - let rel_path2 = Path::new("../other/items.yaml"); - let out2 = build_output_path(root, rel_path2, Orm::SeaOrm); - assert!(out2.to_string_lossy().contains("items")); - } -} +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use anyhow::{Context, Result}; +use clap::ValueEnum; +use vespertide_config::VespertideConfig; +use vespertide_core::TableDef; +use vespertide_exporter::{Orm, render_entity_with_schema, seaorm::SeaOrmExporterWithConfig}; + +use crate::utils::load_config; + +#[derive(Copy, Clone, Debug, ValueEnum)] +pub enum OrmArg { + Seaorm, + Sqlalchemy, + Sqlmodel, +} + +impl From for Orm { + fn from(value: OrmArg) -> Self { + match value { + OrmArg::Seaorm => Orm::SeaOrm, + OrmArg::Sqlalchemy => Orm::SqlAlchemy, + OrmArg::Sqlmodel => Orm::SqlModel, + } + } +} + +pub fn cmd_export(orm: OrmArg, export_dir: Option) -> Result<()> { + let config = load_config()?; + let models = load_models_recursive(config.models_dir()).context("load models recursively")?; + + // Normalize tables to convert inline constraints (primary_key, foreign_key, etc.) 
to table-level constraints + let normalized_models: Vec<(TableDef, PathBuf)> = models + .into_iter() + .map(|(table, rel_path)| { + table + .normalize() + .map_err(|e| anyhow::anyhow!("Failed to normalize table '{}': {}", table.name, e)) + .map(|normalized| (normalized, rel_path)) + }) + .collect::, _>>()?; + + let target_root = resolve_export_dir(export_dir, &config); + if !target_root.exists() { + fs::create_dir_all(&target_root) + .with_context(|| format!("create export dir {}", target_root.display()))?; + } + + let orm_kind: Orm = orm.into(); + + // Extract all tables for schema context (used for FK chain resolution) + let all_tables: Vec = normalized_models.iter().map(|(t, _)| t.clone()).collect(); + + // Create SeaORM exporter with config if needed + let seaorm_exporter = SeaOrmExporterWithConfig::new(config.seaorm()); + + for (table, rel_path) in &normalized_models { + let code = match orm_kind { + Orm::SeaOrm => seaorm_exporter + .render_entity_with_schema(table, &all_tables) + .map_err(|e| anyhow::anyhow!(e))?, + _ => render_entity_with_schema(orm_kind, table, &all_tables) + .map_err(|e| anyhow::anyhow!(e))?, + }; + let out_path = build_output_path(&target_root, rel_path, orm_kind); + if let Some(parent) = out_path.parent() { + fs::create_dir_all(parent) + .with_context(|| format!("create parent dir {}", parent.display()))?; + } + fs::write(&out_path, code).with_context(|| format!("write {}", out_path.display()))?; + if matches!(orm_kind, Orm::SeaOrm) { + ensure_mod_chain(&target_root, rel_path) + .with_context(|| format!("ensure mod chain for {}", out_path.display()))?; + } + println!("Exported {} -> {}", table.name, out_path.display()); + } + + Ok(()) +} + +fn resolve_export_dir(export_dir: Option, config: &VespertideConfig) -> PathBuf { + if let Some(dir) = export_dir { + return dir; + } + // Prefer explicit model_export_dir from config, fallback to default inside config. + config.model_export_dir().to_path_buf() +} + +fn build_output_path(root: &Path, rel_path: &Path, orm: Orm) -> PathBuf { + // Sanitize file name: replace spaces with underscores + let mut out = root.to_path_buf(); + + // Reconstruct path with sanitized file name + for component in rel_path.components() { + if let std::path::Component::Normal(name) = component { + out.push(name); + } else { + out.push(component.as_os_str()); + } + } + + // Sanitize the file name (last component) + if let Some(file_name) = out.file_name().and_then(|n| n.to_str()) { + // Remove extension, sanitize, then add new extension + let (stem, _ext) = if let Some(dot_idx) = file_name.rfind('.') { + file_name.split_at(dot_idx) + } else { + (file_name, "") + }; + + let sanitized = sanitize_filename(stem); + let ext = match orm { + Orm::SeaOrm => "rs", + Orm::SqlAlchemy | Orm::SqlModel => "py", + }; + out.set_file_name(format!("{}.{}", sanitized, ext)); + } + + out +} + +fn sanitize_filename(name: &str) -> String { + name.chars() + .map(|ch| { + if ch.is_alphanumeric() || ch == '_' || ch == '-' { + ch + } else { + '_' + } + }) + .collect::() +} + +fn load_models_recursive(base: &Path) -> Result> { + let mut out = Vec::new(); + if !base.exists() { + return Ok(out); + } + walk_models(base, base, &mut out)?; + Ok(out) +} + +fn ensure_mod_chain(root: &Path, rel_path: &Path) -> Result<()> { + // Only needed for SeaORM (Rust) exports to wire modules. 
+ let mut comps: Vec = rel_path + .with_extension("") + .components() + .filter_map(|c| { + c.as_os_str() + .to_str() + .map(|s| sanitize_filename(s).to_string()) + }) + .collect(); + if comps.is_empty() { + return Ok(()); + } + // Build from deepest file up to root: dir/mod.rs should include child module. + while let Some(child) = comps.pop() { + let dir = root.join(comps.join(std::path::MAIN_SEPARATOR_STR)); + let mod_path = dir.join("mod.rs"); + if let Some(parent) = mod_path.parent() + && !parent.exists() + { + fs::create_dir_all(parent)?; + } + let mut content = if mod_path.exists() { + fs::read_to_string(&mod_path)? + } else { + String::new() + }; + let decl = format!("pub mod {};", child); + if !content.lines().any(|l| l.trim() == decl) { + if !content.is_empty() && !content.ends_with('\n') { + content.push('\n'); + } + content.push_str(&decl); + content.push('\n'); + fs::write(mod_path, content)?; + } + } + Ok(()) +} + +fn walk_models(root: &Path, current: &Path, acc: &mut Vec<(TableDef, PathBuf)>) -> Result<()> { + for entry in fs::read_dir(current).with_context(|| format!("read {}", current.display()))? { + let entry = entry?; + let path = entry.path(); + if path.is_dir() { + walk_models(root, &path, acc)?; + continue; + } + let ext = path.extension().and_then(|s| s.to_str()); + if !matches!(ext, Some("json") | Some("yaml") | Some("yml")) { + continue; + } + let content = fs::read_to_string(&path) + .with_context(|| format!("read model file: {}", path.display()))?; + let table: TableDef = if ext == Some("json") { + serde_json::from_str(&content) + .with_context(|| format!("parse JSON model: {}", path.display()))? + } else { + serde_yaml::from_str(&content) + .with_context(|| format!("parse YAML model: {}", path.display()))? + }; + let rel = path.strip_prefix(root).unwrap_or(&path).to_path_buf(); + acc.push((table, rel)); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use rstest::rstest; + use serial_test::serial; + use std::fs; + use tempfile::tempdir; + use vespertide_core::{ColumnDef, ColumnType, SimpleColumnType, TableConstraint}; + + struct CwdGuard { + original: PathBuf, + } + + impl CwdGuard { + fn new(dir: &PathBuf) -> Self { + let original = std::env::current_dir().unwrap(); + std::env::set_current_dir(dir).unwrap(); + Self { original } + } + } + + impl Drop for CwdGuard { + fn drop(&mut self) { + let _ = std::env::set_current_dir(&self.original); + } + } + + fn write_config() { + let cfg = VespertideConfig::default(); + let text = serde_json::to_string_pretty(&cfg).unwrap(); + fs::write("vespertide.json", text).unwrap(); + } + + fn write_model(path: &Path, table: &TableDef) { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).unwrap(); + } + fs::write(path, serde_json::to_string_pretty(table).unwrap()).unwrap(); + } + + fn sample_table(name: &str) -> TableDef { + TableDef { + name: name.to_string(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + } + } + + #[test] + #[serial] + fn export_writes_seaorm_files_to_default_dir() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + let model = sample_table("users"); + write_model(Path::new("models/users.json"), 
&model); + + cmd_export(OrmArg::Seaorm, None).unwrap(); + + let out = PathBuf::from("src/models/users.rs"); + assert!(out.exists()); + let content = fs::read_to_string(out).unwrap(); + assert!(content.contains("#[sea_orm(table_name = \"users\")]")); + + // mod.rs wiring at root + let root_mod = PathBuf::from("src/models/mod.rs"); + assert!(root_mod.exists()); + let root_mod_content = fs::read_to_string(root_mod).unwrap(); + assert!(root_mod_content.contains("pub mod users;")); + } + + #[test] + #[serial] + fn export_respects_custom_output_dir() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + let model = sample_table("posts"); + write_model(Path::new("models/blog/posts.json"), &model); + + let custom = PathBuf::from("out_dir"); + cmd_export(OrmArg::Seaorm, Some(custom.clone())).unwrap(); + + let out = custom.join("blog/posts.rs"); + assert!(out.exists()); + let content = fs::read_to_string(out).unwrap(); + assert!(content.contains("#[sea_orm(table_name = \"posts\")]")); + + // mod.rs wiring + let root_mod = custom.join("mod.rs"); + let blog_mod = custom.join("blog/mod.rs"); + assert!(root_mod.exists()); + assert!(blog_mod.exists()); + let root_mod_content = fs::read_to_string(root_mod).unwrap(); + let blog_mod_content = fs::read_to_string(blog_mod).unwrap(); + assert!(root_mod_content.contains("pub mod blog;")); + assert!(blog_mod_content.contains("pub mod posts;")); + } + + #[test] + #[serial] + fn export_with_sqlalchemy_sets_py_extension() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + let model = sample_table("items"); + write_model(Path::new("models/items.json"), &model); + + cmd_export(OrmArg::Sqlalchemy, None).unwrap(); + + let out = PathBuf::from("src/models/items.py"); + assert!(out.exists()); + let content = fs::read_to_string(out).unwrap(); + assert!(content.contains("items")); + } + + #[test] + #[serial] + fn export_with_sqlmodel_sets_py_extension() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + let model = sample_table("orders"); + write_model(Path::new("models/orders.json"), &model); + + cmd_export(OrmArg::Sqlmodel, None).unwrap(); + + let out = PathBuf::from("src/models/orders.py"); + assert!(out.exists()); + let content = fs::read_to_string(out).unwrap(); + assert!(content.contains("orders")); + } + + #[test] + #[serial] + fn load_models_recursive_returns_empty_when_absent() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + let models = load_models_recursive(Path::new("no_models")).unwrap(); + assert!(models.is_empty()); + } + + #[test] + #[serial] + fn load_models_recursive_ignores_non_model_files() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + fs::create_dir_all("models").unwrap(); + fs::write("models/ignore.txt", "hello").unwrap(); + write_model(Path::new("models/valid.json"), &sample_table("valid")); + + let models = load_models_recursive(Path::new("models")).unwrap(); + assert_eq!(models.len(), 1); + assert_eq!(models[0].0.name, "valid"); + } + + #[test] + #[serial] + fn load_models_recursive_parses_yaml_branch() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + fs::create_dir_all("models").unwrap(); + let table = sample_table("yaml_table"); + let yaml = serde_yaml::to_string(&table).unwrap(); + 
fs::write("models/yaml_table.yaml", yaml).unwrap(); + + let models = load_models_recursive(Path::new("models")).unwrap(); + assert_eq!(models.len(), 1); + assert_eq!(models[0].0.name, "yaml_table"); + } + + #[test] + #[serial] + fn ensure_mod_chain_adds_to_existing_file_without_trailing_newline() { + let tmp = tempdir().unwrap(); + let root = tmp.path().join("src/models"); + fs::create_dir_all(&root).unwrap(); + fs::write(root.join("mod.rs"), "pub mod existing;").unwrap(); + + ensure_mod_chain(&root, Path::new("blog/posts.rs")).unwrap(); + + let root_mod = fs::read_to_string(root.join("mod.rs")).unwrap(); + let blog_mod = fs::read_to_string(root.join("blog/mod.rs")).unwrap(); + assert!(root_mod.contains("pub mod existing;")); + assert!(root_mod.contains("pub mod blog;")); + assert!(blog_mod.contains("pub mod posts;")); + // ensure newline appended if missing + assert!(root_mod.ends_with('\n')); + } + + #[test] + fn ensure_mod_chain_no_components_is_noop() { + let tmp = tempdir().unwrap(); + let root = tmp.path().join("src/models"); + fs::create_dir_all(&root).unwrap(); + // empty path should not error + assert!(ensure_mod_chain(&root, Path::new("")).is_ok()); + } + + #[test] + #[serial] + fn resolve_export_dir_prefers_override() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + let cfg = VespertideConfig::default(); + let override_dir = PathBuf::from("custom_out"); + let resolved = super::resolve_export_dir(Some(override_dir.clone()), &cfg); + assert_eq!(resolved, override_dir); + } + + #[rstest] + #[case(OrmArg::Seaorm, Orm::SeaOrm)] + #[case(OrmArg::Sqlalchemy, Orm::SqlAlchemy)] + #[case(OrmArg::Sqlmodel, Orm::SqlModel)] + fn orm_arg_maps_to_enum(#[case] arg: OrmArg, #[case] expected: Orm) { + assert_eq!(Orm::from(arg), expected); + } + + #[rstest] + #[case("normal_name", "normal_name")] + #[case("user copy", "user_copy")] + #[case("user copy", "user__copy")] + #[case("user-copy", "user-copy")] + #[case("user.copy", "user_copy")] + #[case("user copy.json", "user_copy_json")] + fn test_sanitize_filename(#[case] input: &str, #[case] expected: &str) { + assert_eq!(sanitize_filename(input), expected); + } + + #[test] + fn build_output_path_sanitizes_spaces() { + use std::path::Path; + let root = Path::new("src/models"); + let rel_path = Path::new("user copy.json"); + let out = build_output_path(root, rel_path, Orm::SeaOrm); + assert_eq!(out, Path::new("src/models/user_copy.rs")); + + let rel_path2 = Path::new("blog/post name.yaml"); + let out2 = build_output_path(root, rel_path2, Orm::SeaOrm); + assert_eq!(out2, Path::new("src/models/blog/post_name.rs")); + } + + #[test] + fn build_output_path_handles_file_without_extension() { + use std::path::Path; + let root = Path::new("src/models"); + // File without extension - covers line 88 (else branch) + let rel_path = Path::new("users"); + let out = build_output_path(root, rel_path, Orm::SeaOrm); + assert_eq!(out, Path::new("src/models/users.rs")); + + let out_py = build_output_path(root, rel_path, Orm::SqlAlchemy); + assert_eq!(out_py, Path::new("src/models/users.py")); + } + + #[test] + fn build_output_path_handles_special_path_components() { + use std::path::Path; + let root = Path::new("src/models"); + // Path with CurDir component (.) - covers line 78 (non-Normal component branch) + let rel_path = Path::new("./blog/posts.json"); + let out = build_output_path(root, rel_path, Orm::SeaOrm); + // The . 
component gets pushed via the else branch + assert!(out.to_string_lossy().contains("posts")); + + // Path with ParentDir component (..) + let rel_path2 = Path::new("../other/items.yaml"); + let out2 = build_output_path(root, rel_path2, Orm::SeaOrm); + assert!(out2.to_string_lossy().contains("items")); + } +} diff --git a/crates/vespertide-cli/src/commands/new.rs b/crates/vespertide-cli/src/commands/new.rs index b829570..440bc58 100644 --- a/crates/vespertide-cli/src/commands/new.rs +++ b/crates/vespertide-cli/src/commands/new.rs @@ -30,6 +30,7 @@ pub fn cmd_new(name: String, format: Option) -> Result<()> { let table = TableDef { name: name.clone(), + description: None, columns: Vec::new(), constraints: Vec::new(), }; diff --git a/crates/vespertide-cli/src/commands/revision.rs b/crates/vespertide-cli/src/commands/revision.rs index 95d5c9c..6d175b0 100644 --- a/crates/vespertide-cli/src/commands/revision.rs +++ b/crates/vespertide-cli/src/commands/revision.rs @@ -1,1099 +1,1100 @@ -use std::collections::HashMap; -use std::fs; - -use anyhow::{Context, Result}; -use chrono::Utc; -use colored::Colorize; -use dialoguer::Input; -use serde_json::Value; -use vespertide_config::FileFormat; -use vespertide_core::{MigrationAction, MigrationPlan}; -use vespertide_planner::{find_missing_fill_with, plan_next_migration}; - -use crate::utils::{ - load_config, load_migrations, load_models, migration_filename_with_format_and_pattern, -}; - -/// Parse fill_with arguments from CLI. -/// Format: table.column=value -fn parse_fill_with_args(args: &[String]) -> HashMap<(String, String), String> { - let mut map = HashMap::new(); - for arg in args { - if let Some((key, value)) = arg.split_once('=') - && let Some((table, column)) = key.split_once('.') - { - map.insert((table.to_string(), column.to_string()), value.to_string()); - } - } - map -} - -/// Format the type info string for display. -fn format_type_info(column_type: Option<&String>) -> String { - column_type.map(|t| format!(" ({})", t)).unwrap_or_default() -} - -/// Format a single fill_with item for display. -fn format_fill_with_item(table: &str, column: &str, type_info: &str, action_type: &str) -> String { - format!( - " {} {}.{}{}\n {} {}", - "•".bright_cyan(), - table.bright_white(), - column.bright_green(), - type_info.bright_black(), - "Action:".bright_black(), - action_type.bright_magenta() - ) -} - -/// Format the prompt string for interactive input. -fn format_fill_with_prompt(table: &str, column: &str) -> String { - format!( - " Enter fill value for {}.{}", - table.bright_white(), - column.bright_green() - ) -} - -/// Print the header for fill_with prompts. -fn print_fill_with_header() { - println!( - "\n{} {}", - "⚠".bright_yellow(), - "The following columns require fill_with values:".bright_yellow() - ); - println!("{}", "─".repeat(60).bright_black()); -} - -/// Print the footer for fill_with prompts. -fn print_fill_with_footer() { - println!("{}", "─".repeat(60).bright_black()); -} - -/// Print a fill_with item and return the formatted prompt. -fn print_fill_with_item_and_get_prompt( - table: &str, - column: &str, - column_type: Option<&String>, - action_type: &str, -) -> String { - let type_info = format_type_info(column_type); - let item_display = format_fill_with_item(table, column, &type_info, action_type); - println!("{}", item_display); - format_fill_with_prompt(table, column) -} - -/// Prompt the user for a fill_with value using dialoguer. -/// This function wraps terminal I/O and cannot be unit tested without a real terminal. 
-#[cfg(not(tarpaulin_include))]
-fn prompt_fill_with_value(prompt: &str) -> Result<String> {
-    Input::new()
-        .with_prompt(prompt)
-        .interact_text()
-        .context("failed to read input")
-}
-
-/// Collect fill_with values interactively for missing columns.
-/// The `prompt_fn` parameter allows injecting a mock for testing.
-fn collect_fill_with_values<F>(
-    missing: &[vespertide_planner::FillWithRequired],
-    fill_values: &mut HashMap<(String, String), String>,
-    prompt_fn: F,
-) -> Result<()>
-where
-    F: Fn(&str) -> Result<String>,
-{
-    print_fill_with_header();
-
-    for item in missing {
-        let prompt = print_fill_with_item_and_get_prompt(
-            &item.table,
-            &item.column,
-            item.column_type.as_ref(),
-            item.action_type,
-        );
-
-        let value = prompt_fn(&prompt)?;
-        fill_values.insert((item.table.clone(), item.column.clone()), value);
-    }
-
-    print_fill_with_footer();
-    Ok(())
-}
-
-/// Apply fill_with values to a migration plan.
-fn apply_fill_with_to_plan(
-    plan: &mut MigrationPlan,
-    fill_values: &HashMap<(String, String), String>,
-) {
-    for action in &mut plan.actions {
-        match action {
-            MigrationAction::AddColumn {
-                table,
-                column,
-                fill_with,
-            } => {
-                if fill_with.is_none()
-                    && let Some(value) = fill_values.get(&(table.clone(), column.name.clone()))
-                {
-                    *fill_with = Some(value.clone());
-                }
-            }
-            MigrationAction::ModifyColumnNullable {
-                table,
-                column,
-                fill_with,
-                ..
-            } => {
-                if fill_with.is_none()
-                    && let Some(value) = fill_values.get(&(table.clone(), column.clone()))
-                {
-                    *fill_with = Some(value.clone());
-                }
-            }
-            _ => {}
-        }
-    }
-}
-
-/// Handle interactive fill_with collection if there are missing values.
-/// Returns the updated fill_values map after collecting from user.
-fn handle_missing_fill_with<F>(
-    plan: &mut MigrationPlan,
-    fill_values: &mut HashMap<(String, String), String>,
-    prompt_fn: F,
-) -> Result<()>
-where
-    F: Fn(&str) -> Result<String>,
-{
-    let missing = find_missing_fill_with(plan);
-
-    if !missing.is_empty() {
-        collect_fill_with_values(&missing, fill_values, prompt_fn)?;
-
-        // Apply the collected fill_with values
-        apply_fill_with_to_plan(plan, fill_values);
-    }
-
-    Ok(())
-}
-
-pub fn cmd_revision(message: String, fill_with_args: Vec<String>) -> Result<()> {
-    let config = load_config()?;
-    let current_models = load_models(&config)?;
-    let applied_plans = load_migrations(&config)?;
-
-    let mut plan = plan_next_migration(&current_models, &applied_plans)
-        .map_err(|e| anyhow::anyhow!("planning error: {}", e))?;
-
-    if plan.actions.is_empty() {
-        println!(
-            "{} {}",
-            "No changes detected.".bright_yellow(),
-            "Nothing to migrate.".bright_white()
-        );
-        return Ok(());
-    }
-
-    // Parse CLI fill_with arguments
-    let mut fill_values = parse_fill_with_args(&fill_with_args);
-
-    // Apply any CLI-provided fill_with values first
-    apply_fill_with_to_plan(&mut plan, &fill_values);
-
-    // Handle any missing fill_with values interactively
-    handle_missing_fill_with(&mut plan, &mut fill_values, prompt_fill_with_value)?;
-
-    plan.comment = Some(message);
-    if plan.created_at.is_none() {
-        // Record creation time in RFC3339 (UTC).
- plan.created_at = Some(Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true)); - } - - let migrations_dir = config.migrations_dir(); - if !migrations_dir.exists() { - fs::create_dir_all(migrations_dir).context("create migrations directory")?; - } - - let format = config.migration_format(); - let filename = migration_filename_with_format_and_pattern( - plan.version, - plan.comment.as_deref(), - format, - config.migration_filename_pattern(), - ); - let path = migrations_dir.join(&filename); - - let schema_url = schema_url_for(format); - match format { - FileFormat::Json => write_json_with_schema(&path, &plan, &schema_url)?, - FileFormat::Yaml | FileFormat::Yml => write_yaml(&path, &plan, &schema_url)?, - } - - println!( - "{} {}", - "Created migration:".bright_green().bold(), - format!("{}", path.display()).bright_white() - ); - println!( - " {} {}", - "Version:".bright_cyan(), - plan.version.to_string().bright_magenta().bold() - ); - println!( - " {} {}", - "Actions:".bright_cyan(), - plan.actions.len().to_string().bright_yellow() - ); - if let Some(comment) = &plan.comment { - println!(" {} {}", "Comment:".bright_cyan(), comment.bright_white()); - } - - Ok(()) -} - -fn schema_url_for(format: FileFormat) -> String { - // If not set, default to public raw GitHub schema location. - // Users can override via VESP_SCHEMA_BASE_URL. - let base = std::env::var("VESP_SCHEMA_BASE_URL").ok(); - let base = base.as_deref().unwrap_or( - "https://raw.githubusercontent.com/dev-five-git/vespertide/refs/heads/main/schemas", - ); - let base = base.trim_end_matches('/'); - match format { - FileFormat::Json => format!("{}/migration.schema.json", base), - FileFormat::Yaml | FileFormat::Yml => format!("{}/migration.schema.json", base), - } -} - -fn write_json_with_schema( - path: &std::path::Path, - plan: &MigrationPlan, - schema_url: &str, -) -> Result<()> { - let mut value = serde_json::to_value(plan).context("serialize migration plan to json")?; - if let Value::Object(ref mut map) = value { - map.insert("$schema".to_string(), Value::String(schema_url.to_string())); - } - let text = serde_json::to_string_pretty(&value).context("stringify json with schema")?; - fs::write(path, text).with_context(|| format!("write file: {}", path.display()))?; - Ok(()) -} - -fn write_yaml(path: &std::path::Path, plan: &MigrationPlan, schema_url: &str) -> Result<()> { - let mut value = serde_yaml::to_value(plan).context("serialize migration plan to yaml value")?; - if let serde_yaml::Value::Mapping(ref mut map) = value { - map.insert( - serde_yaml::Value::String("$schema".to_string()), - serde_yaml::Value::String(schema_url.to_string()), - ); - } - let text = serde_yaml::to_string(&value).context("serialize yaml with schema")?; - fs::write(path, text).with_context(|| format!("write file: {}", path.display()))?; - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::{env, fs, path::PathBuf}; - use tempfile::tempdir; - use vespertide_config::{FileFormat, VespertideConfig}; - use vespertide_core::{ColumnDef, ColumnType, SimpleColumnType, TableConstraint, TableDef}; - - struct CwdGuard { - original: PathBuf, - } - - impl CwdGuard { - fn new(dir: &PathBuf) -> Self { - let original = env::current_dir().unwrap(); - env::set_current_dir(dir).unwrap(); - Self { original } - } - } - - impl Drop for CwdGuard { - fn drop(&mut self) { - let _ = env::set_current_dir(&self.original); - } - } - - fn write_config() -> VespertideConfig { - write_config_with_format(None) - } - - fn write_config_with_format(fmt: Option) -> 
VespertideConfig { - let mut cfg = VespertideConfig::default(); - if let Some(f) = fmt { - cfg.migration_format = f; - } - let text = serde_json::to_string_pretty(&cfg).unwrap(); - fs::write("vespertide.json", text).unwrap(); - cfg - } - - fn write_model(name: &str) { - let models_dir = PathBuf::from("models"); - fs::create_dir_all(&models_dir).unwrap(); - let table = TableDef { - name: name.to_string(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }; - let path = models_dir.join(format!("{name}.json")); - fs::write(path, serde_json::to_string_pretty(&table).unwrap()).unwrap(); - } - - #[test] - #[serial_test::serial] - fn cmd_revision_writes_migration() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config(); - write_model("users"); - fs::create_dir_all(cfg.migrations_dir()).unwrap(); - - cmd_revision("init".into(), vec![]).unwrap(); - - let entries: Vec<_> = fs::read_dir(cfg.migrations_dir()).unwrap().collect(); - assert!(!entries.is_empty()); - } - - #[test] - #[serial_test::serial] - fn cmd_revision_no_changes_short_circuits() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config(); - // no models, no migrations -> plan with no actions -> early return - assert!(cmd_revision("noop".into(), vec![]).is_ok()); - // migrations dir should not be created - assert!(!cfg.migrations_dir().exists()); - } - - #[test] - #[serial_test::serial] - fn cmd_revision_writes_yaml_when_configured() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config_with_format(Some(FileFormat::Yaml)); - write_model("users"); - // ensure migrations dir absent to exercise create_dir_all branch - if cfg.migrations_dir().exists() { - fs::remove_dir_all(cfg.migrations_dir()).unwrap(); - } - - cmd_revision("yaml".into(), vec![]).unwrap(); - - let entries: Vec<_> = fs::read_dir(cfg.migrations_dir()).unwrap().collect(); - assert!(!entries.is_empty()); - let has_yaml = entries.iter().any(|e| { - e.as_ref() - .unwrap() - .path() - .extension() - .map(|s| s == "yaml") - .unwrap_or(false) - }); - assert!(has_yaml); - } - - #[test] - fn test_parse_fill_with_args() { - let args = vec![ - "users.email=default@example.com".to_string(), - "orders.status=pending".to_string(), - ]; - let result = parse_fill_with_args(&args); - - assert_eq!(result.len(), 2); - assert_eq!( - result.get(&("users".to_string(), "email".to_string())), - Some(&"default@example.com".to_string()) - ); - assert_eq!( - result.get(&("orders".to_string(), "status".to_string())), - Some(&"pending".to_string()) - ); - } - - #[test] - fn test_parse_fill_with_args_invalid_format() { - let args = vec![ - "invalid_format".to_string(), - "no_equals_sign".to_string(), - "users.email=valid".to_string(), - ]; - let result = parse_fill_with_args(&args); - - // Only the valid one should be parsed - assert_eq!(result.len(), 1); - assert_eq!( - result.get(&("users".to_string(), "email".to_string())), - Some(&"valid".to_string()) - ); - } - - #[test] - fn test_apply_fill_with_to_plan_add_column() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - 
created_at: None, - version: 1, - actions: vec![MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }], - }; - - let mut fill_values = HashMap::new(); - fill_values.insert( - ("users".to_string(), "email".to_string()), - "'default@example.com'".to_string(), - ); - - apply_fill_with_to_plan(&mut plan, &fill_values); - - match &plan.actions[0] { - MigrationAction::AddColumn { fill_with, .. } => { - assert_eq!(fill_with, &Some("'default@example.com'".to_string())); - } - _ => panic!("Expected AddColumn action"), - } - } - - #[test] - fn test_apply_fill_with_to_plan_modify_column_nullable() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::ModifyColumnNullable { - table: "users".into(), - column: "status".into(), - nullable: false, - fill_with: None, - }], - }; - - let mut fill_values = HashMap::new(); - fill_values.insert( - ("users".to_string(), "status".to_string()), - "'active'".to_string(), - ); - - apply_fill_with_to_plan(&mut plan, &fill_values); - - match &plan.actions[0] { - MigrationAction::ModifyColumnNullable { fill_with, .. } => { - assert_eq!(fill_with, &Some("'active'".to_string())); - } - _ => panic!("Expected ModifyColumnNullable action"), - } - } - - #[test] - fn test_apply_fill_with_to_plan_skips_existing_fill_with() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: Some("'existing@example.com'".to_string()), - }], - }; - - let mut fill_values = HashMap::new(); - fill_values.insert( - ("users".to_string(), "email".to_string()), - "'new@example.com'".to_string(), - ); - - apply_fill_with_to_plan(&mut plan, &fill_values); - - // Should keep existing value, not replace with new - match &plan.actions[0] { - MigrationAction::AddColumn { fill_with, .. } => { - assert_eq!(fill_with, &Some("'existing@example.com'".to_string())); - } - _ => panic!("Expected AddColumn action"), - } - } - - #[test] - fn test_apply_fill_with_to_plan_no_match() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }], - }; - - let mut fill_values = HashMap::new(); - fill_values.insert( - ("orders".to_string(), "status".to_string()), - "'pending'".to_string(), - ); - - apply_fill_with_to_plan(&mut plan, &fill_values); - - // Should remain None since no match - match &plan.actions[0] { - MigrationAction::AddColumn { fill_with, .. 
} => { - assert_eq!(fill_with, &None); - } - _ => panic!("Expected AddColumn action"), - } - } - - #[test] - fn test_apply_fill_with_to_plan_multiple_actions() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![ - MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }, - MigrationAction::ModifyColumnNullable { - table: "orders".into(), - column: "status".into(), - nullable: false, - fill_with: None, - }, - ], - }; - - let mut fill_values = HashMap::new(); - fill_values.insert( - ("users".to_string(), "email".to_string()), - "'user@example.com'".to_string(), - ); - fill_values.insert( - ("orders".to_string(), "status".to_string()), - "'pending'".to_string(), - ); - - apply_fill_with_to_plan(&mut plan, &fill_values); - - match &plan.actions[0] { - MigrationAction::AddColumn { fill_with, .. } => { - assert_eq!(fill_with, &Some("'user@example.com'".to_string())); - } - _ => panic!("Expected AddColumn action"), - } - - match &plan.actions[1] { - MigrationAction::ModifyColumnNullable { fill_with, .. } => { - assert_eq!(fill_with, &Some("'pending'".to_string())); - } - _ => panic!("Expected ModifyColumnNullable action"), - } - } - - #[test] - fn test_apply_fill_with_to_plan_other_actions_ignored() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::DeleteColumn { - table: "users".into(), - column: "old_column".into(), - }], - }; - - let mut fill_values = HashMap::new(); - fill_values.insert( - ("users".to_string(), "old_column".to_string()), - "'value'".to_string(), - ); - - // Should not panic or modify anything - apply_fill_with_to_plan(&mut plan, &fill_values); - - match &plan.actions[0] { - MigrationAction::DeleteColumn { table, column } => { - assert_eq!(table, "users"); - assert_eq!(column, "old_column"); - } - _ => panic!("Expected DeleteColumn action"), - } - } - - #[test] - fn test_format_type_info_with_some() { - let column_type = Some("Integer".to_string()); - let result = format_type_info(column_type.as_ref()); - assert_eq!(result, " (Integer)"); - } - - #[test] - fn test_format_type_info_with_none() { - let result = format_type_info(None); - assert_eq!(result, ""); - } - - #[test] - fn test_format_fill_with_item() { - let result = format_fill_with_item("users", "email", " (Text)", "AddColumn"); - // The result should contain the table, column, type info, and action type - // Colors make exact matching difficult, but we can check structure - assert!(result.contains("users")); - assert!(result.contains("email")); - assert!(result.contains("(Text)")); - assert!(result.contains("AddColumn")); - assert!(result.contains("Action:")); - } - - #[test] - fn test_format_fill_with_item_empty_type_info() { - let result = format_fill_with_item("orders", "status", "", "ModifyColumnNullable"); - assert!(result.contains("orders")); - assert!(result.contains("status")); - assert!(result.contains("ModifyColumnNullable")); - } - - #[test] - fn test_format_fill_with_prompt() { - let result = format_fill_with_prompt("users", "email"); - assert!(result.contains("Enter fill value for")); - assert!(result.contains("users")); - 
assert!(result.contains("email")); - } - - #[test] - fn test_print_fill_with_item_and_get_prompt() { - // This function prints to stdout and returns the prompt string - let prompt = print_fill_with_item_and_get_prompt( - "users", - "email", - Some(&"Text".to_string()), - "AddColumn", - ); - assert!(prompt.contains("Enter fill value for")); - assert!(prompt.contains("users")); - assert!(prompt.contains("email")); - } - - #[test] - fn test_print_fill_with_item_and_get_prompt_no_type() { - let prompt = - print_fill_with_item_and_get_prompt("orders", "status", None, "ModifyColumnNullable"); - assert!(prompt.contains("Enter fill value for")); - assert!(prompt.contains("orders")); - assert!(prompt.contains("status")); - } - - #[test] - fn test_print_fill_with_header() { - // Just verify it doesn't panic - output goes to stdout - print_fill_with_header(); - } - - #[test] - fn test_print_fill_with_footer() { - // Just verify it doesn't panic - output goes to stdout - print_fill_with_footer(); - } - - #[test] - fn test_collect_fill_with_values_single_item() { - use vespertide_planner::FillWithRequired; - - let missing = vec![FillWithRequired { - action_index: 0, - table: "users".to_string(), - column: "email".to_string(), - action_type: "AddColumn", - column_type: Some("Text".to_string()), - }]; - - let mut fill_values = HashMap::new(); - - // Mock prompt function that returns a fixed value - let mock_prompt = - |_prompt: &str| -> Result { Ok("'test@example.com'".to_string()) }; - - let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); - assert!(result.is_ok()); - assert_eq!(fill_values.len(), 1); - assert_eq!( - fill_values.get(&("users".to_string(), "email".to_string())), - Some(&"'test@example.com'".to_string()) - ); - } - - #[test] - fn test_collect_fill_with_values_multiple_items() { - use vespertide_planner::FillWithRequired; - - let missing = vec![ - FillWithRequired { - action_index: 0, - table: "users".to_string(), - column: "email".to_string(), - action_type: "AddColumn", - column_type: Some("Text".to_string()), - }, - FillWithRequired { - action_index: 1, - table: "orders".to_string(), - column: "status".to_string(), - action_type: "ModifyColumnNullable", - column_type: None, - }, - ]; - - let mut fill_values = HashMap::new(); - - // Mock prompt function that returns different values based on call count - let call_count = std::cell::RefCell::new(0); - let mock_prompt = |_prompt: &str| -> Result { - let mut count = call_count.borrow_mut(); - *count += 1; - match *count { - 1 => Ok("'user@example.com'".to_string()), - 2 => Ok("'pending'".to_string()), - _ => Ok("'default'".to_string()), - } - }; - - let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); - assert!(result.is_ok()); - assert_eq!(fill_values.len(), 2); - assert_eq!( - fill_values.get(&("users".to_string(), "email".to_string())), - Some(&"'user@example.com'".to_string()) - ); - assert_eq!( - fill_values.get(&("orders".to_string(), "status".to_string())), - Some(&"'pending'".to_string()) - ); - } - - #[test] - fn test_collect_fill_with_values_empty() { - let missing: Vec = vec![]; - let mut fill_values = HashMap::new(); - - // This function should handle empty list gracefully (though it won't be called in practice) - // But we can't test the header/footer without items since the function still prints them - // So we test with a mock that would fail if called - let mock_prompt = |_prompt: &str| -> Result { - panic!("Should not be called for empty list"); - }; - - // Note: The 
function still prints header/footer even for empty list - // This is a design choice - in practice, cmd_revision won't call this with empty list - let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); - assert!(result.is_ok()); - assert!(fill_values.is_empty()); - } - - #[test] - fn test_collect_fill_with_values_prompt_error() { - use vespertide_planner::FillWithRequired; - - let missing = vec![FillWithRequired { - action_index: 0, - table: "users".to_string(), - column: "email".to_string(), - action_type: "AddColumn", - column_type: Some("Text".to_string()), - }]; - - let mut fill_values = HashMap::new(); - - // Mock prompt function that returns an error - let mock_prompt = - |_prompt: &str| -> Result { Err(anyhow::anyhow!("input cancelled")) }; - - let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); - assert!(result.is_err()); - assert!(fill_values.is_empty()); - } - - #[test] - fn test_prompt_fill_with_value_function_exists() { - // This test verifies that prompt_fill_with_value has the correct signature. - // We cannot actually call it in tests because dialoguer::Input blocks waiting for terminal input. - // The function is excluded from coverage with #[cfg_attr(coverage_nightly, coverage(off))]. - let _: fn(&str) -> Result = prompt_fill_with_value; - } - - #[test] - fn test_handle_missing_fill_with_collects_and_applies() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }], - }; - - let mut fill_values = HashMap::new(); - - // Mock prompt function - let mock_prompt = - |_prompt: &str| -> Result { Ok("'test@example.com'".to_string()) }; - - let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); - assert!(result.is_ok()); - - // Verify fill_with was applied to the plan - match &plan.actions[0] { - MigrationAction::AddColumn { fill_with, .. 
} => { - assert_eq!(fill_with, &Some("'test@example.com'".to_string())); - } - _ => panic!("Expected AddColumn action"), - } - - // Verify fill_values map was updated - assert_eq!( - fill_values.get(&("users".to_string(), "email".to_string())), - Some(&"'test@example.com'".to_string()) - ); - } - - #[test] - fn test_handle_missing_fill_with_no_missing() { - use vespertide_core::MigrationPlan; - - // Plan with no missing fill_with values (nullable column) - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, // nullable, so no fill_with required - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }], - }; - - let mut fill_values = HashMap::new(); - - // Mock prompt that should never be called - let mock_prompt = |_prompt: &str| -> Result { - panic!("Should not be called when no missing fill_with values"); - }; - - let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); - assert!(result.is_ok()); - assert!(fill_values.is_empty()); - } - - #[test] - fn test_handle_missing_fill_with_prompt_error() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }], - }; - - let mut fill_values = HashMap::new(); - - // Mock prompt that returns an error - let mock_prompt = - |_prompt: &str| -> Result { Err(anyhow::anyhow!("user cancelled")) }; - - let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); - assert!(result.is_err()); - - // Plan should not be modified on error - match &plan.actions[0] { - MigrationAction::AddColumn { fill_with, .. 
} => { - assert_eq!(fill_with, &None); - } - _ => panic!("Expected AddColumn action"), - } - } - - #[test] - fn test_handle_missing_fill_with_multiple_columns() { - use vespertide_core::MigrationPlan; - - let mut plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![ - MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }, - MigrationAction::ModifyColumnNullable { - table: "orders".into(), - column: "status".into(), - nullable: false, - fill_with: None, - }, - ], - }; - - let mut fill_values = HashMap::new(); - - // Mock prompt that returns different values based on call count - let call_count = std::cell::RefCell::new(0); - let mock_prompt = |_prompt: &str| -> Result { - let mut count = call_count.borrow_mut(); - *count += 1; - match *count { - 1 => Ok("'user@example.com'".to_string()), - 2 => Ok("'pending'".to_string()), - _ => Ok("'default'".to_string()), - } - }; - - let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); - assert!(result.is_ok()); - - // Verify both actions were updated - match &plan.actions[0] { - MigrationAction::AddColumn { fill_with, .. } => { - assert_eq!(fill_with, &Some("'user@example.com'".to_string())); - } - _ => panic!("Expected AddColumn action"), - } - - match &plan.actions[1] { - MigrationAction::ModifyColumnNullable { fill_with, .. } => { - assert_eq!(fill_with, &Some("'pending'".to_string())); - } - _ => panic!("Expected ModifyColumnNullable action"), - } - } -} +use std::collections::HashMap; +use std::fs; + +use anyhow::{Context, Result}; +use chrono::Utc; +use colored::Colorize; +use dialoguer::Input; +use serde_json::Value; +use vespertide_config::FileFormat; +use vespertide_core::{MigrationAction, MigrationPlan}; +use vespertide_planner::{find_missing_fill_with, plan_next_migration}; + +use crate::utils::{ + load_config, load_migrations, load_models, migration_filename_with_format_and_pattern, +}; + +/// Parse fill_with arguments from CLI. +/// Format: table.column=value +fn parse_fill_with_args(args: &[String]) -> HashMap<(String, String), String> { + let mut map = HashMap::new(); + for arg in args { + if let Some((key, value)) = arg.split_once('=') + && let Some((table, column)) = key.split_once('.') + { + map.insert((table.to_string(), column.to_string()), value.to_string()); + } + } + map +} + +/// Format the type info string for display. +fn format_type_info(column_type: Option<&String>) -> String { + column_type.map(|t| format!(" ({})", t)).unwrap_or_default() +} + +/// Format a single fill_with item for display. +fn format_fill_with_item(table: &str, column: &str, type_info: &str, action_type: &str) -> String { + format!( + " {} {}.{}{}\n {} {}", + "•".bright_cyan(), + table.bright_white(), + column.bright_green(), + type_info.bright_black(), + "Action:".bright_black(), + action_type.bright_magenta() + ) +} + +/// Format the prompt string for interactive input. +fn format_fill_with_prompt(table: &str, column: &str) -> String { + format!( + " Enter fill value for {}.{}", + table.bright_white(), + column.bright_green() + ) +} + +/// Print the header for fill_with prompts. 
+fn print_fill_with_header() { + println!( + "\n{} {}", + "⚠".bright_yellow(), + "The following columns require fill_with values:".bright_yellow() + ); + println!("{}", "─".repeat(60).bright_black()); +} + +/// Print the footer for fill_with prompts. +fn print_fill_with_footer() { + println!("{}", "─".repeat(60).bright_black()); +} + +/// Print a fill_with item and return the formatted prompt. +fn print_fill_with_item_and_get_prompt( + table: &str, + column: &str, + column_type: Option<&String>, + action_type: &str, +) -> String { + let type_info = format_type_info(column_type); + let item_display = format_fill_with_item(table, column, &type_info, action_type); + println!("{}", item_display); + format_fill_with_prompt(table, column) +} + +/// Prompt the user for a fill_with value using dialoguer. +/// This function wraps terminal I/O and cannot be unit tested without a real terminal. +#[cfg(not(tarpaulin_include))] +fn prompt_fill_with_value(prompt: &str) -> Result { + Input::new() + .with_prompt(prompt) + .interact_text() + .context("failed to read input") +} + +/// Collect fill_with values interactively for missing columns. +/// The `prompt_fn` parameter allows injecting a mock for testing. +fn collect_fill_with_values( + missing: &[vespertide_planner::FillWithRequired], + fill_values: &mut HashMap<(String, String), String>, + prompt_fn: F, +) -> Result<()> +where + F: Fn(&str) -> Result, +{ + print_fill_with_header(); + + for item in missing { + let prompt = print_fill_with_item_and_get_prompt( + &item.table, + &item.column, + item.column_type.as_ref(), + item.action_type, + ); + + let value = prompt_fn(&prompt)?; + fill_values.insert((item.table.clone(), item.column.clone()), value); + } + + print_fill_with_footer(); + Ok(()) +} + +/// Apply fill_with values to a migration plan. +fn apply_fill_with_to_plan( + plan: &mut MigrationPlan, + fill_values: &HashMap<(String, String), String>, +) { + for action in &mut plan.actions { + match action { + MigrationAction::AddColumn { + table, + column, + fill_with, + } => { + if fill_with.is_none() + && let Some(value) = fill_values.get(&(table.clone(), column.name.clone())) + { + *fill_with = Some(value.clone()); + } + } + MigrationAction::ModifyColumnNullable { + table, + column, + fill_with, + .. + } => { + if fill_with.is_none() + && let Some(value) = fill_values.get(&(table.clone(), column.clone())) + { + *fill_with = Some(value.clone()); + } + } + _ => {} + } + } +} + +/// Handle interactive fill_with collection if there are missing values. +/// Returns the updated fill_values map after collecting from user. 
+fn handle_missing_fill_with( + plan: &mut MigrationPlan, + fill_values: &mut HashMap<(String, String), String>, + prompt_fn: F, +) -> Result<()> +where + F: Fn(&str) -> Result, +{ + let missing = find_missing_fill_with(plan); + + if !missing.is_empty() { + collect_fill_with_values(&missing, fill_values, prompt_fn)?; + + // Apply the collected fill_with values + apply_fill_with_to_plan(plan, fill_values); + } + + Ok(()) +} + +pub fn cmd_revision(message: String, fill_with_args: Vec) -> Result<()> { + let config = load_config()?; + let current_models = load_models(&config)?; + let applied_plans = load_migrations(&config)?; + + let mut plan = plan_next_migration(¤t_models, &applied_plans) + .map_err(|e| anyhow::anyhow!("planning error: {}", e))?; + + if plan.actions.is_empty() { + println!( + "{} {}", + "No changes detected.".bright_yellow(), + "Nothing to migrate.".bright_white() + ); + return Ok(()); + } + + // Parse CLI fill_with arguments + let mut fill_values = parse_fill_with_args(&fill_with_args); + + // Apply any CLI-provided fill_with values first + apply_fill_with_to_plan(&mut plan, &fill_values); + + // Handle any missing fill_with values interactively + handle_missing_fill_with(&mut plan, &mut fill_values, prompt_fill_with_value)?; + + plan.comment = Some(message); + if plan.created_at.is_none() { + // Record creation time in RFC3339 (UTC). + plan.created_at = Some(Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true)); + } + + let migrations_dir = config.migrations_dir(); + if !migrations_dir.exists() { + fs::create_dir_all(migrations_dir).context("create migrations directory")?; + } + + let format = config.migration_format(); + let filename = migration_filename_with_format_and_pattern( + plan.version, + plan.comment.as_deref(), + format, + config.migration_filename_pattern(), + ); + let path = migrations_dir.join(&filename); + + let schema_url = schema_url_for(format); + match format { + FileFormat::Json => write_json_with_schema(&path, &plan, &schema_url)?, + FileFormat::Yaml | FileFormat::Yml => write_yaml(&path, &plan, &schema_url)?, + } + + println!( + "{} {}", + "Created migration:".bright_green().bold(), + format!("{}", path.display()).bright_white() + ); + println!( + " {} {}", + "Version:".bright_cyan(), + plan.version.to_string().bright_magenta().bold() + ); + println!( + " {} {}", + "Actions:".bright_cyan(), + plan.actions.len().to_string().bright_yellow() + ); + if let Some(comment) = &plan.comment { + println!(" {} {}", "Comment:".bright_cyan(), comment.bright_white()); + } + + Ok(()) +} + +fn schema_url_for(format: FileFormat) -> String { + // If not set, default to public raw GitHub schema location. + // Users can override via VESP_SCHEMA_BASE_URL. 
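    // For illustration (hypothetical override value): with the default base the emitted
    // files reference
    //   https://raw.githubusercontent.com/dev-five-git/vespertide/refs/heads/main/schemas/migration.schema.json
    // while VESP_SCHEMA_BASE_URL=https://example.com/schemas/ (any trailing '/' is
    // trimmed below) yields
    //   https://example.com/schemas/migration.schema.json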
+ let base = std::env::var("VESP_SCHEMA_BASE_URL").ok(); + let base = base.as_deref().unwrap_or( + "https://raw.githubusercontent.com/dev-five-git/vespertide/refs/heads/main/schemas", + ); + let base = base.trim_end_matches('/'); + match format { + FileFormat::Json => format!("{}/migration.schema.json", base), + FileFormat::Yaml | FileFormat::Yml => format!("{}/migration.schema.json", base), + } +} + +fn write_json_with_schema( + path: &std::path::Path, + plan: &MigrationPlan, + schema_url: &str, +) -> Result<()> { + let mut value = serde_json::to_value(plan).context("serialize migration plan to json")?; + if let Value::Object(ref mut map) = value { + map.insert("$schema".to_string(), Value::String(schema_url.to_string())); + } + let text = serde_json::to_string_pretty(&value).context("stringify json with schema")?; + fs::write(path, text).with_context(|| format!("write file: {}", path.display()))?; + Ok(()) +} + +fn write_yaml(path: &std::path::Path, plan: &MigrationPlan, schema_url: &str) -> Result<()> { + let mut value = serde_yaml::to_value(plan).context("serialize migration plan to yaml value")?; + if let serde_yaml::Value::Mapping(ref mut map) = value { + map.insert( + serde_yaml::Value::String("$schema".to_string()), + serde_yaml::Value::String(schema_url.to_string()), + ); + } + let text = serde_yaml::to_string(&value).context("serialize yaml with schema")?; + fs::write(path, text).with_context(|| format!("write file: {}", path.display()))?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::{env, fs, path::PathBuf}; + use tempfile::tempdir; + use vespertide_config::{FileFormat, VespertideConfig}; + use vespertide_core::{ColumnDef, ColumnType, SimpleColumnType, TableConstraint, TableDef}; + + struct CwdGuard { + original: PathBuf, + } + + impl CwdGuard { + fn new(dir: &PathBuf) -> Self { + let original = env::current_dir().unwrap(); + env::set_current_dir(dir).unwrap(); + Self { original } + } + } + + impl Drop for CwdGuard { + fn drop(&mut self) { + let _ = env::set_current_dir(&self.original); + } + } + + fn write_config() -> VespertideConfig { + write_config_with_format(None) + } + + fn write_config_with_format(fmt: Option) -> VespertideConfig { + let mut cfg = VespertideConfig::default(); + if let Some(f) = fmt { + cfg.migration_format = f; + } + let text = serde_json::to_string_pretty(&cfg).unwrap(); + fs::write("vespertide.json", text).unwrap(); + cfg + } + + fn write_model(name: &str) { + let models_dir = PathBuf::from("models"); + fs::create_dir_all(&models_dir).unwrap(); + let table = TableDef { + name: name.to_string(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }; + let path = models_dir.join(format!("{name}.json")); + fs::write(path, serde_json::to_string_pretty(&table).unwrap()).unwrap(); + } + + #[test] + #[serial_test::serial] + fn cmd_revision_writes_migration() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config(); + write_model("users"); + fs::create_dir_all(cfg.migrations_dir()).unwrap(); + + cmd_revision("init".into(), vec![]).unwrap(); + + let entries: Vec<_> = fs::read_dir(cfg.migrations_dir()).unwrap().collect(); + assert!(!entries.is_empty()); + } + + 
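    // A minimal end-to-end sketch with illustrative values: CLI-style `table.column=value`
    // arguments are parsed into a map by `parse_fill_with_args` and applied by
    // `apply_fill_with_to_plan` to any matching action whose `fill_with` is still unset.
    #[test]
    fn fill_with_args_flow_into_plan_sketch() {
        use vespertide_core::MigrationPlan;

        // Hypothetical CLI input; the value string is passed through verbatim.
        let args = vec!["users.email='user@example.com'".to_string()];
        let fill_values = parse_fill_with_args(&args);

        let mut plan = MigrationPlan {
            comment: None,
            created_at: None,
            version: 1,
            actions: vec![MigrationAction::AddColumn {
                table: "users".into(),
                column: Box::new(ColumnDef {
                    name: "email".into(),
                    r#type: ColumnType::Simple(SimpleColumnType::Text),
                    nullable: false,
                    default: None,
                    comment: None,
                    primary_key: None,
                    unique: None,
                    index: None,
                    foreign_key: None,
                }),
                fill_with: None,
            }],
        };

        // Only actions with `fill_with: None` and a matching (table, column) key are updated.
        apply_fill_with_to_plan(&mut plan, &fill_values);

        match &plan.actions[0] {
            MigrationAction::AddColumn { fill_with, .. } => {
                assert_eq!(fill_with, &Some("'user@example.com'".to_string()));
            }
            _ => panic!("Expected AddColumn action"),
        }
    }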
#[test] + #[serial_test::serial] + fn cmd_revision_no_changes_short_circuits() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config(); + // no models, no migrations -> plan with no actions -> early return + assert!(cmd_revision("noop".into(), vec![]).is_ok()); + // migrations dir should not be created + assert!(!cfg.migrations_dir().exists()); + } + + #[test] + #[serial_test::serial] + fn cmd_revision_writes_yaml_when_configured() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config_with_format(Some(FileFormat::Yaml)); + write_model("users"); + // ensure migrations dir absent to exercise create_dir_all branch + if cfg.migrations_dir().exists() { + fs::remove_dir_all(cfg.migrations_dir()).unwrap(); + } + + cmd_revision("yaml".into(), vec![]).unwrap(); + + let entries: Vec<_> = fs::read_dir(cfg.migrations_dir()).unwrap().collect(); + assert!(!entries.is_empty()); + let has_yaml = entries.iter().any(|e| { + e.as_ref() + .unwrap() + .path() + .extension() + .map(|s| s == "yaml") + .unwrap_or(false) + }); + assert!(has_yaml); + } + + #[test] + fn test_parse_fill_with_args() { + let args = vec![ + "users.email=default@example.com".to_string(), + "orders.status=pending".to_string(), + ]; + let result = parse_fill_with_args(&args); + + assert_eq!(result.len(), 2); + assert_eq!( + result.get(&("users".to_string(), "email".to_string())), + Some(&"default@example.com".to_string()) + ); + assert_eq!( + result.get(&("orders".to_string(), "status".to_string())), + Some(&"pending".to_string()) + ); + } + + #[test] + fn test_parse_fill_with_args_invalid_format() { + let args = vec![ + "invalid_format".to_string(), + "no_equals_sign".to_string(), + "users.email=valid".to_string(), + ]; + let result = parse_fill_with_args(&args); + + // Only the valid one should be parsed + assert_eq!(result.len(), 1); + assert_eq!( + result.get(&("users".to_string(), "email".to_string())), + Some(&"valid".to_string()) + ); + } + + #[test] + fn test_apply_fill_with_to_plan_add_column() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }], + }; + + let mut fill_values = HashMap::new(); + fill_values.insert( + ("users".to_string(), "email".to_string()), + "'default@example.com'".to_string(), + ); + + apply_fill_with_to_plan(&mut plan, &fill_values); + + match &plan.actions[0] { + MigrationAction::AddColumn { fill_with, .. 
} => { + assert_eq!(fill_with, &Some("'default@example.com'".to_string())); + } + _ => panic!("Expected AddColumn action"), + } + } + + #[test] + fn test_apply_fill_with_to_plan_modify_column_nullable() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::ModifyColumnNullable { + table: "users".into(), + column: "status".into(), + nullable: false, + fill_with: None, + }], + }; + + let mut fill_values = HashMap::new(); + fill_values.insert( + ("users".to_string(), "status".to_string()), + "'active'".to_string(), + ); + + apply_fill_with_to_plan(&mut plan, &fill_values); + + match &plan.actions[0] { + MigrationAction::ModifyColumnNullable { fill_with, .. } => { + assert_eq!(fill_with, &Some("'active'".to_string())); + } + _ => panic!("Expected ModifyColumnNullable action"), + } + } + + #[test] + fn test_apply_fill_with_to_plan_skips_existing_fill_with() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: Some("'existing@example.com'".to_string()), + }], + }; + + let mut fill_values = HashMap::new(); + fill_values.insert( + ("users".to_string(), "email".to_string()), + "'new@example.com'".to_string(), + ); + + apply_fill_with_to_plan(&mut plan, &fill_values); + + // Should keep existing value, not replace with new + match &plan.actions[0] { + MigrationAction::AddColumn { fill_with, .. } => { + assert_eq!(fill_with, &Some("'existing@example.com'".to_string())); + } + _ => panic!("Expected AddColumn action"), + } + } + + #[test] + fn test_apply_fill_with_to_plan_no_match() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }], + }; + + let mut fill_values = HashMap::new(); + fill_values.insert( + ("orders".to_string(), "status".to_string()), + "'pending'".to_string(), + ); + + apply_fill_with_to_plan(&mut plan, &fill_values); + + // Should remain None since no match + match &plan.actions[0] { + MigrationAction::AddColumn { fill_with, .. 
} => { + assert_eq!(fill_with, &None); + } + _ => panic!("Expected AddColumn action"), + } + } + + #[test] + fn test_apply_fill_with_to_plan_multiple_actions() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![ + MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }, + MigrationAction::ModifyColumnNullable { + table: "orders".into(), + column: "status".into(), + nullable: false, + fill_with: None, + }, + ], + }; + + let mut fill_values = HashMap::new(); + fill_values.insert( + ("users".to_string(), "email".to_string()), + "'user@example.com'".to_string(), + ); + fill_values.insert( + ("orders".to_string(), "status".to_string()), + "'pending'".to_string(), + ); + + apply_fill_with_to_plan(&mut plan, &fill_values); + + match &plan.actions[0] { + MigrationAction::AddColumn { fill_with, .. } => { + assert_eq!(fill_with, &Some("'user@example.com'".to_string())); + } + _ => panic!("Expected AddColumn action"), + } + + match &plan.actions[1] { + MigrationAction::ModifyColumnNullable { fill_with, .. } => { + assert_eq!(fill_with, &Some("'pending'".to_string())); + } + _ => panic!("Expected ModifyColumnNullable action"), + } + } + + #[test] + fn test_apply_fill_with_to_plan_other_actions_ignored() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::DeleteColumn { + table: "users".into(), + column: "old_column".into(), + }], + }; + + let mut fill_values = HashMap::new(); + fill_values.insert( + ("users".to_string(), "old_column".to_string()), + "'value'".to_string(), + ); + + // Should not panic or modify anything + apply_fill_with_to_plan(&mut plan, &fill_values); + + match &plan.actions[0] { + MigrationAction::DeleteColumn { table, column } => { + assert_eq!(table, "users"); + assert_eq!(column, "old_column"); + } + _ => panic!("Expected DeleteColumn action"), + } + } + + #[test] + fn test_format_type_info_with_some() { + let column_type = Some("Integer".to_string()); + let result = format_type_info(column_type.as_ref()); + assert_eq!(result, " (Integer)"); + } + + #[test] + fn test_format_type_info_with_none() { + let result = format_type_info(None); + assert_eq!(result, ""); + } + + #[test] + fn test_format_fill_with_item() { + let result = format_fill_with_item("users", "email", " (Text)", "AddColumn"); + // The result should contain the table, column, type info, and action type + // Colors make exact matching difficult, but we can check structure + assert!(result.contains("users")); + assert!(result.contains("email")); + assert!(result.contains("(Text)")); + assert!(result.contains("AddColumn")); + assert!(result.contains("Action:")); + } + + #[test] + fn test_format_fill_with_item_empty_type_info() { + let result = format_fill_with_item("orders", "status", "", "ModifyColumnNullable"); + assert!(result.contains("orders")); + assert!(result.contains("status")); + assert!(result.contains("ModifyColumnNullable")); + } + + #[test] + fn test_format_fill_with_prompt() { + let result = format_fill_with_prompt("users", "email"); + assert!(result.contains("Enter fill value for")); + assert!(result.contains("users")); + 
assert!(result.contains("email")); + } + + #[test] + fn test_print_fill_with_item_and_get_prompt() { + // This function prints to stdout and returns the prompt string + let prompt = print_fill_with_item_and_get_prompt( + "users", + "email", + Some(&"Text".to_string()), + "AddColumn", + ); + assert!(prompt.contains("Enter fill value for")); + assert!(prompt.contains("users")); + assert!(prompt.contains("email")); + } + + #[test] + fn test_print_fill_with_item_and_get_prompt_no_type() { + let prompt = + print_fill_with_item_and_get_prompt("orders", "status", None, "ModifyColumnNullable"); + assert!(prompt.contains("Enter fill value for")); + assert!(prompt.contains("orders")); + assert!(prompt.contains("status")); + } + + #[test] + fn test_print_fill_with_header() { + // Just verify it doesn't panic - output goes to stdout + print_fill_with_header(); + } + + #[test] + fn test_print_fill_with_footer() { + // Just verify it doesn't panic - output goes to stdout + print_fill_with_footer(); + } + + #[test] + fn test_collect_fill_with_values_single_item() { + use vespertide_planner::FillWithRequired; + + let missing = vec![FillWithRequired { + action_index: 0, + table: "users".to_string(), + column: "email".to_string(), + action_type: "AddColumn", + column_type: Some("Text".to_string()), + }]; + + let mut fill_values = HashMap::new(); + + // Mock prompt function that returns a fixed value + let mock_prompt = + |_prompt: &str| -> Result { Ok("'test@example.com'".to_string()) }; + + let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); + assert!(result.is_ok()); + assert_eq!(fill_values.len(), 1); + assert_eq!( + fill_values.get(&("users".to_string(), "email".to_string())), + Some(&"'test@example.com'".to_string()) + ); + } + + #[test] + fn test_collect_fill_with_values_multiple_items() { + use vespertide_planner::FillWithRequired; + + let missing = vec![ + FillWithRequired { + action_index: 0, + table: "users".to_string(), + column: "email".to_string(), + action_type: "AddColumn", + column_type: Some("Text".to_string()), + }, + FillWithRequired { + action_index: 1, + table: "orders".to_string(), + column: "status".to_string(), + action_type: "ModifyColumnNullable", + column_type: None, + }, + ]; + + let mut fill_values = HashMap::new(); + + // Mock prompt function that returns different values based on call count + let call_count = std::cell::RefCell::new(0); + let mock_prompt = |_prompt: &str| -> Result { + let mut count = call_count.borrow_mut(); + *count += 1; + match *count { + 1 => Ok("'user@example.com'".to_string()), + 2 => Ok("'pending'".to_string()), + _ => Ok("'default'".to_string()), + } + }; + + let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); + assert!(result.is_ok()); + assert_eq!(fill_values.len(), 2); + assert_eq!( + fill_values.get(&("users".to_string(), "email".to_string())), + Some(&"'user@example.com'".to_string()) + ); + assert_eq!( + fill_values.get(&("orders".to_string(), "status".to_string())), + Some(&"'pending'".to_string()) + ); + } + + #[test] + fn test_collect_fill_with_values_empty() { + let missing: Vec = vec![]; + let mut fill_values = HashMap::new(); + + // This function should handle empty list gracefully (though it won't be called in practice) + // But we can't test the header/footer without items since the function still prints them + // So we test with a mock that would fail if called + let mock_prompt = |_prompt: &str| -> Result { + panic!("Should not be called for empty list"); + }; + + // Note: The 
function still prints header/footer even for empty list + // This is a design choice - in practice, cmd_revision won't call this with empty list + let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); + assert!(result.is_ok()); + assert!(fill_values.is_empty()); + } + + #[test] + fn test_collect_fill_with_values_prompt_error() { + use vespertide_planner::FillWithRequired; + + let missing = vec![FillWithRequired { + action_index: 0, + table: "users".to_string(), + column: "email".to_string(), + action_type: "AddColumn", + column_type: Some("Text".to_string()), + }]; + + let mut fill_values = HashMap::new(); + + // Mock prompt function that returns an error + let mock_prompt = + |_prompt: &str| -> Result { Err(anyhow::anyhow!("input cancelled")) }; + + let result = collect_fill_with_values(&missing, &mut fill_values, mock_prompt); + assert!(result.is_err()); + assert!(fill_values.is_empty()); + } + + #[test] + fn test_prompt_fill_with_value_function_exists() { + // This test verifies that prompt_fill_with_value has the correct signature. + // We cannot actually call it in tests because dialoguer::Input blocks waiting for terminal input. + // The function is excluded from coverage with #[cfg_attr(coverage_nightly, coverage(off))]. + let _: fn(&str) -> Result = prompt_fill_with_value; + } + + #[test] + fn test_handle_missing_fill_with_collects_and_applies() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }], + }; + + let mut fill_values = HashMap::new(); + + // Mock prompt function + let mock_prompt = + |_prompt: &str| -> Result { Ok("'test@example.com'".to_string()) }; + + let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); + assert!(result.is_ok()); + + // Verify fill_with was applied to the plan + match &plan.actions[0] { + MigrationAction::AddColumn { fill_with, .. 
} => { + assert_eq!(fill_with, &Some("'test@example.com'".to_string())); + } + _ => panic!("Expected AddColumn action"), + } + + // Verify fill_values map was updated + assert_eq!( + fill_values.get(&("users".to_string(), "email".to_string())), + Some(&"'test@example.com'".to_string()) + ); + } + + #[test] + fn test_handle_missing_fill_with_no_missing() { + use vespertide_core::MigrationPlan; + + // Plan with no missing fill_with values (nullable column) + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, // nullable, so no fill_with required + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }], + }; + + let mut fill_values = HashMap::new(); + + // Mock prompt that should never be called + let mock_prompt = |_prompt: &str| -> Result { + panic!("Should not be called when no missing fill_with values"); + }; + + let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); + assert!(result.is_ok()); + assert!(fill_values.is_empty()); + } + + #[test] + fn test_handle_missing_fill_with_prompt_error() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }], + }; + + let mut fill_values = HashMap::new(); + + // Mock prompt that returns an error + let mock_prompt = + |_prompt: &str| -> Result { Err(anyhow::anyhow!("user cancelled")) }; + + let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); + assert!(result.is_err()); + + // Plan should not be modified on error + match &plan.actions[0] { + MigrationAction::AddColumn { fill_with, .. 
} => { + assert_eq!(fill_with, &None); + } + _ => panic!("Expected AddColumn action"), + } + } + + #[test] + fn test_handle_missing_fill_with_multiple_columns() { + use vespertide_core::MigrationPlan; + + let mut plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![ + MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }, + MigrationAction::ModifyColumnNullable { + table: "orders".into(), + column: "status".into(), + nullable: false, + fill_with: None, + }, + ], + }; + + let mut fill_values = HashMap::new(); + + // Mock prompt that returns different values based on call count + let call_count = std::cell::RefCell::new(0); + let mock_prompt = |_prompt: &str| -> Result { + let mut count = call_count.borrow_mut(); + *count += 1; + match *count { + 1 => Ok("'user@example.com'".to_string()), + 2 => Ok("'pending'".to_string()), + _ => Ok("'default'".to_string()), + } + }; + + let result = handle_missing_fill_with(&mut plan, &mut fill_values, mock_prompt); + assert!(result.is_ok()); + + // Verify both actions were updated + match &plan.actions[0] { + MigrationAction::AddColumn { fill_with, .. } => { + assert_eq!(fill_with, &Some("'user@example.com'".to_string())); + } + _ => panic!("Expected AddColumn action"), + } + + match &plan.actions[1] { + MigrationAction::ModifyColumnNullable { fill_with, .. } => { + assert_eq!(fill_with, &Some("'pending'".to_string())); + } + _ => panic!("Expected ModifyColumnNullable action"), + } + } +} diff --git a/crates/vespertide-cli/src/commands/sql.rs b/crates/vespertide-cli/src/commands/sql.rs index 5bce752..8e42330 100644 --- a/crates/vespertide-cli/src/commands/sql.rs +++ b/crates/vespertide-cli/src/commands/sql.rs @@ -1,469 +1,471 @@ -use anyhow::Result; -use colored::Colorize; -use vespertide_planner::{plan_next_migration_with_baseline, schema_from_plans}; -use vespertide_query::{DatabaseBackend, build_plan_queries}; - -use crate::utils::{load_config, load_migrations, load_models}; - -pub fn cmd_sql(backend: DatabaseBackend) -> Result<()> { - let config = load_config()?; - let current_models = load_models(&config)?; - let applied_plans = load_migrations(&config)?; - - // Reconstruct the baseline schema from applied migrations - let baseline_schema = schema_from_plans(&applied_plans) - .map_err(|e| anyhow::anyhow!("failed to reconstruct schema: {}", e))?; - - // Plan next migration using the pre-computed baseline - let plan = plan_next_migration_with_baseline(¤t_models, &applied_plans, &baseline_schema) - .map_err(|e| anyhow::anyhow!("planning error: {}", e))?; - - emit_sql(&plan, backend, &baseline_schema) -} - -fn emit_sql( - plan: &vespertide_core::MigrationPlan, - backend: DatabaseBackend, - current_schema: &[vespertide_core::TableDef], -) -> Result<()> { - if plan.actions.is_empty() { - println!( - "{} {}", - "No differences found.".bright_green(), - "Schema is up to date; no SQL to emit.".bright_white() - ); - return Ok(()); - } - - let plan_queries = build_plan_queries(plan, current_schema) - .map_err(|e| anyhow::anyhow!("query build error: {}", e))?; - - // Select queries for the specified backend - let queries: Vec<_> = plan_queries - .iter() - .flat_map(|pq| match backend { - DatabaseBackend::Postgres => &pq.postgres, - DatabaseBackend::MySql => 
&pq.mysql, - DatabaseBackend::Sqlite => &pq.sqlite, - }) - .collect(); - - println!( - "{} {}", - "Plan version:".bright_cyan().bold(), - plan.version.to_string().bright_magenta() - ); - if let Some(created_at) = &plan.created_at { - println!( - "{} {}", - "Created at:".bright_cyan(), - created_at.bright_white() - ); - } - if let Some(comment) = &plan.comment { - println!("{} {}", "Comment:".bright_cyan(), comment.bright_white()); - } - println!( - "{} {}", - "Actions:".bright_cyan(), - plan.actions.len().to_string().bright_yellow() - ); - println!( - "{} {}", - "SQL statements:".bright_cyan().bold(), - queries.len().to_string().bright_yellow().bold() - ); - println!(); - - for (i, pq) in plan_queries.iter().enumerate() { - let queries = match backend { - DatabaseBackend::Postgres => &pq.postgres, - DatabaseBackend::MySql => &pq.mysql, - DatabaseBackend::Sqlite => &pq.sqlite, - }; - println!( - "{} {}", - "Action:".bright_cyan(), - pq.action.to_string().bright_white() - ); - for (j, q) in queries.iter().enumerate() { - println!( - "{}{}. {}", - (i + 1).to_string().bright_magenta().bold(), - if queries.len() > 1 { - format!("-{}", j + 1) - } else { - "".to_string() - } - .bright_magenta() - .bold(), - q.build(backend).trim().bright_white() - ); - } - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - use serial_test::serial; - use std::fs; - use std::path::PathBuf; - use tempfile::tempdir; - use vespertide_config::VespertideConfig; - use vespertide_core::{ - ColumnDef, ColumnType, MigrationAction, MigrationPlan, SimpleColumnType, TableConstraint, - TableDef, - }; - - struct CwdGuard { - original: PathBuf, - } - - impl CwdGuard { - fn new(dir: &PathBuf) -> Self { - let original = std::env::current_dir().unwrap(); - std::env::set_current_dir(dir).unwrap(); - Self { original } - } - } - - impl Drop for CwdGuard { - fn drop(&mut self) { - let _ = std::env::set_current_dir(&self.original); - } - } - - fn write_config() -> VespertideConfig { - let cfg = VespertideConfig::default(); - let text = serde_json::to_string_pretty(&cfg).unwrap(); - fs::write("vespertide.json", text).unwrap(); - cfg - } - - fn write_model(name: &str) { - let models_dir = PathBuf::from("models"); - fs::create_dir_all(&models_dir).unwrap(); - let table = TableDef { - name: name.to_string(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }; - let path = models_dir.join(format!("{name}.json")); - fs::write(path, serde_json::to_string_pretty(&table).unwrap()).unwrap(); - } - - #[test] - #[serial] - fn cmd_sql_emits_queries_postgres() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let _cfg = write_config(); - write_model("users"); - - let result = cmd_sql(DatabaseBackend::Postgres); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn cmd_sql_emits_queries_mysql() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let _cfg = write_config(); - write_model("users"); - - let result = cmd_sql(DatabaseBackend::MySql); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn cmd_sql_emits_queries_sqlite() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let _cfg = 
write_config(); - write_model("users"); - - let result = cmd_sql(DatabaseBackend::Sqlite); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn cmd_sql_no_changes_postgres() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config(); - write_model("users"); - - let plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }], - }; - fs::create_dir_all(cfg.migrations_dir()).unwrap(); - let path = cfg.migrations_dir().join("0001_init.json"); - fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); - - let result = cmd_sql(DatabaseBackend::Postgres); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn cmd_sql_no_changes_mysql() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config(); - write_model("users"); - - let plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }], - }; - fs::create_dir_all(cfg.migrations_dir()).unwrap(); - let path = cfg.migrations_dir().join("0001_init.json"); - fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); - - let result = cmd_sql(DatabaseBackend::MySql); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn cmd_sql_no_changes_sqlite() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config(); - write_model("users"); - - let plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }], - }; - fs::create_dir_all(cfg.migrations_dir()).unwrap(); - let path = cfg.migrations_dir().join("0001_init.json"); - fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); - - let result = cmd_sql(DatabaseBackend::Sqlite); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn emit_sql_prints_created_at_and_comment_postgres() { - let plan = MigrationPlan { - comment: Some("with comment".into()), - created_at: Some("2024-01-02T00:00:00Z".into()), - version: 1, - actions: vec![MigrationAction::RawSql { - sql: "SELECT 1;".into(), - }], - }; - - let result = emit_sql(&plan, DatabaseBackend::Postgres, &[]); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn emit_sql_prints_created_at_and_comment_mysql() { - let plan 
= MigrationPlan { - comment: Some("with comment".into()), - created_at: Some("2024-01-02T00:00:00Z".into()), - version: 1, - actions: vec![MigrationAction::RawSql { - sql: "SELECT 1;".into(), - }], - }; - - let result = emit_sql(&plan, DatabaseBackend::MySql, &[]); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn emit_sql_prints_created_at_and_comment_sqlite() { - let plan = MigrationPlan { - comment: Some("with comment".into()), - created_at: Some("2024-01-02T00:00:00Z".into()), - version: 1, - actions: vec![MigrationAction::RawSql { - sql: "SELECT 1;".into(), - }], - }; - - let result = emit_sql(&plan, DatabaseBackend::Sqlite, &[]); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn emit_sql_multiple_queries() { - let plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![ - MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }, - MigrationAction::AddConstraint { - table: "users".into(), - constraint: TableConstraint::Index { - name: Some("idx_id".into()), - columns: vec!["id".into()], - }, - }, - ], - }; - - let result = emit_sql(&plan, DatabaseBackend::Postgres, &[]); - assert!(result.is_ok()); - } - - #[test] - #[serial] - fn emit_sql_multiple_queries_per_action() { - // Test case where a single action generates multiple queries (e.g., SQLite constraint addition) - // This should trigger the queries.len() > 1 branch (line 89) - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - let _cfg = write_config(); - write_model("users"); - - // Create a migration that adds a NOT NULL column in SQLite, which generates multiple queries - let plan = MigrationPlan { - comment: None, - created_at: None, - version: 1, - actions: vec![MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "nickname".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: Some("default".into()), - }], - }; - - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }]; - - let result = emit_sql(&plan, DatabaseBackend::Sqlite, ¤t_schema); - assert!(result.is_ok()); - } -} +use anyhow::Result; +use colored::Colorize; +use vespertide_planner::{plan_next_migration_with_baseline, schema_from_plans}; +use vespertide_query::{DatabaseBackend, build_plan_queries}; + +use crate::utils::{load_config, load_migrations, load_models}; + +pub fn cmd_sql(backend: DatabaseBackend) -> Result<()> { + let config = load_config()?; + let current_models = load_models(&config)?; + let applied_plans = load_migrations(&config)?; + + // Reconstruct the baseline schema from applied migrations + let baseline_schema = schema_from_plans(&applied_plans) + .map_err(|e| anyhow::anyhow!("failed to reconstruct schema: {}", e))?; + + // Plan next migration using the 
pre-computed baseline + let plan = plan_next_migration_with_baseline(¤t_models, &applied_plans, &baseline_schema) + .map_err(|e| anyhow::anyhow!("planning error: {}", e))?; + + emit_sql(&plan, backend, &baseline_schema) +} + +fn emit_sql( + plan: &vespertide_core::MigrationPlan, + backend: DatabaseBackend, + current_schema: &[vespertide_core::TableDef], +) -> Result<()> { + if plan.actions.is_empty() { + println!( + "{} {}", + "No differences found.".bright_green(), + "Schema is up to date; no SQL to emit.".bright_white() + ); + return Ok(()); + } + + let plan_queries = build_plan_queries(plan, current_schema) + .map_err(|e| anyhow::anyhow!("query build error: {}", e))?; + + // Select queries for the specified backend + let queries: Vec<_> = plan_queries + .iter() + .flat_map(|pq| match backend { + DatabaseBackend::Postgres => &pq.postgres, + DatabaseBackend::MySql => &pq.mysql, + DatabaseBackend::Sqlite => &pq.sqlite, + }) + .collect(); + + println!( + "{} {}", + "Plan version:".bright_cyan().bold(), + plan.version.to_string().bright_magenta() + ); + if let Some(created_at) = &plan.created_at { + println!( + "{} {}", + "Created at:".bright_cyan(), + created_at.bright_white() + ); + } + if let Some(comment) = &plan.comment { + println!("{} {}", "Comment:".bright_cyan(), comment.bright_white()); + } + println!( + "{} {}", + "Actions:".bright_cyan(), + plan.actions.len().to_string().bright_yellow() + ); + println!( + "{} {}", + "SQL statements:".bright_cyan().bold(), + queries.len().to_string().bright_yellow().bold() + ); + println!(); + + for (i, pq) in plan_queries.iter().enumerate() { + let queries = match backend { + DatabaseBackend::Postgres => &pq.postgres, + DatabaseBackend::MySql => &pq.mysql, + DatabaseBackend::Sqlite => &pq.sqlite, + }; + println!( + "{} {}", + "Action:".bright_cyan(), + pq.action.to_string().bright_white() + ); + for (j, q) in queries.iter().enumerate() { + println!( + "{}{}. 
{}", + (i + 1).to_string().bright_magenta().bold(), + if queries.len() > 1 { + format!("-{}", j + 1) + } else { + "".to_string() + } + .bright_magenta() + .bold(), + q.build(backend).trim().bright_white() + ); + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use serial_test::serial; + use std::fs; + use std::path::PathBuf; + use tempfile::tempdir; + use vespertide_config::VespertideConfig; + use vespertide_core::{ + ColumnDef, ColumnType, MigrationAction, MigrationPlan, SimpleColumnType, TableConstraint, + TableDef, + }; + + struct CwdGuard { + original: PathBuf, + } + + impl CwdGuard { + fn new(dir: &PathBuf) -> Self { + let original = std::env::current_dir().unwrap(); + std::env::set_current_dir(dir).unwrap(); + Self { original } + } + } + + impl Drop for CwdGuard { + fn drop(&mut self) { + let _ = std::env::set_current_dir(&self.original); + } + } + + fn write_config() -> VespertideConfig { + let cfg = VespertideConfig::default(); + let text = serde_json::to_string_pretty(&cfg).unwrap(); + fs::write("vespertide.json", text).unwrap(); + cfg + } + + fn write_model(name: &str) { + let models_dir = PathBuf::from("models"); + fs::create_dir_all(&models_dir).unwrap(); + let table = TableDef { + name: name.to_string(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }; + let path = models_dir.join(format!("{name}.json")); + fs::write(path, serde_json::to_string_pretty(&table).unwrap()).unwrap(); + } + + #[test] + #[serial] + fn cmd_sql_emits_queries_postgres() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let _cfg = write_config(); + write_model("users"); + + let result = cmd_sql(DatabaseBackend::Postgres); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn cmd_sql_emits_queries_mysql() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let _cfg = write_config(); + write_model("users"); + + let result = cmd_sql(DatabaseBackend::MySql); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn cmd_sql_emits_queries_sqlite() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let _cfg = write_config(); + write_model("users"); + + let result = cmd_sql(DatabaseBackend::Sqlite); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn cmd_sql_no_changes_postgres() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config(); + write_model("users"); + + let plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }], + }; + fs::create_dir_all(cfg.migrations_dir()).unwrap(); + let path = cfg.migrations_dir().join("0001_init.json"); + fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); + + let result = 
cmd_sql(DatabaseBackend::Postgres); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn cmd_sql_no_changes_mysql() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config(); + write_model("users"); + + let plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }], + }; + fs::create_dir_all(cfg.migrations_dir()).unwrap(); + let path = cfg.migrations_dir().join("0001_init.json"); + fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); + + let result = cmd_sql(DatabaseBackend::MySql); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn cmd_sql_no_changes_sqlite() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config(); + write_model("users"); + + let plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }], + }; + fs::create_dir_all(cfg.migrations_dir()).unwrap(); + let path = cfg.migrations_dir().join("0001_init.json"); + fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); + + let result = cmd_sql(DatabaseBackend::Sqlite); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn emit_sql_prints_created_at_and_comment_postgres() { + let plan = MigrationPlan { + comment: Some("with comment".into()), + created_at: Some("2024-01-02T00:00:00Z".into()), + version: 1, + actions: vec![MigrationAction::RawSql { + sql: "SELECT 1;".into(), + }], + }; + + let result = emit_sql(&plan, DatabaseBackend::Postgres, &[]); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn emit_sql_prints_created_at_and_comment_mysql() { + let plan = MigrationPlan { + comment: Some("with comment".into()), + created_at: Some("2024-01-02T00:00:00Z".into()), + version: 1, + actions: vec![MigrationAction::RawSql { + sql: "SELECT 1;".into(), + }], + }; + + let result = emit_sql(&plan, DatabaseBackend::MySql, &[]); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn emit_sql_prints_created_at_and_comment_sqlite() { + let plan = MigrationPlan { + comment: Some("with comment".into()), + created_at: Some("2024-01-02T00:00:00Z".into()), + version: 1, + actions: vec![MigrationAction::RawSql { + sql: "SELECT 1;".into(), + }], + }; + + let result = emit_sql(&plan, DatabaseBackend::Sqlite, &[]); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn emit_sql_multiple_queries() { + let plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![ + MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + 
comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }, + MigrationAction::AddConstraint { + table: "users".into(), + constraint: TableConstraint::Index { + name: Some("idx_id".into()), + columns: vec!["id".into()], + }, + }, + ], + }; + + let result = emit_sql(&plan, DatabaseBackend::Postgres, &[]); + assert!(result.is_ok()); + } + + #[test] + #[serial] + fn emit_sql_multiple_queries_per_action() { + // Test case where a single action generates multiple queries (e.g., SQLite constraint addition) + // This should trigger the queries.len() > 1 branch (line 89) + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + let _cfg = write_config(); + write_model("users"); + + // Create a migration that adds a NOT NULL column in SQLite, which generates multiple queries + let plan = MigrationPlan { + comment: None, + created_at: None, + version: 1, + actions: vec![MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "nickname".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: Some("default".into()), + }], + }; + + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }]; + + let result = emit_sql(&plan, DatabaseBackend::Sqlite, ¤t_schema); + assert!(result.is_ok()); + } +} diff --git a/crates/vespertide-cli/src/commands/status.rs b/crates/vespertide-cli/src/commands/status.rs index a6fcafd..03fe162 100644 --- a/crates/vespertide-cli/src/commands/status.rs +++ b/crates/vespertide-cli/src/commands/status.rs @@ -1,292 +1,302 @@ -use anyhow::Result; -use colored::Colorize; -use vespertide_planner::schema_from_plans; - -use crate::utils::{load_config, load_migrations, load_models}; -use std::collections::HashSet; - -pub fn cmd_status() -> Result<()> { - let config = load_config()?; - let current_models = load_models(&config)?; - let applied_plans = load_migrations(&config)?; - - println!("{}", "Configuration:".bright_cyan().bold()); - println!( - " {} {}", - "Models directory:".cyan(), - format!("{}", config.models_dir().display()).bright_white() - ); - println!( - " {} {}", - "Migrations directory:".cyan(), - format!("{}", config.migrations_dir().display()).bright_white() - ); - println!( - " {} {:?}", - "Table naming:".cyan(), - config.table_naming_case - ); - println!( - " {} {:?}", - "Column naming:".cyan(), - config.column_naming_case - ); - println!(" {} {:?}", "Model format:".cyan(), config.model_format()); - println!( - " {} {:?}", - "Migration format:".cyan(), - config.migration_format() - ); - println!( - " {} {}", - "Migration filename pattern:".cyan(), - config.migration_filename_pattern().bright_white() - ); - println!(); - - println!( - "{} {}", - "Applied migrations:".bright_cyan().bold(), - applied_plans.len().to_string().bright_yellow() - ); - if !applied_plans.is_empty() { - let latest = applied_plans.last().unwrap(); - println!( - " {} {}", - "Latest version:".cyan(), - latest.version.to_string().bright_magenta() - ); - 
if let Some(comment) = &latest.comment { - println!(" {} {}", "Latest comment:".cyan(), comment.bright_white()); - } - if let Some(created_at) = &latest.created_at { - println!( - " {} {}", - "Latest created at:".cyan(), - created_at.bright_white() - ); - } - } - println!(); - - println!( - "{} {}", - "Current models:".bright_cyan().bold(), - current_models.len().to_string().bright_yellow() - ); - for model in ¤t_models { - // Count Index constraints - let index_count = model - .constraints - .iter() - .filter(|c| matches!(c, vespertide_core::TableConstraint::Index { .. })) - .count(); - // Count Unique constraints - let unique_count = model - .constraints - .iter() - .filter(|c| matches!(c, vespertide_core::TableConstraint::Unique { .. })) - .count(); - println!( - " {} {} ({} {}, {} {}, {} {})", - "-".bright_white(), - model.name.bright_green(), - model.columns.len().to_string().bright_blue(), - "columns".bright_white(), - index_count.to_string().bright_blue(), - "indexes".bright_white(), - unique_count.to_string().bright_blue(), - "uniques".bright_white() - ); - } - println!(); - - if !applied_plans.is_empty() { - let baseline = schema_from_plans(&applied_plans) - .map_err(|e| anyhow::anyhow!("schema reconstruction error: {}", e))?; - - let baseline_tables: HashSet<_> = baseline.iter().map(|t| &t.name).collect(); - let current_tables: HashSet<_> = current_models.iter().map(|t| &t.name).collect(); - - if baseline_tables == current_tables { - println!( - "{} {}", - "Status:".bright_cyan().bold(), - "Schema is synchronized with migrations.".bright_green() - ); - } else { - println!( - "{} {}", - "Status:".bright_cyan().bold(), - "Schema differs from applied migrations.".bright_yellow() - ); - println!( - " {} {} {}", - "Run".bright_white(), - "'vespertide diff'".bright_cyan().bold(), - "to see details.".bright_white() - ); - } - } else if current_models.is_empty() { - println!( - "{} {}", - "Status:".bright_cyan().bold(), - "No models or migrations found.".bright_yellow() - ); - } else { - println!( - "{} {}", - "Status:".bright_cyan().bold(), - "Models exist but no migrations have been applied.".bright_yellow() - ); - println!( - " {} {} {}", - "Run".bright_white(), - "'vespertide revision -m \"initial\"'".bright_cyan().bold(), - "to create the first migration.".bright_white() - ); - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - use serial_test::serial; - use std::{fs, path::PathBuf}; - use tempfile::tempdir; - use vespertide_config::VespertideConfig; - use vespertide_core::{ - ColumnDef, ColumnType, MigrationAction, MigrationPlan, SimpleColumnType, TableConstraint, - TableDef, - }; - - struct CwdGuard { - original: PathBuf, - } - - impl CwdGuard { - fn new(dir: &PathBuf) -> Self { - let original = std::env::current_dir().unwrap(); - std::env::set_current_dir(dir).unwrap(); - Self { original } - } - } - - impl Drop for CwdGuard { - fn drop(&mut self) { - let _ = std::env::set_current_dir(&self.original); - } - } - - fn write_config() -> VespertideConfig { - let cfg = VespertideConfig::default(); - let text = serde_json::to_string_pretty(&cfg).unwrap(); - fs::write("vespertide.json", text).unwrap(); - cfg - } - - fn write_model(name: &str) { - let models_dir = PathBuf::from("models"); - fs::create_dir_all(&models_dir).unwrap(); - let table = TableDef { - name: name.to_string(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - 
index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }; - let path = models_dir.join(format!("{name}.json")); - fs::write(path, serde_json::to_string_pretty(&table).unwrap()).unwrap(); - } - - fn write_migration(cfg: &VespertideConfig) { - fs::create_dir_all(cfg.migrations_dir()).unwrap(); - let plan = MigrationPlan { - comment: Some("init".into()), - created_at: Some("2024-01-01T00:00:00Z".into()), - version: 1, - actions: vec![MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }], - }; - let path = cfg.migrations_dir().join("0001_init.json"); - fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); - } - - #[test] - #[serial] - fn cmd_status_with_matching_schema() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config(); - write_model("users"); - write_migration(&cfg); - - cmd_status().unwrap(); - } - - #[test] - #[serial] - fn cmd_status_no_models_no_migrations_prints_message() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - let cfg = write_config(); - fs::create_dir_all(cfg.models_dir()).unwrap(); // empty models dir - fs::create_dir_all(cfg.migrations_dir()).unwrap(); // empty migrations dir - - cmd_status().unwrap(); - } - - #[test] - #[serial] - fn cmd_status_models_no_migrations_prints_hint() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - let cfg = write_config(); - write_model("users"); - fs::create_dir_all(cfg.migrations_dir()).unwrap(); - - cmd_status().unwrap(); - } - - #[test] - #[serial] - fn cmd_status_differs_prints_diff_hint() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - - let cfg = write_config(); - write_model("users"); - // add another model to differ from baseline - write_model("posts"); - write_migration(&cfg); // baseline only has users - - cmd_status().unwrap(); - } -} +use anyhow::Result; +use colored::Colorize; +use vespertide_planner::schema_from_plans; + +use crate::utils::{load_config, load_migrations, load_models}; +use std::collections::HashSet; + +pub fn cmd_status() -> Result<()> { + let config = load_config()?; + let current_models = load_models(&config)?; + let applied_plans = load_migrations(&config)?; + + println!("{}", "Configuration:".bright_cyan().bold()); + println!( + " {} {}", + "Models directory:".cyan(), + format!("{}", config.models_dir().display()).bright_white() + ); + println!( + " {} {}", + "Migrations directory:".cyan(), + format!("{}", config.migrations_dir().display()).bright_white() + ); + println!( + " {} {:?}", + "Table naming:".cyan(), + config.table_naming_case + ); + println!( + " {} {:?}", + "Column naming:".cyan(), + config.column_naming_case + ); + println!(" {} {:?}", "Model format:".cyan(), config.model_format()); + println!( + " {} {:?}", + "Migration format:".cyan(), + config.migration_format() + ); + println!( + " {} {}", + "Migration filename pattern:".cyan(), + config.migration_filename_pattern().bright_white() + ); + println!(); + + println!( + "{} {}", + "Applied migrations:".bright_cyan().bold(), + 
applied_plans.len().to_string().bright_yellow() + ); + if !applied_plans.is_empty() { + let latest = applied_plans.last().unwrap(); + println!( + " {} {}", + "Latest version:".cyan(), + latest.version.to_string().bright_magenta() + ); + if let Some(comment) = &latest.comment { + println!(" {} {}", "Latest comment:".cyan(), comment.bright_white()); + } + if let Some(created_at) = &latest.created_at { + println!( + " {} {}", + "Latest created at:".cyan(), + created_at.bright_white() + ); + } + } + println!(); + + println!( + "{} {}", + "Current models:".bright_cyan().bold(), + current_models.len().to_string().bright_yellow() + ); + for model in ¤t_models { + // Count Index constraints + let index_count = model + .constraints + .iter() + .filter(|c| matches!(c, vespertide_core::TableConstraint::Index { .. })) + .count(); + // Count Unique constraints + let unique_count = model + .constraints + .iter() + .filter(|c| matches!(c, vespertide_core::TableConstraint::Unique { .. })) + .count(); + print!( + " {} {} ({} {}, {} {}, {} {})", + "-".bright_white(), + model.name.bright_green(), + model.columns.len().to_string().bright_blue(), + "columns".bright_white(), + index_count.to_string().bright_blue(), + "indexes".bright_white(), + unique_count.to_string().bright_blue(), + "uniques".bright_white() + ); + if let Some(description) = &model.description { + println!( + "\n {} {}", + "Description:".bright_black(), + description.bright_white() + ); + } else { + println!(); + } + } + println!(); + + if !applied_plans.is_empty() { + let baseline = schema_from_plans(&applied_plans) + .map_err(|e| anyhow::anyhow!("schema reconstruction error: {}", e))?; + + let baseline_tables: HashSet<_> = baseline.iter().map(|t| &t.name).collect(); + let current_tables: HashSet<_> = current_models.iter().map(|t| &t.name).collect(); + + if baseline_tables == current_tables { + println!( + "{} {}", + "Status:".bright_cyan().bold(), + "Schema is synchronized with migrations.".bright_green() + ); + } else { + println!( + "{} {}", + "Status:".bright_cyan().bold(), + "Schema differs from applied migrations.".bright_yellow() + ); + println!( + " {} {} {}", + "Run".bright_white(), + "'vespertide diff'".bright_cyan().bold(), + "to see details.".bright_white() + ); + } + } else if current_models.is_empty() { + println!( + "{} {}", + "Status:".bright_cyan().bold(), + "No models or migrations found.".bright_yellow() + ); + } else { + println!( + "{} {}", + "Status:".bright_cyan().bold(), + "Models exist but no migrations have been applied.".bright_yellow() + ); + println!( + " {} {} {}", + "Run".bright_white(), + "'vespertide revision -m \"initial\"'".bright_cyan().bold(), + "to create the first migration.".bright_white() + ); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use serial_test::serial; + use std::{fs, path::PathBuf}; + use tempfile::tempdir; + use vespertide_config::VespertideConfig; + use vespertide_core::{ + ColumnDef, ColumnType, MigrationAction, MigrationPlan, SimpleColumnType, TableConstraint, + TableDef, + }; + + struct CwdGuard { + original: PathBuf, + } + + impl CwdGuard { + fn new(dir: &PathBuf) -> Self { + let original = std::env::current_dir().unwrap(); + std::env::set_current_dir(dir).unwrap(); + Self { original } + } + } + + impl Drop for CwdGuard { + fn drop(&mut self) { + let _ = std::env::set_current_dir(&self.original); + } + } + + fn write_config() -> VespertideConfig { + let cfg = VespertideConfig::default(); + let text = serde_json::to_string_pretty(&cfg).unwrap(); + 
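// Note: load_config() looks for vespertide.json in the process-wide current directory
// (the utils tests assert a "vespertide.json not found" error when it is absent), which is
// why these tests pin the cwd with CwdGuard and run under #[serial].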
fs::write("vespertide.json", text).unwrap(); + cfg + } + + fn write_model(name: &str) { + let models_dir = PathBuf::from("models"); + fs::create_dir_all(&models_dir).unwrap(); + let table = TableDef { + name: name.to_string(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }; + let path = models_dir.join(format!("{name}.json")); + fs::write(path, serde_json::to_string_pretty(&table).unwrap()).unwrap(); + } + + fn write_migration(cfg: &VespertideConfig) { + fs::create_dir_all(cfg.migrations_dir()).unwrap(); + let plan = MigrationPlan { + comment: Some("init".into()), + created_at: Some("2024-01-01T00:00:00Z".into()), + version: 1, + actions: vec![MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }], + }; + let path = cfg.migrations_dir().join("0001_init.json"); + fs::write(path, serde_json::to_string_pretty(&plan).unwrap()).unwrap(); + } + + #[test] + #[serial] + fn cmd_status_with_matching_schema() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config(); + write_model("users"); + write_migration(&cfg); + + cmd_status().unwrap(); + } + + #[test] + #[serial] + fn cmd_status_no_models_no_migrations_prints_message() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + let cfg = write_config(); + fs::create_dir_all(cfg.models_dir()).unwrap(); // empty models dir + fs::create_dir_all(cfg.migrations_dir()).unwrap(); // empty migrations dir + + cmd_status().unwrap(); + } + + #[test] + #[serial] + fn cmd_status_models_no_migrations_prints_hint() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + let cfg = write_config(); + write_model("users"); + fs::create_dir_all(cfg.migrations_dir()).unwrap(); + + cmd_status().unwrap(); + } + + #[test] + #[serial] + fn cmd_status_differs_prints_diff_hint() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + + let cfg = write_config(); + write_model("users"); + // add another model to differ from baseline + write_model("posts"); + write_migration(&cfg); // baseline only has users + + cmd_status().unwrap(); + } +} diff --git a/crates/vespertide-cli/src/utils.rs b/crates/vespertide-cli/src/utils.rs index dbaa616..9ee271b 100644 --- a/crates/vespertide-cli/src/utils.rs +++ b/crates/vespertide-cli/src/utils.rs @@ -1,314 +1,317 @@ -use vespertide_config::FileFormat; - -// Re-export loader functions for convenience -pub use vespertide_loader::{load_config, load_migrations, load_models}; - -/// Generate a migration filename from version and optional comment with format and pattern. 
-pub fn migration_filename_with_format_and_pattern( - version: u32, - comment: Option<&str>, - format: FileFormat, - pattern: &str, -) -> String { - let sanitized = sanitize_comment(comment); - let name = render_migration_name(pattern, version, &sanitized); - - let ext = match format { - FileFormat::Json => "json", - FileFormat::Yaml => "yaml", - FileFormat::Yml => "yml", - }; - - format!("{name}.{ext}") -} - -fn sanitize_comment(comment: Option<&str>) -> String { - comment - .map(|c| { - c.to_lowercase() - .chars() - .map(|ch| { - if ch.is_alphanumeric() || ch == ' ' { - ch - } else { - '_' - } - }) - .collect::() - .split_whitespace() - .collect::>() - .join("_") - }) - .unwrap_or_default() -} - -fn render_migration_name(pattern: &str, version: u32, sanitized_comment: &str) -> String { - let default_version = format!("{:04}", version); - let chars: Vec = pattern.chars().collect(); - let mut i = 0; - let mut out = String::new(); - - while i < chars.len() { - if chars[i] == '%' { - // Handle %v, %m, and %0Nv (width-padded). - if i + 1 < chars.len() { - let next = chars[i + 1]; - if next == 'v' { - out.push_str(&default_version); - i += 2; - continue; - } else if next == 'm' { - out.push_str(sanitized_comment); - i += 2; - continue; - } else if next == '0' { - let mut j = i + 2; - let mut width = String::new(); - while j < chars.len() && chars[j].is_ascii_digit() { - width.push(chars[j]); - j += 1; - } - if j < chars.len() && chars[j] == 'v' { - let w: usize = width.parse().unwrap_or(0); - if w == 0 { - out.push_str(&default_version); - } else { - out.push_str(&format!("{:0width$}", version, width = w)); - } - i = j + 1; - continue; - } - } - } - } - out.push(chars[i]); - i += 1; - } - - let mut name = out; - - // Trim redundant trailing separators when comment is empty. 
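// (This trim is what turns "%0v__" with no comment into "0003" rather than "0003__" in the
// rstest case below: trailing '_', '-' and '.' left behind by an empty %m are removed before
// the file extension is appended.)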
- while name.ends_with('_') || name.ends_with('-') || name.ends_with('.') { - name.pop(); - } - - if name.is_empty() { - default_version - } else { - name - } -} - -#[cfg(test)] -mod tests { - use super::*; - use rstest::rstest; - use serial_test::serial; - use std::fs; - use std::path::PathBuf; - use tempfile::tempdir; - use vespertide_config::VespertideConfig; - use vespertide_core::{ - ColumnDef, ColumnType, MigrationPlan, SimpleColumnType, TableConstraint, TableDef, - schema::foreign_key::ForeignKeySyntax, - }; - - struct CwdGuard { - original: PathBuf, - } - - impl CwdGuard { - fn new(dir: &PathBuf) -> Self { - let original = std::env::current_dir().unwrap(); - std::env::set_current_dir(dir).unwrap(); - Self { original } - } - } - - impl Drop for CwdGuard { - fn drop(&mut self) { - let _ = std::env::set_current_dir(&self.original); - } - } - - fn write_config() { - let cfg = VespertideConfig::default(); - let text = serde_json::to_string_pretty(&cfg).unwrap(); - fs::write("vespertide.json", text).unwrap(); - } - - #[test] - #[serial] - fn load_config_missing_file_errors() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - let err = load_config().unwrap_err(); - assert!(err.to_string().contains("vespertide.json not found")); - } - - #[test] - #[serial] - fn load_models_reads_yaml_and_validates() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - fs::create_dir_all("models").unwrap(); - let table = TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }; - fs::write("models/users.yaml", serde_yaml::to_string(&table).unwrap()).unwrap(); - - let models = load_models(&VespertideConfig::default()).unwrap(); - assert_eq!(models.len(), 1); - assert_eq!(models[0].name, "users"); - } - - #[test] - #[serial] - fn load_models_recursive_processes_subdirectories() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - fs::create_dir_all("models/subdir").unwrap(); - - // Create model in subdirectory - let table = TableDef { - name: "subtable".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }; - let content = serde_json::to_string_pretty(&table).unwrap(); - fs::write("models/subdir/subtable.json", content).unwrap(); - - let models = load_models(&VespertideConfig::default()).unwrap(); - assert_eq!(models.len(), 1); - assert_eq!(models[0].name, "subtable"); - } - - #[test] - #[serial] - fn load_migrations_reads_yaml_and_sorts() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - fs::create_dir_all("migrations").unwrap(); - let plan1 = MigrationPlan { - comment: Some("first".into()), - created_at: None, - version: 2, - actions: vec![], - }; - let plan0 = MigrationPlan { - comment: Some("zero".into()), - created_at: None, - version: 1, - actions: vec![], 
- }; - fs::write( - "migrations/0002_first.yaml", - serde_yaml::to_string(&plan1).unwrap(), - ) - .unwrap(); - fs::write( - "migrations/0001_zero.yaml", - serde_yaml::to_string(&plan0).unwrap(), - ) - .unwrap(); - - let plans = load_migrations(&VespertideConfig::default()).unwrap(); - assert_eq!(plans.len(), 2); - assert_eq!(plans[0].version, 1); - assert_eq!(plans[1].version, 2); - } - - #[rstest] - #[case( - 5, - Some("Hello! World"), - FileFormat::Yml, - "%04v_%m", - "0005_hello__world.yml" - )] - #[case(3, None, FileFormat::Json, "%0v__", "0003.json")] // width 0 falls back to default version and trailing separators are trimmed - #[case(12, None, FileFormat::Json, "%v", "0012.json")] - #[case(7, None, FileFormat::Json, "%m", "0007.json")] // uses default when comment only and empty - fn migration_filename_with_format_and_pattern_tests( - #[case] version: u32, - #[case] comment: Option<&str>, - #[case] format: FileFormat, - #[case] pattern: &str, - #[case] expected: &str, - ) { - let name = migration_filename_with_format_and_pattern(version, comment, format, pattern); - assert_eq!(name, expected); - } - - #[test] - #[serial] - fn load_models_fails_on_invalid_fk_format() { - let tmp = tempdir().unwrap(); - let _guard = CwdGuard::new(&tmp.path().to_path_buf()); - write_config(); - - fs::create_dir_all("models").unwrap(); - - // Create a model with invalid FK string format (missing dot separator) - let table = TableDef { - name: "orders".into(), - columns: vec![ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - // Invalid FK format: should be "table.column" but missing the dot - foreign_key: Some(ForeignKeySyntax::String("invalid_format".into())), - }], - constraints: vec![], - }; - fs::write( - "models/orders.json", - serde_json::to_string_pretty(&table).unwrap(), - ) - .unwrap(); - - let result = load_models(&VespertideConfig::default()); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Failed to normalize table 'orders'")); - } -} +use vespertide_config::FileFormat; + +// Re-export loader functions for convenience +pub use vespertide_loader::{load_config, load_migrations, load_models}; + +/// Generate a migration filename from version and optional comment with format and pattern. +pub fn migration_filename_with_format_and_pattern( + version: u32, + comment: Option<&str>, + format: FileFormat, + pattern: &str, +) -> String { + let sanitized = sanitize_comment(comment); + let name = render_migration_name(pattern, version, &sanitized); + + let ext = match format { + FileFormat::Json => "json", + FileFormat::Yaml => "yaml", + FileFormat::Yml => "yml", + }; + + format!("{name}.{ext}") +} + +fn sanitize_comment(comment: Option<&str>) -> String { + comment + .map(|c| { + c.to_lowercase() + .chars() + .map(|ch| { + if ch.is_alphanumeric() || ch == ' ' { + ch + } else { + '_' + } + }) + .collect::() + .split_whitespace() + .collect::>() + .join("_") + }) + .unwrap_or_default() +} + +fn render_migration_name(pattern: &str, version: u32, sanitized_comment: &str) -> String { + let default_version = format!("{:04}", version); + let chars: Vec = pattern.chars().collect(); + let mut i = 0; + let mut out = String::new(); + + while i < chars.len() { + if chars[i] == '%' { + // Handle %v, %m, and %0Nv (width-padded). 
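// For example, per the rstest cases below: "%04v" with version 5 expands to "0005", while
// "%0v" (an explicit width of 0) falls back to the default zero-padded version.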
+ if i + 1 < chars.len() { + let next = chars[i + 1]; + if next == 'v' { + out.push_str(&default_version); + i += 2; + continue; + } else if next == 'm' { + out.push_str(sanitized_comment); + i += 2; + continue; + } else if next == '0' { + let mut j = i + 2; + let mut width = String::new(); + while j < chars.len() && chars[j].is_ascii_digit() { + width.push(chars[j]); + j += 1; + } + if j < chars.len() && chars[j] == 'v' { + let w: usize = width.parse().unwrap_or(0); + if w == 0 { + out.push_str(&default_version); + } else { + out.push_str(&format!("{:0width$}", version, width = w)); + } + i = j + 1; + continue; + } + } + } + } + out.push(chars[i]); + i += 1; + } + + let mut name = out; + + // Trim redundant trailing separators when comment is empty. + while name.ends_with('_') || name.ends_with('-') || name.ends_with('.') { + name.pop(); + } + + if name.is_empty() { + default_version + } else { + name + } +} + +#[cfg(test)] +mod tests { + use super::*; + use rstest::rstest; + use serial_test::serial; + use std::fs; + use std::path::PathBuf; + use tempfile::tempdir; + use vespertide_config::VespertideConfig; + use vespertide_core::{ + ColumnDef, ColumnType, MigrationPlan, SimpleColumnType, TableConstraint, TableDef, + schema::foreign_key::ForeignKeySyntax, + }; + + struct CwdGuard { + original: PathBuf, + } + + impl CwdGuard { + fn new(dir: &PathBuf) -> Self { + let original = std::env::current_dir().unwrap(); + std::env::set_current_dir(dir).unwrap(); + Self { original } + } + } + + impl Drop for CwdGuard { + fn drop(&mut self) { + let _ = std::env::set_current_dir(&self.original); + } + } + + fn write_config() { + let cfg = VespertideConfig::default(); + let text = serde_json::to_string_pretty(&cfg).unwrap(); + fs::write("vespertide.json", text).unwrap(); + } + + #[test] + #[serial] + fn load_config_missing_file_errors() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + let err = load_config().unwrap_err(); + assert!(err.to_string().contains("vespertide.json not found")); + } + + #[test] + #[serial] + fn load_models_reads_yaml_and_validates() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + fs::create_dir_all("models").unwrap(); + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }; + fs::write("models/users.yaml", serde_yaml::to_string(&table).unwrap()).unwrap(); + + let models = load_models(&VespertideConfig::default()).unwrap(); + assert_eq!(models.len(), 1); + assert_eq!(models[0].name, "users"); + } + + #[test] + #[serial] + fn load_models_recursive_processes_subdirectories() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + fs::create_dir_all("models/subdir").unwrap(); + + // Create model in subdirectory + let table = TableDef { + name: "subtable".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: 
vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }; + let content = serde_json::to_string_pretty(&table).unwrap(); + fs::write("models/subdir/subtable.json", content).unwrap(); + + let models = load_models(&VespertideConfig::default()).unwrap(); + assert_eq!(models.len(), 1); + assert_eq!(models[0].name, "subtable"); + } + + #[test] + #[serial] + fn load_migrations_reads_yaml_and_sorts() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + fs::create_dir_all("migrations").unwrap(); + let plan1 = MigrationPlan { + comment: Some("first".into()), + created_at: None, + version: 2, + actions: vec![], + }; + let plan0 = MigrationPlan { + comment: Some("zero".into()), + created_at: None, + version: 1, + actions: vec![], + }; + fs::write( + "migrations/0002_first.yaml", + serde_yaml::to_string(&plan1).unwrap(), + ) + .unwrap(); + fs::write( + "migrations/0001_zero.yaml", + serde_yaml::to_string(&plan0).unwrap(), + ) + .unwrap(); + + let plans = load_migrations(&VespertideConfig::default()).unwrap(); + assert_eq!(plans.len(), 2); + assert_eq!(plans[0].version, 1); + assert_eq!(plans[1].version, 2); + } + + #[rstest] + #[case( + 5, + Some("Hello! World"), + FileFormat::Yml, + "%04v_%m", + "0005_hello__world.yml" + )] + #[case(3, None, FileFormat::Json, "%0v__", "0003.json")] // width 0 falls back to default version and trailing separators are trimmed + #[case(12, None, FileFormat::Json, "%v", "0012.json")] + #[case(7, None, FileFormat::Json, "%m", "0007.json")] // uses default when comment only and empty + fn migration_filename_with_format_and_pattern_tests( + #[case] version: u32, + #[case] comment: Option<&str>, + #[case] format: FileFormat, + #[case] pattern: &str, + #[case] expected: &str, + ) { + let name = migration_filename_with_format_and_pattern(version, comment, format, pattern); + assert_eq!(name, expected); + } + + #[test] + #[serial] + fn load_models_fails_on_invalid_fk_format() { + let tmp = tempdir().unwrap(); + let _guard = CwdGuard::new(&tmp.path().to_path_buf()); + write_config(); + + fs::create_dir_all("models").unwrap(); + + // Create a model with invalid FK string format (missing dot separator) + let table = TableDef { + name: "orders".into(), + description: None, + columns: vec![ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + // Invalid FK format: should be "table.column" but missing the dot + foreign_key: Some(ForeignKeySyntax::String("invalid_format".into())), + }], + constraints: vec![], + }; + fs::write( + "models/orders.json", + serde_json::to_string_pretty(&table).unwrap(), + ) + .unwrap(); + + let result = load_models(&VespertideConfig::default()); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Failed to normalize table 'orders'")); + } +} diff --git a/crates/vespertide-core/src/schema/table.rs b/crates/vespertide-core/src/schema/table.rs index 3e95b53..6e3987d 100644 --- a/crates/vespertide-core/src/schema/table.rs +++ b/crates/vespertide-core/src/schema/table.rs @@ -1,1543 +1,1582 @@ -use schemars::JsonSchema; - -use serde::{Deserialize, Serialize}; -use std::collections::{HashMap, HashSet}; - -use crate::schema::{ - StrOrBoolOrArray, column::ColumnDef, constraint::TableConstraint, - foreign_key::ForeignKeySyntax, names::TableName, 
primary_key::PrimaryKeySyntax, -}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum TableValidationError { - DuplicateIndexColumn { - index_name: String, - column_name: String, - }, - InvalidForeignKeyFormat { - column_name: String, - value: String, - }, -} - -impl std::fmt::Display for TableValidationError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TableValidationError::DuplicateIndexColumn { - index_name, - column_name, - } => { - write!( - f, - "Duplicate index '{}' on column '{}': the same index name cannot be applied to the same column multiple times", - index_name, column_name - ) - } - TableValidationError::InvalidForeignKeyFormat { column_name, value } => { - write!( - f, - "Invalid foreign key format '{}' on column '{}': expected 'table.column' format", - value, column_name - ) - } - } - } -} - -impl std::error::Error for TableValidationError {} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub struct TableDef { - pub name: TableName, - pub columns: Vec, - pub constraints: Vec, -} - -impl TableDef { - /// Normalizes inline column constraints (primary_key, unique, index, foreign_key) - /// into table-level constraints. - /// Returns a new TableDef with all inline constraints converted to table-level. - /// - /// # Errors - /// - /// Returns an error if the same index name is applied to the same column multiple times. - pub fn normalize(&self) -> Result { - let mut constraints = self.constraints.clone(); - - // Collect columns with inline primary_key and check for auto_increment - let mut pk_columns: Vec = Vec::new(); - let mut pk_auto_increment = false; - - for col in &self.columns { - if let Some(ref pk) = col.primary_key { - match pk { - PrimaryKeySyntax::Bool(true) => { - pk_columns.push(col.name.clone()); - } - PrimaryKeySyntax::Bool(false) => {} - PrimaryKeySyntax::Object(pk_def) => { - pk_columns.push(col.name.clone()); - if pk_def.auto_increment { - pk_auto_increment = true; - } - } - } - } - } - - // Add primary key constraint if any columns have inline pk and no existing pk constraint. - if !pk_columns.is_empty() { - let has_pk_constraint = constraints - .iter() - .any(|c| matches!(c, TableConstraint::PrimaryKey { .. 
})); - - if !has_pk_constraint { - constraints.push(TableConstraint::PrimaryKey { - auto_increment: pk_auto_increment, - columns: pk_columns, - }); - } - } - - // Group columns by unique constraint name to create composite unique constraints - // Use same pattern as index grouping - let mut unique_groups: HashMap> = HashMap::new(); - let mut unique_order: Vec = Vec::new(); // Preserve order of first occurrence - - for col in &self.columns { - if let Some(ref unique_val) = col.unique { - match unique_val { - StrOrBoolOrArray::Str(name) => { - // Named unique constraint - group by name for composite constraints - let unique_name = name.clone(); - - if !unique_groups.contains_key(&unique_name) { - unique_order.push(unique_name.clone()); - } - - unique_groups - .entry(unique_name) - .or_default() - .push(col.name.clone()); - } - StrOrBoolOrArray::Bool(true) => { - // Use special marker for auto-generated unique constraints (without custom name) - let group_key = format!("__auto_{}", col.name); - - if !unique_groups.contains_key(&group_key) { - unique_order.push(group_key.clone()); - } - - unique_groups - .entry(group_key) - .or_default() - .push(col.name.clone()); - } - StrOrBoolOrArray::Bool(false) => continue, - StrOrBoolOrArray::Array(names) => { - // Array format: each element is a constraint name - // This column will be part of all these named constraints - for unique_name in names { - if !unique_groups.contains_key(unique_name.as_str()) { - unique_order.push(unique_name.clone()); - } - - unique_groups - .entry(unique_name.clone()) - .or_default() - .push(col.name.clone()); - } - } - } - } - } - - // Create unique constraints from grouped columns in order - for unique_name in unique_order { - let columns = unique_groups.get(&unique_name).unwrap().clone(); - - // Determine if this is an auto-generated unique (from unique: true) - // or a named unique (from unique: "name") - let constraint_name = if unique_name.starts_with("__auto_") { - // Auto-generated unique - use None so SQL generation can create the name - None - } else { - // Named unique - preserve the custom name - Some(unique_name.clone()) - }; - - // Check if this unique constraint already exists - let exists = constraints.iter().any(|c| { - if let TableConstraint::Unique { - name, - columns: cols, - } = c - { - // Match by name if both have names, otherwise match by columns - match (&constraint_name, name) { - (Some(n1), Some(n2)) => n1 == n2, - (None, None) => cols == &columns, - _ => false, - } - } else { - false - } - }); - - if !exists { - constraints.push(TableConstraint::Unique { - name: constraint_name, - columns, - }); - } - } - - // Process inline foreign_key and index for each column - for col in &self.columns { - // Handle inline foreign_key - if let Some(ref fk_syntax) = col.foreign_key { - // Convert ForeignKeySyntax to ForeignKeyDef - let (ref_table, ref_columns, on_delete, on_update) = match fk_syntax { - ForeignKeySyntax::String(s) => { - // Parse "table.column" format - let parts: Vec<&str> = s.split('.').collect(); - if parts.len() != 2 || parts[0].is_empty() || parts[1].is_empty() { - return Err(TableValidationError::InvalidForeignKeyFormat { - column_name: col.name.clone(), - value: s.clone(), - }); - } - (parts[0].to_string(), vec![parts[1].to_string()], None, None) - } - ForeignKeySyntax::Object(fk_def) => ( - fk_def.ref_table.clone(), - fk_def.ref_columns.clone(), - fk_def.on_delete.clone(), - fk_def.on_update.clone(), - ), - }; - - // Check if this foreign key already exists - let exists = 
constraints.iter().any(|c| { - if let TableConstraint::ForeignKey { columns, .. } = c { - columns.len() == 1 && columns[0] == col.name - } else { - false - } - }); - - if !exists { - constraints.push(TableConstraint::ForeignKey { - name: None, - columns: vec![col.name.clone()], - ref_table, - ref_columns, - on_delete, - on_update, - }); - } - } - } - - // Group columns by index name to create composite indexes - // Use a HashMap to group, but preserve column order by tracking first occurrence - let mut index_groups: HashMap> = HashMap::new(); - let mut index_order: Vec = Vec::new(); // Preserve order of first occurrence - // Track which columns are already in each index from inline definitions to detect duplicates - // Only track inline definitions, not existing table-level indexes (they can be extended) - let mut inline_index_column_tracker: HashMap> = HashMap::new(); - - for col in &self.columns { - if let Some(ref index_val) = col.index { - match index_val { - StrOrBoolOrArray::Str(name) => { - // Named index - group by name - let index_name = name.clone(); - - // Check for duplicate - only check inline definitions, not existing table-level indexes - if let Some(columns) = inline_index_column_tracker.get(name.as_str()) - && columns.contains(col.name.as_str()) - { - return Err(TableValidationError::DuplicateIndexColumn { - index_name: name.clone(), - column_name: col.name.clone(), - }); - } - - if !index_groups.contains_key(&index_name) { - index_order.push(index_name.clone()); - } - - index_groups - .entry(index_name.clone()) - .or_default() - .push(col.name.clone()); - - inline_index_column_tracker - .entry(index_name) - .or_default() - .insert(col.name.clone()); - } - StrOrBoolOrArray::Bool(true) => { - // Use special marker for auto-generated indexes (without custom name) - // We use the column name as a unique key to group, but will use None for the constraint name - // This allows SQL generation to auto-generate the name based on naming conventions - let group_key = format!("__auto_{}", col.name); - - // Check for duplicate - only check inline definitions - if let Some(columns) = inline_index_column_tracker.get(group_key.as_str()) - && columns.contains(col.name.as_str()) - { - return Err(TableValidationError::DuplicateIndexColumn { - index_name: group_key.clone(), - column_name: col.name.clone(), - }); - } - - if !index_groups.contains_key(&group_key) { - index_order.push(group_key.clone()); - } - - index_groups - .entry(group_key.clone()) - .or_default() - .push(col.name.clone()); - - inline_index_column_tracker - .entry(group_key) - .or_default() - .insert(col.name.clone()); - } - StrOrBoolOrArray::Bool(false) => continue, - StrOrBoolOrArray::Array(names) => { - // Array format: each element is an index name - // This column will be part of all these named indexes - // Check for duplicates within the array - let mut seen_in_array = HashSet::new(); - for index_name in names { - // Check for duplicate within the same array - if seen_in_array.contains(index_name.as_str()) { - return Err(TableValidationError::DuplicateIndexColumn { - index_name: index_name.clone(), - column_name: col.name.clone(), - }); - } - seen_in_array.insert(index_name.clone()); - - // Check for duplicate across different inline definitions - // Only check inline definitions, not existing table-level indexes - if let Some(columns) = - inline_index_column_tracker.get(index_name.as_str()) - && columns.contains(col.name.as_str()) - { - return Err(TableValidationError::DuplicateIndexColumn { - index_name: 
index_name.clone(), - column_name: col.name.clone(), - }); - } - - if !index_groups.contains_key(index_name.as_str()) { - index_order.push(index_name.clone()); - } - - index_groups - .entry(index_name.clone()) - .or_default() - .push(col.name.clone()); - - inline_index_column_tracker - .entry(index_name.clone()) - .or_default() - .insert(col.name.clone()); - } - } - } - } - } - - // Create indexes from grouped columns in order - for index_name in index_order { - let columns = index_groups.get(&index_name).unwrap().clone(); - - // Determine if this is an auto-generated index (from index: true) - // or a named index (from index: "name") - let constraint_name = if index_name.starts_with("__auto_") { - // Auto-generated index - use None so SQL generation can create the name - None - } else { - // Named index - preserve the custom name - Some(index_name.clone()) - }; - - // Check if this index already exists - let exists = constraints.iter().any(|c| { - if let TableConstraint::Index { - name, - columns: cols, - } = c - { - // Match by name if both have names, otherwise match by columns - match (&constraint_name, name) { - (Some(n1), Some(n2)) => n1 == n2, - (None, None) => cols == &columns, - _ => false, - } - } else { - false - } - }); - - if !exists { - constraints.push(TableConstraint::Index { - name: constraint_name, - columns, - }); - } - } - - Ok(TableDef { - name: self.name.clone(), - columns: self.columns.clone(), - constraints, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::schema::column::{ColumnType, SimpleColumnType}; - use crate::schema::foreign_key::{ForeignKeyDef, ForeignKeySyntax}; - use crate::schema::primary_key::PrimaryKeySyntax; - use crate::schema::reference::ReferenceAction; - use crate::schema::str_or_bool::StrOrBoolOrArray; - - fn col(name: &str, ty: ColumnType) -> ColumnDef { - ColumnDef { - name: name.to_string(), - r#type: ty, - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - } - } - - #[test] - fn normalize_inline_primary_key() { - let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); - id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); - - let table = TableDef { - name: "users".into(), - columns: vec![ - id_col, - col("name", ColumnType::Simple(SimpleColumnType::Text)), - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::PrimaryKey { columns, .. } if columns == &["id".to_string()] - )); - } - - #[test] - fn normalize_multiple_inline_primary_keys() { - let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); - id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); - - let mut tenant_col = col("tenant_id", ColumnType::Simple(SimpleColumnType::Integer)); - tenant_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); - - let table = TableDef { - name: "users".into(), - columns: vec![id_col, tenant_col], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::PrimaryKey { columns, .. 
} if columns == &["id".to_string(), "tenant_id".to_string()] - )); - } - - #[test] - fn normalize_does_not_duplicate_existing_pk() { - let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); - id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); - - let table = TableDef { - name: "users".into(), - columns: vec![id_col], - constraints: vec![TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - } - - #[test] - fn normalize_ignores_primary_key_false() { - let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); - id_col.primary_key = Some(PrimaryKeySyntax::Bool(false)); - - let table = TableDef { - name: "users".into(), - columns: vec![ - id_col, - col("name", ColumnType::Simple(SimpleColumnType::Text)), - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - // primary_key: false should be ignored, so no primary key constraint should be added - assert_eq!(normalized.constraints.len(), 0); - } - - #[test] - fn normalize_inline_unique_bool() { - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Bool(true)); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::Unique { name: None, columns } if columns == &["email".to_string()] - )); - } - - #[test] - fn normalize_inline_unique_with_name() { - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Str("uq_users_email".into())); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::Unique { name: Some(n), columns } - if n == "uq_users_email" && columns == &["email".to_string()] - )); - } - - #[test] - fn normalize_composite_unique_from_string_name() { - // Test that multiple columns with the same unique constraint name - // are grouped into a single composite unique constraint - let mut route_col = col("join_route", ColumnType::Simple(SimpleColumnType::Text)); - route_col.unique = Some(StrOrBoolOrArray::Str("route_provider_id".into())); - - let mut provider_col = col("provider_id", ColumnType::Simple(SimpleColumnType::Text)); - provider_col.unique = Some(StrOrBoolOrArray::Str("route_provider_id".into())); - - let table = TableDef { - name: "user".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - route_col, - provider_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::Unique { name: Some(n), columns } - if n == "route_provider_id" - && columns == &["join_route".to_string(), "provider_id".to_string()] - )); - } - - #[test] - fn normalize_unique_name_mismatch_creates_both_constraints() { - // Test coverage for line 181: When an inline unique has a name but 
existing doesn't (or vice versa), - // they should not match and both constraints should be created - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Str("named_unique".into())); - - let table = TableDef { - name: "user".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![ - // Existing unnamed unique constraint on same column - TableConstraint::Unique { - name: None, - columns: vec!["email".into()], - }, - ], - }; - - let normalized = table.normalize().unwrap(); - - // Should have 2 unique constraints: one named, one unnamed - let unique_constraints: Vec<_> = normalized - .constraints - .iter() - .filter(|c| matches!(c, TableConstraint::Unique { .. })) - .collect(); - assert_eq!( - unique_constraints.len(), - 2, - "Should keep both named and unnamed unique constraints as they don't match" - ); - - // Verify we have one named and one unnamed - let has_named = unique_constraints.iter().any( - |c| matches!(c, TableConstraint::Unique { name: Some(n), .. } if n == "named_unique"), - ); - let has_unnamed = unique_constraints - .iter() - .any(|c| matches!(c, TableConstraint::Unique { name: None, .. })); - - assert!(has_named, "Should have named unique constraint"); - assert!(has_unnamed, "Should have unnamed unique constraint"); - } - - #[test] - fn normalize_inline_index_bool() { - let mut name_col = col("name", ColumnType::Simple(SimpleColumnType::Text)); - name_col.index = Some(StrOrBoolOrArray::Bool(true)); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - name_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - // Count Index constraints - let indexes: Vec<_> = normalized - .constraints - .iter() - .filter(|c| matches!(c, TableConstraint::Index { .. })) - .collect(); - assert_eq!(indexes.len(), 1); - // Auto-generated indexes (from index: true) should have name: None - // SQL generation will create the actual name based on naming conventions - assert!(matches!( - indexes[0], - TableConstraint::Index { name: None, columns } - if columns == &["name".to_string()] - )); - } - - #[test] - fn normalize_inline_index_with_name() { - let mut name_col = col("name", ColumnType::Simple(SimpleColumnType::Text)); - name_col.index = Some(StrOrBoolOrArray::Str("custom_idx_name".into())); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - name_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - let indexes: Vec<_> = normalized - .constraints - .iter() - .filter(|c| matches!(c, TableConstraint::Index { .. })) - .collect(); - assert_eq!(indexes.len(), 1); - assert!(matches!( - indexes[0], - TableConstraint::Index { name: Some(n), .. 
} - if n == "custom_idx_name" - )); - } - - #[test] - fn normalize_inline_foreign_key() { - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::Object(ForeignKeyDef { - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: Some(ReferenceAction::Cascade), - on_update: None, - })); - - let table = TableDef { - name: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - user_id_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::ForeignKey { - name: None, - columns, - ref_table, - ref_columns, - on_delete: Some(ReferenceAction::Cascade), - on_update: None, - } if columns == &["user_id".to_string()] - && ref_table == "users" - && ref_columns == &["id".to_string()] - )); - } - - #[test] - fn normalize_all_inline_constraints() { - let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); - id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); - - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Bool(true)); - - let mut name_col = col("name", ColumnType::Simple(SimpleColumnType::Text)); - name_col.index = Some(StrOrBoolOrArray::Bool(true)); - - let mut user_id_col = col("org_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::Object(ForeignKeyDef { - ref_table: "orgs".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - })); - - let table = TableDef { - name: "users".into(), - columns: vec![id_col, email_col, name_col, user_id_col], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - // Should have: PrimaryKey, Unique, ForeignKey, Index - // Count non-Index constraints - let non_index_constraints: Vec<_> = normalized - .constraints - .iter() - .filter(|c| !matches!(c, TableConstraint::Index { .. })) - .collect(); - assert_eq!(non_index_constraints.len(), 3); - // Should have: 1 index - let indexes: Vec<_> = normalized - .constraints - .iter() - .filter(|c| matches!(c, TableConstraint::Index { .. 
})) - .collect(); - assert_eq!(indexes.len(), 1); - } - - #[test] - fn normalize_composite_index_from_string_name() { - let mut updated_at_col = col( - "updated_at", - ColumnType::Simple(SimpleColumnType::Timestamp), - ); - updated_at_col.index = Some(StrOrBoolOrArray::Str("tuple".into())); - - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.index = Some(StrOrBoolOrArray::Str("tuple".into())); - - let table = TableDef { - name: "post".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - updated_at_col, - user_id_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - let indexes: Vec<_> = normalized - .constraints - .iter() - .filter_map(|c| { - if let TableConstraint::Index { name, columns } = c { - Some((name.clone(), columns.clone())) - } else { - None - } - }) - .collect(); - assert_eq!(indexes.len(), 1); - assert_eq!(indexes[0].0, Some("tuple".to_string())); - assert_eq!( - indexes[0].1, - vec!["updated_at".to_string(), "user_id".to_string()] - ); - } - - #[test] - fn normalize_multiple_different_indexes() { - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.index = Some(StrOrBoolOrArray::Str("idx_a".into())); - - let mut col2 = col("col2", ColumnType::Simple(SimpleColumnType::Text)); - col2.index = Some(StrOrBoolOrArray::Str("idx_a".into())); - - let mut col3 = col("col3", ColumnType::Simple(SimpleColumnType::Text)); - col3.index = Some(StrOrBoolOrArray::Str("idx_b".into())); - - let mut col4 = col("col4", ColumnType::Simple(SimpleColumnType::Text)); - col4.index = Some(StrOrBoolOrArray::Bool(true)); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1, - col2, - col3, - col4, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - let indexes: Vec<_> = normalized - .constraints - .iter() - .filter_map(|c| { - if let TableConstraint::Index { name, columns } = c { - Some((name.clone(), columns.clone())) - } else { - None - } - }) - .collect(); - assert_eq!(indexes.len(), 3); - - // Check idx_a composite index - let idx_a = indexes - .iter() - .find(|(n, _)| n == &Some("idx_a".to_string())) - .unwrap(); - assert_eq!(idx_a.1, vec!["col1".to_string(), "col2".to_string()]); - - // Check idx_b single column index - let idx_b = indexes - .iter() - .find(|(n, _)| n == &Some("idx_b".to_string())) - .unwrap(); - assert_eq!(idx_b.1, vec!["col3".to_string()]); - - // Check auto-generated index for col4 (should have name: None) - let idx_col4 = indexes.iter().find(|(n, _)| n.is_none()).unwrap(); - assert_eq!(idx_col4.1, vec!["col4".to_string()]); - } - - #[test] - fn normalize_false_values_are_ignored() { - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Bool(false)); - email_col.index = Some(StrOrBoolOrArray::Bool(false)); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 0); - } - - #[test] - fn normalize_table_without_primary_key() { - // Test normalize with a table that has no primary key columns - // This should cover lines 67-69, 72-73, and 93 (pk_columns.is_empty() branch) - let table = TableDef { - name: "users".into(), - columns: vec![ - col("name", 
ColumnType::Simple(SimpleColumnType::Text)), - col("email", ColumnType::Simple(SimpleColumnType::Text)), - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - // Should not add any primary key constraint - assert_eq!(normalized.constraints.len(), 0); - } - - #[test] - fn normalize_multiple_indexes_from_same_array() { - // Multiple columns with same array of index names should create multiple composite indexes - let mut updated_at_col = col( - "updated_at", - ColumnType::Simple(SimpleColumnType::Timestamp), - ); - updated_at_col.index = Some(StrOrBoolOrArray::Array(vec![ - "tuple".into(), - "tuple2".into(), - ])); - - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.index = Some(StrOrBoolOrArray::Array(vec![ - "tuple".into(), - "tuple2".into(), - ])); - - let table = TableDef { - name: "post".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - updated_at_col, - user_id_col, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - // Should have: tuple (composite: updated_at, user_id), tuple2 (composite: updated_at, user_id) - let indexes: Vec<_> = normalized - .constraints - .iter() - .filter_map(|c| { - if let TableConstraint::Index { name, columns } = c { - Some((name.clone(), columns.clone())) - } else { - None - } - }) - .collect(); - assert_eq!(indexes.len(), 2); - - let tuple_idx = indexes - .iter() - .find(|(n, _)| n == &Some("tuple".to_string())) - .unwrap(); - let mut sorted_cols = tuple_idx.1.clone(); - sorted_cols.sort(); - assert_eq!( - sorted_cols, - vec!["updated_at".to_string(), "user_id".to_string()] - ); - - let tuple2_idx = indexes - .iter() - .find(|(n, _)| n == &Some("tuple2".to_string())) - .unwrap(); - let mut sorted_cols2 = tuple2_idx.1.clone(); - sorted_cols2.sort(); - assert_eq!( - sorted_cols2, - vec!["updated_at".to_string(), "user_id".to_string()] - ); - } - - #[test] - fn normalize_inline_unique_with_array_existing_constraint() { - // Test Array format where constraint already exists - should add column to existing - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); - - let mut col2 = col("col2", ColumnType::Simple(SimpleColumnType::Text)); - col2.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1, - col2, - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - let unique_constraint = &normalized.constraints[0]; - assert!(matches!( - unique_constraint, - TableConstraint::Unique { name: Some(n), columns: _ } - if n == "uq_group" - )); - if let TableConstraint::Unique { columns, .. 
} = unique_constraint { - let mut sorted_cols = columns.clone(); - sorted_cols.sort(); - assert_eq!(sorted_cols, vec!["col1".to_string(), "col2".to_string()]); - } - } - - #[test] - fn normalize_inline_unique_with_array_column_already_in_constraint() { - // Test Array format where column is already in constraint - should not duplicate - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1.clone(), - ], - constraints: vec![], - }; - - let normalized1 = table.normalize().unwrap(); - assert_eq!(normalized1.constraints.len(), 1); - - // Add same column again - should not create duplicate - let table2 = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1, - ], - constraints: normalized1.constraints.clone(), - }; - - let normalized2 = table2.normalize().unwrap(); - assert_eq!(normalized2.constraints.len(), 1); - if let TableConstraint::Unique { columns, .. } = &normalized2.constraints[0] { - assert_eq!(columns.len(), 1); - assert_eq!(columns[0], "col1"); - } - } - - #[test] - fn normalize_inline_unique_str_already_exists() { - // Test that existing unique constraint with same name and column is not duplicated - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Str("uq_email".into())); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }], - }; - - let normalized = table.normalize().unwrap(); - // Should not duplicate the constraint - let unique_constraints: Vec<_> = normalized - .constraints - .iter() - .filter(|c| matches!(c, TableConstraint::Unique { .. })) - .collect(); - assert_eq!(unique_constraints.len(), 1); - } - - #[test] - fn normalize_inline_unique_bool_already_exists() { - // Test that existing unnamed unique constraint with same column is not duplicated - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Bool(true)); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![TableConstraint::Unique { - name: None, - columns: vec!["email".into()], - }], - }; - - let normalized = table.normalize().unwrap(); - // Should not duplicate the constraint - let unique_constraints: Vec<_> = normalized - .constraints - .iter() - .filter(|c| matches!(c, TableConstraint::Unique { .. 
})) - .collect(); - assert_eq!(unique_constraints.len(), 1); - } - - #[test] - fn normalize_inline_foreign_key_already_exists() { - // Test that existing foreign key constraint is not duplicated - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::Object(ForeignKeyDef { - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - })); - - let table = TableDef { - name: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - user_id_col, - ], - constraints: vec![TableConstraint::ForeignKey { - name: None, - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }], - }; - - let normalized = table.normalize().unwrap(); - // Should not duplicate the foreign key - let fk_constraints: Vec<_> = normalized - .constraints - .iter() - .filter(|c| matches!(c, TableConstraint::ForeignKey { .. })) - .collect(); - assert_eq!(fk_constraints.len(), 1); - } - - #[test] - fn normalize_duplicate_index_same_column_str() { - // Same index name applied to the same column multiple times should error - // This tests inline index duplicate, not table-level index - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.index = Some(StrOrBoolOrArray::Str("idx1".into())); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1.clone(), - { - // Same column with same index name again - let mut c = col1.clone(); - c.index = Some(StrOrBoolOrArray::Str("idx1".into())); - c - }, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::DuplicateIndexColumn { - index_name, - column_name, - }) = result - { - assert_eq!(index_name, "idx1"); - assert_eq!(column_name, "col1"); - } else { - panic!("Expected DuplicateIndexColumn error"); - } - } - - #[test] - fn normalize_duplicate_index_same_column_array() { - // Same index name in array applied to the same column should error - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.index = Some(StrOrBoolOrArray::Array(vec!["idx1".into(), "idx1".into()])); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::DuplicateIndexColumn { - index_name, - column_name, - }) = result - { - assert_eq!(index_name, "idx1"); - assert_eq!(column_name, "col1"); - } else { - panic!("Expected DuplicateIndexColumn error"); - } - } - - #[test] - fn normalize_duplicate_index_same_column_multiple_definitions() { - // Same index name applied to the same column in different ways should error - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.index = Some(StrOrBoolOrArray::Str("idx1".into())); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1.clone(), - { - let mut c = col1.clone(); - c.index = Some(StrOrBoolOrArray::Array(vec!["idx1".into()])); - c - }, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::DuplicateIndexColumn { - index_name, - column_name, - }) = 
result - { - assert_eq!(index_name, "idx1"); - assert_eq!(column_name, "col1"); - } else { - panic!("Expected DuplicateIndexColumn error"); - } - } - - #[test] - fn test_table_validation_error_display() { - let error = TableValidationError::DuplicateIndexColumn { - index_name: "idx_test".into(), - column_name: "col1".into(), - }; - let error_msg = format!("{}", error); - assert!(error_msg.contains("idx_test")); - assert!(error_msg.contains("col1")); - assert!(error_msg.contains("Duplicate index")); - } - - #[test] - fn normalize_inline_unique_str_with_different_constraint_type() { - // Test that other constraint types don't match in the exists check - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.unique = Some(StrOrBoolOrArray::Str("uq_email".into())); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![ - // Add a PrimaryKey constraint (different type) - should not match - TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }, - ], - }; - - let normalized = table.normalize().unwrap(); - // Should have: PrimaryKey (existing) + Unique (new) - assert_eq!(normalized.constraints.len(), 2); - } - - #[test] - fn normalize_inline_unique_array_with_different_constraint_type() { - // Test that other constraint types don't match in the exists check for Array case - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1, - ], - constraints: vec![ - // Add a PrimaryKey constraint (different type) - should not match - TableConstraint::PrimaryKey { - auto_increment: false, - columns: vec!["id".into()], - }, - ], - }; - - let normalized = table.normalize().unwrap(); - // Should have: PrimaryKey (existing) + Unique (new) - assert_eq!(normalized.constraints.len(), 2); - } - - #[test] - fn normalize_duplicate_index_bool_true_same_column() { - // Test that Bool(true) with duplicate on same column errors - let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); - col1.index = Some(StrOrBoolOrArray::Bool(true)); - - let table = TableDef { - name: "test".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col1.clone(), - { - // Same column with Bool(true) again - let mut c = col1.clone(); - c.index = Some(StrOrBoolOrArray::Bool(true)); - c - }, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::DuplicateIndexColumn { - index_name, - column_name, - }) = result - { - // The group key for auto-generated indexes is "__auto_{column}" - assert!(index_name.contains("__auto_")); - assert!(index_name.contains("col1")); - assert_eq!(column_name, "col1"); - } else { - panic!("Expected DuplicateIndexColumn error"); - } - } - - #[test] - fn normalize_inline_foreign_key_string_syntax() { - // Test ForeignKeySyntax::String with valid "table.column" format - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::String("users.id".into())); - - let table = TableDef { - name: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - user_id_col, - ], - constraints: vec![], - 
}; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::ForeignKey { - name: None, - columns, - ref_table, - ref_columns, - on_delete: None, - on_update: None, - } if columns == &["user_id".to_string()] - && ref_table == "users" - && ref_columns == &["id".to_string()] - )); - } - - #[test] - fn normalize_inline_foreign_key_invalid_format_no_dot() { - // Test ForeignKeySyntax::String with invalid format (no dot) - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::String("usersid".into())); - - let table = TableDef { - name: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - user_id_col, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { - assert_eq!(column_name, "user_id"); - assert_eq!(value, "usersid"); - } else { - panic!("Expected InvalidForeignKeyFormat error"); - } - } - - #[test] - fn normalize_inline_foreign_key_invalid_format_empty_table() { - // Test ForeignKeySyntax::String with empty table part - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::String(".id".into())); - - let table = TableDef { - name: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - user_id_col, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { - assert_eq!(column_name, "user_id"); - assert_eq!(value, ".id"); - } else { - panic!("Expected InvalidForeignKeyFormat error"); - } - } - - #[test] - fn normalize_inline_foreign_key_invalid_format_empty_column() { - // Test ForeignKeySyntax::String with empty column part - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::String("users.".into())); - - let table = TableDef { - name: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - user_id_col, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { - assert_eq!(column_name, "user_id"); - assert_eq!(value, "users."); - } else { - panic!("Expected InvalidForeignKeyFormat error"); - } - } - - #[test] - fn normalize_inline_foreign_key_invalid_format_too_many_parts() { - // Test ForeignKeySyntax::String with too many parts - let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); - user_id_col.foreign_key = Some(ForeignKeySyntax::String("schema.users.id".into())); - - let table = TableDef { - name: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - user_id_col, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { - assert_eq!(column_name, "user_id"); - assert_eq!(value, "schema.users.id"); - } else { - panic!("Expected InvalidForeignKeyFormat error"); - } - } - - #[test] - fn 
normalize_inline_primary_key_with_auto_increment() { - use crate::schema::primary_key::PrimaryKeyDef; - - let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); - id_col.primary_key = Some(PrimaryKeySyntax::Object(PrimaryKeyDef { - auto_increment: true, - })); - - let table = TableDef { - name: "users".into(), - columns: vec![ - id_col, - col("name", ColumnType::Simple(SimpleColumnType::Text)), - ], - constraints: vec![], - }; - - let normalized = table.normalize().unwrap(); - assert_eq!(normalized.constraints.len(), 1); - assert!(matches!( - &normalized.constraints[0], - TableConstraint::PrimaryKey { auto_increment: true, columns } if columns == &["id".to_string()] - )); - } - - #[test] - fn normalize_duplicate_inline_index_on_same_column() { - // This test triggers the DuplicateIndexColumn error (lines 251-253) - // by having the same column appear twice in the same named index group - use crate::schema::str_or_bool::StrOrBoolOrArray; - - // Create a column that references the same index name twice (via Array) - let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); - email_col.index = Some(StrOrBoolOrArray::Array(vec![ - "idx_email".into(), - "idx_email".into(), // Duplicate reference - ])); - - let table = TableDef { - name: "users".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - email_col, - ], - constraints: vec![], - }; - - let result = table.normalize(); - assert!(result.is_err()); - if let Err(TableValidationError::DuplicateIndexColumn { - index_name, - column_name, - }) = result - { - assert_eq!(index_name, "idx_email"); - assert_eq!(column_name, "email"); - } else { - panic!("Expected DuplicateIndexColumn error, got: {:?}", result); - } - } - - #[test] - fn test_invalid_foreign_key_format_error_display() { - let error = TableValidationError::InvalidForeignKeyFormat { - column_name: "user_id".into(), - value: "invalid".into(), - }; - let error_msg = format!("{}", error); - assert!(error_msg.contains("user_id")); - assert!(error_msg.contains("invalid")); - assert!(error_msg.contains("table.column")); - } -} +use schemars::JsonSchema; + +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, HashSet}; + +use crate::schema::{ + StrOrBoolOrArray, column::ColumnDef, constraint::TableConstraint, + foreign_key::ForeignKeySyntax, names::TableName, primary_key::PrimaryKeySyntax, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TableValidationError { + DuplicateIndexColumn { + index_name: String, + column_name: String, + }, + InvalidForeignKeyFormat { + column_name: String, + value: String, + }, +} + +impl std::fmt::Display for TableValidationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TableValidationError::DuplicateIndexColumn { + index_name, + column_name, + } => { + write!( + f, + "Duplicate index '{}' on column '{}': the same index name cannot be applied to the same column multiple times", + index_name, column_name + ) + } + TableValidationError::InvalidForeignKeyFormat { column_name, value } => { + write!( + f, + "Invalid foreign key format '{}' on column '{}': expected 'table.column' format", + value, column_name + ) + } + } + } +} + +impl std::error::Error for TableValidationError {} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct TableDef { + pub name: TableName, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option<String>, + pub 
columns: Vec<ColumnDef>, + pub constraints: Vec<TableConstraint>, +} + +impl TableDef { + /// Normalizes inline column constraints (primary_key, unique, index, foreign_key) + /// into table-level constraints. + /// Returns a new TableDef with all inline constraints converted to table-level. + /// + /// # Errors + /// + /// Returns an error if the same index name is applied to the same column multiple times. + pub fn normalize(&self) -> Result<TableDef, TableValidationError> { + let mut constraints = self.constraints.clone(); + + // Collect columns with inline primary_key and check for auto_increment + let mut pk_columns: Vec<String> = Vec::new(); + let mut pk_auto_increment = false; + + for col in &self.columns { + if let Some(ref pk) = col.primary_key { + match pk { + PrimaryKeySyntax::Bool(true) => { + pk_columns.push(col.name.clone()); + } + PrimaryKeySyntax::Bool(false) => {} + PrimaryKeySyntax::Object(pk_def) => { + pk_columns.push(col.name.clone()); + if pk_def.auto_increment { + pk_auto_increment = true; + } + } + } + } + } + + // Add primary key constraint if any columns have inline pk and no existing pk constraint. + if !pk_columns.is_empty() { + let has_pk_constraint = constraints + .iter() + .any(|c| matches!(c, TableConstraint::PrimaryKey { .. })); + + if !has_pk_constraint { + constraints.push(TableConstraint::PrimaryKey { + auto_increment: pk_auto_increment, + columns: pk_columns, + }); + } + } + + // Group columns by unique constraint name to create composite unique constraints + // Use same pattern as index grouping + let mut unique_groups: HashMap<String, Vec<String>> = HashMap::new(); + let mut unique_order: Vec<String> = Vec::new(); // Preserve order of first occurrence + + for col in &self.columns { + if let Some(ref unique_val) = col.unique { + match unique_val { + StrOrBoolOrArray::Str(name) => { + // Named unique constraint - group by name for composite constraints + let unique_name = name.clone(); + + if !unique_groups.contains_key(&unique_name) { + unique_order.push(unique_name.clone()); + } + + unique_groups + .entry(unique_name) + .or_default() + .push(col.name.clone()); + } + StrOrBoolOrArray::Bool(true) => { + // Use special marker for auto-generated unique constraints (without custom name) + let group_key = format!("__auto_{}", col.name); + + if !unique_groups.contains_key(&group_key) { + unique_order.push(group_key.clone()); + } + + unique_groups + .entry(group_key) + .or_default() + .push(col.name.clone()); + } + StrOrBoolOrArray::Bool(false) => continue, + StrOrBoolOrArray::Array(names) => { + // Array format: each element is a constraint name + // This column will be part of all these named constraints + for unique_name in names { + if !unique_groups.contains_key(unique_name.as_str()) { + unique_order.push(unique_name.clone()); + } + + unique_groups + .entry(unique_name.clone()) + .or_default() + .push(col.name.clone()); + } + } + } + } + } + + // Create unique constraints from grouped columns in order + for unique_name in unique_order { + let columns = unique_groups.get(&unique_name).unwrap().clone(); + + // Determine if this is an auto-generated unique (from unique: true) + // or a named unique (from unique: "name") + let constraint_name = if unique_name.starts_with("__auto_") { + // Auto-generated unique - use None so SQL generation can create the name + None + } else { + // Named unique - preserve the custom name + Some(unique_name.clone()) + }; + + // Check if this unique constraint already exists + let exists = constraints.iter().any(|c| { + if let TableConstraint::Unique { + name, + columns: cols, + } = c + { + // Match by name if both have 
names, otherwise match by columns + match (&constraint_name, name) { + (Some(n1), Some(n2)) => n1 == n2, + (None, None) => cols == &columns, + _ => false, + } + } else { + false + } + }); + + if !exists { + constraints.push(TableConstraint::Unique { + name: constraint_name, + columns, + }); + } + } + + // Process inline foreign_key and index for each column + for col in &self.columns { + // Handle inline foreign_key + if let Some(ref fk_syntax) = col.foreign_key { + // Convert ForeignKeySyntax to ForeignKeyDef + let (ref_table, ref_columns, on_delete, on_update) = match fk_syntax { + ForeignKeySyntax::String(s) => { + // Parse "table.column" format + let parts: Vec<&str> = s.split('.').collect(); + if parts.len() != 2 || parts[0].is_empty() || parts[1].is_empty() { + return Err(TableValidationError::InvalidForeignKeyFormat { + column_name: col.name.clone(), + value: s.clone(), + }); + } + (parts[0].to_string(), vec![parts[1].to_string()], None, None) + } + ForeignKeySyntax::Object(fk_def) => ( + fk_def.ref_table.clone(), + fk_def.ref_columns.clone(), + fk_def.on_delete.clone(), + fk_def.on_update.clone(), + ), + }; + + // Check if this foreign key already exists + let exists = constraints.iter().any(|c| { + if let TableConstraint::ForeignKey { columns, .. } = c { + columns.len() == 1 && columns[0] == col.name + } else { + false + } + }); + + if !exists { + constraints.push(TableConstraint::ForeignKey { + name: None, + columns: vec![col.name.clone()], + ref_table, + ref_columns, + on_delete, + on_update, + }); + } + } + } + + // Group columns by index name to create composite indexes + // Use a HashMap to group, but preserve column order by tracking first occurrence + let mut index_groups: HashMap<String, Vec<String>> = HashMap::new(); + let mut index_order: Vec<String> = Vec::new(); // Preserve order of first occurrence + // Track which columns are already in each index from inline definitions to detect duplicates + // Only track inline definitions, not existing table-level indexes (they can be extended) + let mut inline_index_column_tracker: HashMap<String, HashSet<String>> = HashMap::new(); + + for col in &self.columns { + if let Some(ref index_val) = col.index { + match index_val { + StrOrBoolOrArray::Str(name) => { + // Named index - group by name + let index_name = name.clone(); + + // Check for duplicate - only check inline definitions, not existing table-level indexes + if let Some(columns) = inline_index_column_tracker.get(name.as_str()) + && columns.contains(col.name.as_str()) + { + return Err(TableValidationError::DuplicateIndexColumn { + index_name: name.clone(), + column_name: col.name.clone(), + }); + } + + if !index_groups.contains_key(&index_name) { + index_order.push(index_name.clone()); + } + + index_groups + .entry(index_name.clone()) + .or_default() + .push(col.name.clone()); + + inline_index_column_tracker + .entry(index_name) + .or_default() + .insert(col.name.clone()); + } + StrOrBoolOrArray::Bool(true) => { + // Use special marker for auto-generated indexes (without custom name) + // We use the column name as a unique key to group, but will use None for the constraint name + // This allows SQL generation to auto-generate the name based on naming conventions + let group_key = format!("__auto_{}", col.name); + + // Check for duplicate - only check inline definitions + if let Some(columns) = inline_index_column_tracker.get(group_key.as_str()) + && columns.contains(col.name.as_str()) + { + return Err(TableValidationError::DuplicateIndexColumn { + index_name: group_key.clone(), + column_name: col.name.clone(), + }); + } + 
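// Editor's note (illustration only, not part of the patch): columns declared with
// `index: true` are grouped under a synthetic "__auto_<column>" key. The prefix is an
// internal grouping device; when the groups are materialized into constraints below,
// "__auto_"-prefixed keys are emitted as `TableConstraint::Index { name: None, .. }` so
// the SQL layer can derive the index name from its naming conventions, while
// string-named indexes keep their custom name. Roughly:
//
//     index: true  on "email"       =>  key "__auto_email"  =>  Index { name: None,        columns: ["email"] }
//     index: "tup" on "a" and "b"   =>  key "tup"           =>  Index { name: Some("tup"), columns: ["a", "b"] }
//
// The duplicate check just above is the path exercised by the
// `normalize_duplicate_index_bool_true_same_column` test later in this file.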
+ if !index_groups.contains_key(&group_key) { + index_order.push(group_key.clone()); + } + + index_groups + .entry(group_key.clone()) + .or_default() + .push(col.name.clone()); + + inline_index_column_tracker + .entry(group_key) + .or_default() + .insert(col.name.clone()); + } + StrOrBoolOrArray::Bool(false) => continue, + StrOrBoolOrArray::Array(names) => { + // Array format: each element is an index name + // This column will be part of all these named indexes + // Check for duplicates within the array + let mut seen_in_array = HashSet::new(); + for index_name in names { + // Check for duplicate within the same array + if seen_in_array.contains(index_name.as_str()) { + return Err(TableValidationError::DuplicateIndexColumn { + index_name: index_name.clone(), + column_name: col.name.clone(), + }); + } + seen_in_array.insert(index_name.clone()); + + // Check for duplicate across different inline definitions + // Only check inline definitions, not existing table-level indexes + if let Some(columns) = + inline_index_column_tracker.get(index_name.as_str()) + && columns.contains(col.name.as_str()) + { + return Err(TableValidationError::DuplicateIndexColumn { + index_name: index_name.clone(), + column_name: col.name.clone(), + }); + } + + if !index_groups.contains_key(index_name.as_str()) { + index_order.push(index_name.clone()); + } + + index_groups + .entry(index_name.clone()) + .or_default() + .push(col.name.clone()); + + inline_index_column_tracker + .entry(index_name.clone()) + .or_default() + .insert(col.name.clone()); + } + } + } + } + } + + // Create indexes from grouped columns in order + for index_name in index_order { + let columns = index_groups.get(&index_name).unwrap().clone(); + + // Determine if this is an auto-generated index (from index: true) + // or a named index (from index: "name") + let constraint_name = if index_name.starts_with("__auto_") { + // Auto-generated index - use None so SQL generation can create the name + None + } else { + // Named index - preserve the custom name + Some(index_name.clone()) + }; + + // Check if this index already exists + let exists = constraints.iter().any(|c| { + if let TableConstraint::Index { + name, + columns: cols, + } = c + { + // Match by name if both have names, otherwise match by columns + match (&constraint_name, name) { + (Some(n1), Some(n2)) => n1 == n2, + (None, None) => cols == &columns, + _ => false, + } + } else { + false + } + }); + + if !exists { + constraints.push(TableConstraint::Index { + name: constraint_name, + columns, + }); + } + } + + Ok(TableDef { + name: self.name.clone(), + description: self.description.clone(), + columns: self.columns.clone(), + constraints, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::schema::column::{ColumnType, SimpleColumnType}; + use crate::schema::foreign_key::{ForeignKeyDef, ForeignKeySyntax}; + use crate::schema::primary_key::PrimaryKeySyntax; + use crate::schema::reference::ReferenceAction; + use crate::schema::str_or_bool::StrOrBoolOrArray; + + fn col(name: &str, ty: ColumnType) -> ColumnDef { + ColumnDef { + name: name.to_string(), + r#type: ty, + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + } + } + + #[test] + fn normalize_inline_primary_key() { + let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); + id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + id_col, + 
col("name", ColumnType::Simple(SimpleColumnType::Text)), + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::PrimaryKey { columns, .. } if columns == &["id".to_string()] + )); + } + + #[test] + fn normalize_multiple_inline_primary_keys() { + let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); + id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); + + let mut tenant_col = col("tenant_id", ColumnType::Simple(SimpleColumnType::Integer)); + tenant_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![id_col, tenant_col], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::PrimaryKey { columns, .. } if columns == &["id".to_string(), "tenant_id".to_string()] + )); + } + + #[test] + fn normalize_does_not_duplicate_existing_pk() { + let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); + id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![id_col], + constraints: vec![TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + } + + #[test] + fn normalize_ignores_primary_key_false() { + let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); + id_col.primary_key = Some(PrimaryKeySyntax::Bool(false)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + id_col, + col("name", ColumnType::Simple(SimpleColumnType::Text)), + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + // primary_key: false should be ignored, so no primary key constraint should be added + assert_eq!(normalized.constraints.len(), 0); + } + + #[test] + fn normalize_inline_unique_bool() { + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Bool(true)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::Unique { name: None, columns } if columns == &["email".to_string()] + )); + } + + #[test] + fn normalize_inline_unique_with_name() { + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Str("uq_users_email".into())); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::Unique { name: Some(n), columns } + if n == "uq_users_email" && columns == &["email".to_string()] + )); + } + + #[test] + fn normalize_composite_unique_from_string_name() { + // Test that multiple 
columns with the same unique constraint name + // are grouped into a single composite unique constraint + let mut route_col = col("join_route", ColumnType::Simple(SimpleColumnType::Text)); + route_col.unique = Some(StrOrBoolOrArray::Str("route_provider_id".into())); + + let mut provider_col = col("provider_id", ColumnType::Simple(SimpleColumnType::Text)); + provider_col.unique = Some(StrOrBoolOrArray::Str("route_provider_id".into())); + + let table = TableDef { + name: "user".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + route_col, + provider_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::Unique { name: Some(n), columns } + if n == "route_provider_id" + && columns == &["join_route".to_string(), "provider_id".to_string()] + )); + } + + #[test] + fn normalize_unique_name_mismatch_creates_both_constraints() { + // Test coverage for line 181: When an inline unique has a name but existing doesn't (or vice versa), + // they should not match and both constraints should be created + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Str("named_unique".into())); + + let table = TableDef { + name: "user".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![ + // Existing unnamed unique constraint on same column + TableConstraint::Unique { + name: None, + columns: vec!["email".into()], + }, + ], + }; + + let normalized = table.normalize().unwrap(); + + // Should have 2 unique constraints: one named, one unnamed + let unique_constraints: Vec<_> = normalized + .constraints + .iter() + .filter(|c| matches!(c, TableConstraint::Unique { .. })) + .collect(); + assert_eq!( + unique_constraints.len(), + 2, + "Should keep both named and unnamed unique constraints as they don't match" + ); + + // Verify we have one named and one unnamed + let has_named = unique_constraints.iter().any( + |c| matches!(c, TableConstraint::Unique { name: Some(n), .. } if n == "named_unique"), + ); + let has_unnamed = unique_constraints + .iter() + .any(|c| matches!(c, TableConstraint::Unique { name: None, .. })); + + assert!(has_named, "Should have named unique constraint"); + assert!(has_unnamed, "Should have unnamed unique constraint"); + } + + #[test] + fn normalize_inline_index_bool() { + let mut name_col = col("name", ColumnType::Simple(SimpleColumnType::Text)); + name_col.index = Some(StrOrBoolOrArray::Bool(true)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + name_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + // Count Index constraints + let indexes: Vec<_> = normalized + .constraints + .iter() + .filter(|c| matches!(c, TableConstraint::Index { .. 
})) + .collect(); + assert_eq!(indexes.len(), 1); + // Auto-generated indexes (from index: true) should have name: None + // SQL generation will create the actual name based on naming conventions + assert!(matches!( + indexes[0], + TableConstraint::Index { name: None, columns } + if columns == &["name".to_string()] + )); + } + + #[test] + fn normalize_inline_index_with_name() { + let mut name_col = col("name", ColumnType::Simple(SimpleColumnType::Text)); + name_col.index = Some(StrOrBoolOrArray::Str("custom_idx_name".into())); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + name_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + let indexes: Vec<_> = normalized + .constraints + .iter() + .filter(|c| matches!(c, TableConstraint::Index { .. })) + .collect(); + assert_eq!(indexes.len(), 1); + assert!(matches!( + indexes[0], + TableConstraint::Index { name: Some(n), .. } + if n == "custom_idx_name" + )); + } + + #[test] + fn normalize_inline_foreign_key() { + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::Object(ForeignKeyDef { + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: Some(ReferenceAction::Cascade), + on_update: None, + })); + + let table = TableDef { + name: "posts".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + user_id_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::ForeignKey { + name: None, + columns, + ref_table, + ref_columns, + on_delete: Some(ReferenceAction::Cascade), + on_update: None, + } if columns == &["user_id".to_string()] + && ref_table == "users" + && ref_columns == &["id".to_string()] + )); + } + + #[test] + fn normalize_all_inline_constraints() { + let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); + id_col.primary_key = Some(PrimaryKeySyntax::Bool(true)); + + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Bool(true)); + + let mut name_col = col("name", ColumnType::Simple(SimpleColumnType::Text)); + name_col.index = Some(StrOrBoolOrArray::Bool(true)); + + let mut user_id_col = col("org_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::Object(ForeignKeyDef { + ref_table: "orgs".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + })); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![id_col, email_col, name_col, user_id_col], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + // Should have: PrimaryKey, Unique, ForeignKey, Index + // Count non-Index constraints + let non_index_constraints: Vec<_> = normalized + .constraints + .iter() + .filter(|c| !matches!(c, TableConstraint::Index { .. })) + .collect(); + assert_eq!(non_index_constraints.len(), 3); + // Should have: 1 index + let indexes: Vec<_> = normalized + .constraints + .iter() + .filter(|c| matches!(c, TableConstraint::Index { .. 
})) + .collect(); + assert_eq!(indexes.len(), 1); + } + + #[test] + fn normalize_composite_index_from_string_name() { + let mut updated_at_col = col( + "updated_at", + ColumnType::Simple(SimpleColumnType::Timestamp), + ); + updated_at_col.index = Some(StrOrBoolOrArray::Str("tuple".into())); + + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.index = Some(StrOrBoolOrArray::Str("tuple".into())); + + let table = TableDef { + name: "post".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + updated_at_col, + user_id_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + let indexes: Vec<_> = normalized + .constraints + .iter() + .filter_map(|c| { + if let TableConstraint::Index { name, columns } = c { + Some((name.clone(), columns.clone())) + } else { + None + } + }) + .collect(); + assert_eq!(indexes.len(), 1); + assert_eq!(indexes[0].0, Some("tuple".to_string())); + assert_eq!( + indexes[0].1, + vec!["updated_at".to_string(), "user_id".to_string()] + ); + } + + #[test] + fn normalize_multiple_different_indexes() { + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.index = Some(StrOrBoolOrArray::Str("idx_a".into())); + + let mut col2 = col("col2", ColumnType::Simple(SimpleColumnType::Text)); + col2.index = Some(StrOrBoolOrArray::Str("idx_a".into())); + + let mut col3 = col("col3", ColumnType::Simple(SimpleColumnType::Text)); + col3.index = Some(StrOrBoolOrArray::Str("idx_b".into())); + + let mut col4 = col("col4", ColumnType::Simple(SimpleColumnType::Text)); + col4.index = Some(StrOrBoolOrArray::Bool(true)); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1, + col2, + col3, + col4, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + let indexes: Vec<_> = normalized + .constraints + .iter() + .filter_map(|c| { + if let TableConstraint::Index { name, columns } = c { + Some((name.clone(), columns.clone())) + } else { + None + } + }) + .collect(); + assert_eq!(indexes.len(), 3); + + // Check idx_a composite index + let idx_a = indexes + .iter() + .find(|(n, _)| n == &Some("idx_a".to_string())) + .unwrap(); + assert_eq!(idx_a.1, vec!["col1".to_string(), "col2".to_string()]); + + // Check idx_b single column index + let idx_b = indexes + .iter() + .find(|(n, _)| n == &Some("idx_b".to_string())) + .unwrap(); + assert_eq!(idx_b.1, vec!["col3".to_string()]); + + // Check auto-generated index for col4 (should have name: None) + let idx_col4 = indexes.iter().find(|(n, _)| n.is_none()).unwrap(); + assert_eq!(idx_col4.1, vec!["col4".to_string()]); + } + + #[test] + fn normalize_false_values_are_ignored() { + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Bool(false)); + email_col.index = Some(StrOrBoolOrArray::Bool(false)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 0); + } + + #[test] + fn normalize_table_without_primary_key() { + // Test normalize with a table that has no primary key columns + // This should cover lines 67-69, 72-73, and 93 (pk_columns.is_empty() branch) + let table = 
TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("name", ColumnType::Simple(SimpleColumnType::Text)), + col("email", ColumnType::Simple(SimpleColumnType::Text)), + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + // Should not add any primary key constraint + assert_eq!(normalized.constraints.len(), 0); + } + + #[test] + fn normalize_multiple_indexes_from_same_array() { + // Multiple columns with same array of index names should create multiple composite indexes + let mut updated_at_col = col( + "updated_at", + ColumnType::Simple(SimpleColumnType::Timestamp), + ); + updated_at_col.index = Some(StrOrBoolOrArray::Array(vec![ + "tuple".into(), + "tuple2".into(), + ])); + + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.index = Some(StrOrBoolOrArray::Array(vec![ + "tuple".into(), + "tuple2".into(), + ])); + + let table = TableDef { + name: "post".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + updated_at_col, + user_id_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + // Should have: tuple (composite: updated_at, user_id), tuple2 (composite: updated_at, user_id) + let indexes: Vec<_> = normalized + .constraints + .iter() + .filter_map(|c| { + if let TableConstraint::Index { name, columns } = c { + Some((name.clone(), columns.clone())) + } else { + None + } + }) + .collect(); + assert_eq!(indexes.len(), 2); + + let tuple_idx = indexes + .iter() + .find(|(n, _)| n == &Some("tuple".to_string())) + .unwrap(); + let mut sorted_cols = tuple_idx.1.clone(); + sorted_cols.sort(); + assert_eq!( + sorted_cols, + vec!["updated_at".to_string(), "user_id".to_string()] + ); + + let tuple2_idx = indexes + .iter() + .find(|(n, _)| n == &Some("tuple2".to_string())) + .unwrap(); + let mut sorted_cols2 = tuple2_idx.1.clone(); + sorted_cols2.sort(); + assert_eq!( + sorted_cols2, + vec!["updated_at".to_string(), "user_id".to_string()] + ); + } + + #[test] + fn normalize_inline_unique_with_array_existing_constraint() { + // Test Array format where constraint already exists - should add column to existing + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); + + let mut col2 = col("col2", ColumnType::Simple(SimpleColumnType::Text)); + col2.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1, + col2, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + let unique_constraint = &normalized.constraints[0]; + assert!(matches!( + unique_constraint, + TableConstraint::Unique { name: Some(n), columns: _ } + if n == "uq_group" + )); + if let TableConstraint::Unique { columns, .. 
} = unique_constraint { + let mut sorted_cols = columns.clone(); + sorted_cols.sort(); + assert_eq!(sorted_cols, vec!["col1".to_string(), "col2".to_string()]); + } + } + + #[test] + fn normalize_inline_unique_with_array_column_already_in_constraint() { + // Test Array format where column is already in constraint - should not duplicate + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1.clone(), + ], + constraints: vec![], + }; + + let normalized1 = table.normalize().unwrap(); + assert_eq!(normalized1.constraints.len(), 1); + + // Add same column again - should not create duplicate + let table2 = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1, + ], + constraints: normalized1.constraints.clone(), + }; + + let normalized2 = table2.normalize().unwrap(); + assert_eq!(normalized2.constraints.len(), 1); + if let TableConstraint::Unique { columns, .. } = &normalized2.constraints[0] { + assert_eq!(columns.len(), 1); + assert_eq!(columns[0], "col1"); + } + } + + #[test] + fn normalize_inline_unique_str_already_exists() { + // Test that existing unique constraint with same name and column is not duplicated + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Str("uq_email".into())); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }], + }; + + let normalized = table.normalize().unwrap(); + // Should not duplicate the constraint + let unique_constraints: Vec<_> = normalized + .constraints + .iter() + .filter(|c| matches!(c, TableConstraint::Unique { .. })) + .collect(); + assert_eq!(unique_constraints.len(), 1); + } + + #[test] + fn normalize_inline_unique_bool_already_exists() { + // Test that existing unnamed unique constraint with same column is not duplicated + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Bool(true)); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![TableConstraint::Unique { + name: None, + columns: vec!["email".into()], + }], + }; + + let normalized = table.normalize().unwrap(); + // Should not duplicate the constraint + let unique_constraints: Vec<_> = normalized + .constraints + .iter() + .filter(|c| matches!(c, TableConstraint::Unique { .. 
})) + .collect(); + assert_eq!(unique_constraints.len(), 1); + } + + #[test] + fn normalize_inline_foreign_key_already_exists() { + // Test that existing foreign key constraint is not duplicated + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::Object(ForeignKeyDef { + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + })); + + let table = TableDef { + name: "posts".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + user_id_col, + ], + constraints: vec![TableConstraint::ForeignKey { + name: None, + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }], + }; + + let normalized = table.normalize().unwrap(); + // Should not duplicate the foreign key + let fk_constraints: Vec<_> = normalized + .constraints + .iter() + .filter(|c| matches!(c, TableConstraint::ForeignKey { .. })) + .collect(); + assert_eq!(fk_constraints.len(), 1); + } + + #[test] + fn normalize_duplicate_index_same_column_str() { + // Same index name applied to the same column multiple times should error + // This tests inline index duplicate, not table-level index + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.index = Some(StrOrBoolOrArray::Str("idx1".into())); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1.clone(), + { + // Same column with same index name again + let mut c = col1.clone(); + c.index = Some(StrOrBoolOrArray::Str("idx1".into())); + c + }, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let Err(TableValidationError::DuplicateIndexColumn { + index_name, + column_name, + }) = result + { + assert_eq!(index_name, "idx1"); + assert_eq!(column_name, "col1"); + } else { + panic!("Expected DuplicateIndexColumn error"); + } + } + + #[test] + fn normalize_duplicate_index_same_column_array() { + // Same index name in array applied to the same column should error + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.index = Some(StrOrBoolOrArray::Array(vec!["idx1".into(), "idx1".into()])); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let Err(TableValidationError::DuplicateIndexColumn { + index_name, + column_name, + }) = result + { + assert_eq!(index_name, "idx1"); + assert_eq!(column_name, "col1"); + } else { + panic!("Expected DuplicateIndexColumn error"); + } + } + + #[test] + fn normalize_duplicate_index_same_column_multiple_definitions() { + // Same index name applied to the same column in different ways should error + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.index = Some(StrOrBoolOrArray::Str("idx1".into())); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1.clone(), + { + let mut c = col1.clone(); + c.index = Some(StrOrBoolOrArray::Array(vec!["idx1".into()])); + c + }, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let 
Err(TableValidationError::DuplicateIndexColumn { + index_name, + column_name, + }) = result + { + assert_eq!(index_name, "idx1"); + assert_eq!(column_name, "col1"); + } else { + panic!("Expected DuplicateIndexColumn error"); + } + } + + #[test] + fn test_table_validation_error_display() { + let error = TableValidationError::DuplicateIndexColumn { + index_name: "idx_test".into(), + column_name: "col1".into(), + }; + let error_msg = format!("{}", error); + assert!(error_msg.contains("idx_test")); + assert!(error_msg.contains("col1")); + assert!(error_msg.contains("Duplicate index")); + } + + #[test] + fn normalize_inline_unique_str_with_different_constraint_type() { + // Test that other constraint types don't match in the exists check + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.unique = Some(StrOrBoolOrArray::Str("uq_email".into())); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![ + // Add a PrimaryKey constraint (different type) - should not match + TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }, + ], + }; + + let normalized = table.normalize().unwrap(); + // Should have: PrimaryKey (existing) + Unique (new) + assert_eq!(normalized.constraints.len(), 2); + } + + #[test] + fn normalize_inline_unique_array_with_different_constraint_type() { + // Test that other constraint types don't match in the exists check for Array case + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.unique = Some(StrOrBoolOrArray::Array(vec!["uq_group".into()])); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1, + ], + constraints: vec![ + // Add a PrimaryKey constraint (different type) - should not match + TableConstraint::PrimaryKey { + auto_increment: false, + columns: vec!["id".into()], + }, + ], + }; + + let normalized = table.normalize().unwrap(); + // Should have: PrimaryKey (existing) + Unique (new) + assert_eq!(normalized.constraints.len(), 2); + } + + #[test] + fn normalize_duplicate_index_bool_true_same_column() { + // Test that Bool(true) with duplicate on same column errors + let mut col1 = col("col1", ColumnType::Simple(SimpleColumnType::Text)); + col1.index = Some(StrOrBoolOrArray::Bool(true)); + + let table = TableDef { + name: "test".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col1.clone(), + { + // Same column with Bool(true) again + let mut c = col1.clone(); + c.index = Some(StrOrBoolOrArray::Bool(true)); + c + }, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let Err(TableValidationError::DuplicateIndexColumn { + index_name, + column_name, + }) = result + { + // The group key for auto-generated indexes is "__auto_{column}" + assert!(index_name.contains("__auto_")); + assert!(index_name.contains("col1")); + assert_eq!(column_name, "col1"); + } else { + panic!("Expected DuplicateIndexColumn error"); + } + } + + #[test] + fn normalize_inline_foreign_key_string_syntax() { + // Test ForeignKeySyntax::String with valid "table.column" format + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::String("users.id".into())); + + let table = TableDef 
{ + name: "posts".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + user_id_col, + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::ForeignKey { + name: None, + columns, + ref_table, + ref_columns, + on_delete: None, + on_update: None, + } if columns == &["user_id".to_string()] + && ref_table == "users" + && ref_columns == &["id".to_string()] + )); + } + + #[test] + fn normalize_inline_foreign_key_invalid_format_no_dot() { + // Test ForeignKeySyntax::String with invalid format (no dot) + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::String("usersid".into())); + + let table = TableDef { + name: "posts".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + user_id_col, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { + assert_eq!(column_name, "user_id"); + assert_eq!(value, "usersid"); + } else { + panic!("Expected InvalidForeignKeyFormat error"); + } + } + + #[test] + fn normalize_inline_foreign_key_invalid_format_empty_table() { + // Test ForeignKeySyntax::String with empty table part + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::String(".id".into())); + + let table = TableDef { + name: "posts".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + user_id_col, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { + assert_eq!(column_name, "user_id"); + assert_eq!(value, ".id"); + } else { + panic!("Expected InvalidForeignKeyFormat error"); + } + } + + #[test] + fn normalize_inline_foreign_key_invalid_format_empty_column() { + // Test ForeignKeySyntax::String with empty column part + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::String("users.".into())); + + let table = TableDef { + name: "posts".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + user_id_col, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { + assert_eq!(column_name, "user_id"); + assert_eq!(value, "users."); + } else { + panic!("Expected InvalidForeignKeyFormat error"); + } + } + + #[test] + fn normalize_inline_foreign_key_invalid_format_too_many_parts() { + // Test ForeignKeySyntax::String with too many parts + let mut user_id_col = col("user_id", ColumnType::Simple(SimpleColumnType::Integer)); + user_id_col.foreign_key = Some(ForeignKeySyntax::String("schema.users.id".into())); + + let table = TableDef { + name: "posts".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + user_id_col, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let 
Err(TableValidationError::InvalidForeignKeyFormat { column_name, value }) = result { + assert_eq!(column_name, "user_id"); + assert_eq!(value, "schema.users.id"); + } else { + panic!("Expected InvalidForeignKeyFormat error"); + } + } + + #[test] + fn normalize_inline_primary_key_with_auto_increment() { + use crate::schema::primary_key::PrimaryKeyDef; + + let mut id_col = col("id", ColumnType::Simple(SimpleColumnType::Integer)); + id_col.primary_key = Some(PrimaryKeySyntax::Object(PrimaryKeyDef { + auto_increment: true, + })); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + id_col, + col("name", ColumnType::Simple(SimpleColumnType::Text)), + ], + constraints: vec![], + }; + + let normalized = table.normalize().unwrap(); + assert_eq!(normalized.constraints.len(), 1); + assert!(matches!( + &normalized.constraints[0], + TableConstraint::PrimaryKey { auto_increment: true, columns } if columns == &["id".to_string()] + )); + } + + #[test] + fn normalize_duplicate_inline_index_on_same_column() { + // This test triggers the DuplicateIndexColumn error (lines 251-253) + // by having the same column appear twice in the same named index group + use crate::schema::str_or_bool::StrOrBoolOrArray; + + // Create a column that references the same index name twice (via Array) + let mut email_col = col("email", ColumnType::Simple(SimpleColumnType::Text)); + email_col.index = Some(StrOrBoolOrArray::Array(vec![ + "idx_email".into(), + "idx_email".into(), // Duplicate reference + ])); + + let table = TableDef { + name: "users".into(), + description: None, + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + email_col, + ], + constraints: vec![], + }; + + let result = table.normalize(); + assert!(result.is_err()); + if let Err(TableValidationError::DuplicateIndexColumn { + index_name, + column_name, + }) = result + { + assert_eq!(index_name, "idx_email"); + assert_eq!(column_name, "email"); + } else { + panic!("Expected DuplicateIndexColumn error, got: {:?}", result); + } + } + + #[test] + fn test_invalid_foreign_key_format_error_display() { + let error = TableValidationError::InvalidForeignKeyFormat { + column_name: "user_id".into(), + value: "invalid".into(), + }; + let error_msg = format!("{}", error); + assert!(error_msg.contains("user_id")); + assert!(error_msg.contains("invalid")); + assert!(error_msg.contains("table.column")); + } +} diff --git a/crates/vespertide-exporter/src/orm.rs b/crates/vespertide-exporter/src/orm.rs index f753b63..949760e 100644 --- a/crates/vespertide-exporter/src/orm.rs +++ b/crates/vespertide-exporter/src/orm.rs @@ -57,6 +57,7 @@ mod tests { fn simple_table() -> TableDef { TableDef { name: "test".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), diff --git a/crates/vespertide-exporter/src/seaorm/mod.rs b/crates/vespertide-exporter/src/seaorm/mod.rs index 1f9f4e4..4244c86 100644 --- a/crates/vespertide-exporter/src/seaorm/mod.rs +++ b/crates/vespertide-exporter/src/seaorm/mod.rs @@ -1224,6 +1224,7 @@ mod helper_tests { // media table without FK chain let media = TableDef { name: "media".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1253,6 +1254,7 @@ mod helper_tests { // media table let media = TableDef { name: "media".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1273,6 
+1275,7 @@ mod helper_tests { // article table with FK to media let article = TableDef { name: "article".into(), + description: None, columns: vec![ ColumnDef { name: "media_id".into(), @@ -1325,6 +1328,7 @@ mod helper_tests { use vespertide_core::{ColumnType, SimpleColumnType}; let media = TableDef { name: "media".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1361,6 +1365,7 @@ mod helper_tests { // media table let media = TableDef { name: "media".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1381,6 +1386,7 @@ mod helper_tests { // article table with FK to media let article = TableDef { name: "article".into(), + description: None, columns: vec![ ColumnDef { name: "media_id".into(), @@ -1424,6 +1430,7 @@ mod helper_tests { // article_user table with FK to article.media_id let article_user = TableDef { name: "article_user".into(), + description: None, columns: vec![ ColumnDef { name: "article_media_id".into(), @@ -1496,6 +1503,7 @@ mod helper_tests { // level_a (root) let level_a = TableDef { name: "level_a".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1516,6 +1524,7 @@ mod helper_tests { // level_b with FK to level_a let level_b = TableDef { name: "level_b".into(), + description: None, columns: vec![ColumnDef { name: "a_id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1546,6 +1555,7 @@ mod helper_tests { // level_c with FK to level_b let level_c = TableDef { name: "level_c".into(), + description: None, columns: vec![ColumnDef { name: "b_id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1587,6 +1597,7 @@ mod helper_tests { // user table let user = TableDef { name: "user".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1607,6 +1618,7 @@ mod helper_tests { // post table with FK to user (not PK, so has_many) let post = TableDef { name: "post".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), @@ -1666,6 +1678,7 @@ mod helper_tests { // user table let user = TableDef { name: "user".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1686,6 +1699,7 @@ mod helper_tests { // profile table with FK to user that is also the PK (one-to-one) let profile = TableDef { name: "profile".into(), + description: None, columns: vec![ ColumnDef { name: "user_id".into(), @@ -1745,6 +1759,7 @@ mod helper_tests { // user table let user = TableDef { name: "user".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), @@ -1765,6 +1780,7 @@ mod helper_tests { // settings table with unique FK to user (one-to-one via UNIQUE constraint) let settings = TableDef { name: "settings".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), @@ -1833,6 +1849,7 @@ mod tests { #[rstest] #[case("basic_single_pk", TableDef { name: "users".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, ColumnDef { name: "display_name".into(), r#type: ColumnType::Simple(SimpleColumnType::Text), nullable: 
true, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, @@ -1841,6 +1858,7 @@ mod tests { })] #[case("composite_pk", TableDef { name: "accounts".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, ColumnDef { name: "tenant_id".into(), r#type: ColumnType::Simple(SimpleColumnType::BigInt), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, @@ -1849,6 +1867,7 @@ mod tests { })] #[case("fk_single", TableDef { name: "posts".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, ColumnDef { name: "user_id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, @@ -1868,6 +1887,7 @@ mod tests { })] #[case("fk_composite", TableDef { name: "invoices".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, ColumnDef { name: "customer_id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, @@ -1887,6 +1907,7 @@ mod tests { })] #[case("inline_pk", TableDef { name: "users".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), nullable: false, default: Some("gen_random_uuid()".into()), comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { name: "email".into(), r#type: ColumnType::Simple(SimpleColumnType::Text), nullable: false, default: None, comment: None, primary_key: None, unique: Some(vespertide_core::StrOrBoolOrArray::Bool(true)), index: None, foreign_key: None }, @@ -1898,6 +1919,7 @@ mod tests { use vespertide_core::schema::reference::ReferenceAction; let mut table = TableDef { name: "article_user".into(), + description: None, columns: vec![ ColumnDef { name: "article_id".into(), @@ -1984,6 +2006,7 @@ mod tests { })] #[case("enum_type", TableDef { name: "orders".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { @@ -2005,6 +2028,7 @@ mod tests { })] #[case("enum_nullable", TableDef { name: "tasks".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { @@ -2026,6 +2050,7 @@ mod tests { })] #[case("enum_multiple_columns", TableDef { name: "products".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, 
default: None, comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { @@ -2061,6 +2086,7 @@ mod tests { })] #[case("enum_shared", TableDef { name: "documents".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { @@ -2096,6 +2122,7 @@ mod tests { })] #[case("enum_special_values", TableDef { name: "events".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { @@ -2117,6 +2144,7 @@ mod tests { })] #[case("unique_and_indexed", TableDef { name: "users".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { name: "email".into(), r#type: ColumnType::Simple(SimpleColumnType::Text), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, @@ -2132,6 +2160,7 @@ mod tests { })] #[case("enum_with_default", TableDef { name: "tasks".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), nullable: false, default: None, comment: None, primary_key: Some(PrimaryKeySyntax::Bool(true)), unique: None, index: None, foreign_key: None }, ColumnDef { @@ -2155,6 +2184,7 @@ mod tests { })] #[case("table_level_pk", TableDef { name: "orders".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, ColumnDef { name: "customer_id".into(), r#type: ColumnType::Simple(SimpleColumnType::Uuid), nullable: false, default: None, comment: None, primary_key: None, unique: None, index: None, foreign_key: None }, @@ -2189,6 +2219,7 @@ mod tests { fn table_with_pk(name: &str, columns: Vec, pk_cols: Vec<&str>) -> TableDef { TableDef { name: name.into(), + description: None, columns, constraints: vec![TableConstraint::PrimaryKey { auto_increment: false, @@ -2219,6 +2250,7 @@ mod tests { } TableDef { name: name.into(), + description: None, columns, constraints, } @@ -2515,6 +2547,7 @@ mod tests { use vespertide_core::ComplexColumnType; let table = TableDef { name: "products".into(), + description: None, columns: vec![ColumnDef { name: "price".into(), r#type: ColumnType::Complex(ComplexColumnType::Numeric { @@ -2557,6 +2590,7 @@ mod tests { use vespertide_core::schema::primary_key::PrimaryKeySyntax; TableDef { name: "tasks".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), @@ -2617,6 +2651,7 @@ mod tests { use vespertide_core::schema::primary_key::PrimaryKeySyntax; let table = TableDef { name: "settings".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), @@ -2671,6 +2706,7 @@ mod tests { let table = TableDef { name: "items".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: 
ColumnType::Simple(SimpleColumnType::Integer), @@ -2701,6 +2737,7 @@ mod tests { let table = TableDef { name: "orders".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), @@ -2747,6 +2784,7 @@ mod tests { let table = TableDef { name: "users".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -2778,6 +2816,7 @@ mod tests { let table = TableDef { name: "products".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), diff --git a/crates/vespertide-loader/src/models.rs b/crates/vespertide-loader/src/models.rs index 8beb727..1e60f82 100644 --- a/crates/vespertide-loader/src/models.rs +++ b/crates/vespertide-loader/src/models.rs @@ -220,6 +220,7 @@ mod tests { fs::create_dir_all("models").unwrap(); let table = TableDef { name: "users".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -255,6 +256,7 @@ mod tests { // Create model in subdirectory let table = TableDef { name: "subtable".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -291,6 +293,7 @@ mod tests { // Create a model with invalid FK string format (missing dot separator) let table = TableDef { name: "orders".into(), + description: None, columns: vec![ColumnDef { name: "user_id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -326,6 +329,7 @@ mod tests { let table = TableDef { name: "users".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -394,6 +398,7 @@ mod tests { let table = TableDef { name: "users".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -429,6 +434,7 @@ mod tests { let table = TableDef { name: "users".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -465,6 +471,7 @@ mod tests { let table = TableDef { name: "subtable".into(), + description: None, columns: vec![ColumnDef { name: "id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), @@ -531,6 +538,7 @@ mod tests { // Create a model with invalid FK format let table = TableDef { name: "orders".into(), + description: None, columns: vec![ColumnDef { name: "user_id".into(), r#type: ColumnType::Simple(SimpleColumnType::Integer), diff --git a/crates/vespertide-planner/src/apply.rs b/crates/vespertide-planner/src/apply.rs index 9dd8b2d..cde9241 100644 --- a/crates/vespertide-planner/src/apply.rs +++ b/crates/vespertide-planner/src/apply.rs @@ -18,6 +18,7 @@ pub fn apply_action( } schema.push(TableDef { name: table.clone(), + description: None, columns: columns.clone(), constraints: constraints.clone(), }); @@ -377,6 +378,7 @@ mod tests { fn table(name: &str, columns: Vec, constraints: Vec) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } diff --git a/crates/vespertide-planner/src/diff.rs b/crates/vespertide-planner/src/diff.rs index 6d747ee..87ff24b 100644 --- a/crates/vespertide-planner/src/diff.rs +++ b/crates/vespertide-planner/src/diff.rs @@ -446,6 +446,7 @@ mod tests { ) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } @@ -1089,6 +1090,7 @@ mod tests { let table = TableDef { name: "test".into(), + description: None, columns: vec![ 
col("id", ColumnType::Simple(SimpleColumnType::Integer)), col1.clone(), @@ -1120,6 +1122,7 @@ mod tests { let table = TableDef { name: "test".into(), + description: None, columns: vec![ col("id", ColumnType::Simple(SimpleColumnType::Integer)), col1.clone(), @@ -1158,6 +1161,7 @@ mod tests { ) -> TableDef { TableDef { name: name.to_string(), + description: None, columns: vec![ col("id", ColumnType::Simple(SimpleColumnType::Integer)), col(fk_column, ColumnType::Simple(SimpleColumnType::Integer)), @@ -1176,6 +1180,7 @@ mod tests { fn simple_table(name: &str) -> TableDef { TableDef { name: name.to_string(), + description: None, columns: vec![col("id", ColumnType::Simple(SimpleColumnType::Integer))], constraints: vec![], } @@ -1356,6 +1361,7 @@ mod tests { // Create circular dependency: A -> B -> A let table_a = TableDef { name: "table_a".to_string(), + description: None, columns: vec![ col("id", ColumnType::Simple(SimpleColumnType::Integer)), col("b_id", ColumnType::Simple(SimpleColumnType::Integer)), @@ -1372,6 +1378,7 @@ mod tests { let table_b = TableDef { name: "table_b".to_string(), + description: None, columns: vec![ col("id", ColumnType::Simple(SimpleColumnType::Integer)), col("a_id", ColumnType::Simple(SimpleColumnType::Integer)), @@ -1549,24 +1556,28 @@ mod tests { let user = TableDef { name: "user".to_string(), + description: None, columns: vec![col_pk("id")], constraints: vec![], }; let product = TableDef { name: "product".to_string(), + description: None, columns: vec![col_pk("id")], constraints: vec![], }; let project = TableDef { name: "project".to_string(), + description: None, columns: vec![col_pk("id"), col_inline_fk("user_id", "user")], constraints: vec![], }; let code = TableDef { name: "code".to_string(), + description: None, columns: vec![ col_pk("id"), col_inline_fk("product_id", "product"), @@ -1578,6 +1589,7 @@ mod tests { let order = TableDef { name: "order".to_string(), + description: None, columns: vec![ col_pk("id"), col_inline_fk("user_id", "user"), @@ -1590,6 +1602,7 @@ mod tests { let payment = TableDef { name: "payment".to_string(), + description: None, columns: vec![col_pk("id"), col_inline_fk("order_id", "order")], constraints: vec![], }; @@ -1683,12 +1696,14 @@ mod tests { // Table with multiple FKs referencing the same table (like code.creator_user_id and code.used_by_user_id) let user = TableDef { name: "user".to_string(), + description: None, columns: vec![col_pk("id")], constraints: vec![], }; let code = TableDef { name: "code".to_string(), + description: None, columns: vec![ col_pk("id"), col_inline_fk("creator_user_id", "user"), diff --git a/crates/vespertide-planner/src/plan.rs b/crates/vespertide-planner/src/plan.rs index 864b369..b681bcf 100644 --- a/crates/vespertide-planner/src/plan.rs +++ b/crates/vespertide-planner/src/plan.rs @@ -61,6 +61,7 @@ mod tests { ) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } diff --git a/crates/vespertide-planner/src/schema.rs b/crates/vespertide-planner/src/schema.rs index 64cad33..c657c1e 100644 --- a/crates/vespertide-planner/src/schema.rs +++ b/crates/vespertide-planner/src/schema.rs @@ -39,6 +39,7 @@ mod tests { fn table(name: &str, columns: Vec, constraints: Vec) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } diff --git a/crates/vespertide-planner/src/validate.rs b/crates/vespertide-planner/src/validate.rs index d432ec1..e0e7a1a 100644 --- a/crates/vespertide-planner/src/validate.rs +++ 
b/crates/vespertide-planner/src/validate.rs @@ -477,6 +478,7 @@ mod tests { fn table(name: &str, columns: Vec<ColumnDef>, constraints: Vec<TableConstraint>) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } } diff --git a/crates/vespertide-query/src/sql/add_column.rs index 64fd8d0..4dc75c9 100644 --- a/crates/vespertide-query/src/sql/add_column.rs +++ b/crates/vespertide-query/src/sql/add_column.rs @@ -1,691 +1,699 @@ -use sea_query::{Alias, Expr, Query, Table, TableAlterStatement}; - -use vespertide_core::{ColumnDef, TableDef}; - -use super::create_table::build_create_table_for_backend; -use super::helpers::{ - build_create_enum_type_sql, build_schema_statement, build_sea_column_def_with_table, - collect_sqlite_enum_check_clauses, normalize_enum_default, normalize_fill_with, -}; -use super::rename_table::build_rename_table; -use super::types::{BuiltQuery, DatabaseBackend, RawSql}; -use crate::error::QueryError; - -fn build_add_column_alter_for_backend( - backend: &DatabaseBackend, - table: &str, - column: &ColumnDef, -) -> TableAlterStatement { - let col_def = build_sea_column_def_with_table(backend, table, column); - Table::alter() - .table(Alias::new(table)) - .add_column(col_def) - .to_owned() -} - -/// Check if the column type is an enum -fn is_enum_column(column: &ColumnDef) -> bool { - matches!( - column.r#type, - vespertide_core::ColumnType::Complex(vespertide_core::ComplexColumnType::Enum { .. }) - ) -} - -pub fn build_add_column( - backend: &DatabaseBackend, - table: &str, - column: &ColumnDef, - fill_with: Option<&str>, - current_schema: &[TableDef], -) -> Result<Vec<BuiltQuery>, QueryError> { - // SQLite: NOT NULL additions or enum columns require table recreation - // (enum columns need CHECK constraint which requires table recreation in SQLite) - let sqlite_needs_recreation = - *backend == DatabaseBackend::Sqlite && (!column.nullable || is_enum_column(column)); - - if sqlite_needs_recreation { - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. 
SQLite requires current schema information to add columns.", - table - )))?; - - let mut new_columns = table_def.columns.clone(); - new_columns.push(column.clone()); - - let temp_table = format!("{}_temp", table); - let create_temp = build_create_table_for_backend( - backend, - &temp_table, - &new_columns, - &table_def.constraints, - ); - - // For SQLite, add CHECK constraints for enum columns - // Use original table name for constraint naming (table will be renamed back) - let enum_check_clauses = collect_sqlite_enum_check_clauses(table, &new_columns); - let create_query = if !enum_check_clauses.is_empty() { - let base_sql = build_schema_statement(&create_temp, *backend); - let mut modified_sql = base_sql; - if let Some(pos) = modified_sql.rfind(')') { - let check_sql = enum_check_clauses.join(", "); - modified_sql.insert_str(pos, &format!(", {}", check_sql)); - } - BuiltQuery::Raw(RawSql::per_backend( - modified_sql.clone(), - modified_sql.clone(), - modified_sql, - )) - } else { - BuiltQuery::CreateTable(Box::new(create_temp)) - }; - - // Copy existing data, filling new column - let mut select_query = Query::select(); - for col in &table_def.columns { - select_query = select_query.column(Alias::new(&col.name)).to_owned(); - } - let normalized_fill = normalize_fill_with(fill_with); - let fill_expr = if let Some(fill) = normalized_fill.as_deref() { - Expr::cust(normalize_enum_default(&column.r#type, fill)) - } else if let Some(def) = &column.default { - Expr::cust(normalize_enum_default(&column.r#type, &def.to_sql())) - } else { - Expr::cust("NULL") - }; - select_query = select_query - .expr_as(fill_expr, Alias::new(&column.name)) - .from(Alias::new(table)) - .to_owned(); - - let mut columns_alias: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - columns_alias.push(Alias::new(&column.name)); - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(columns_alias) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - let drop_query = - BuiltQuery::DropTable(Box::new(Table::drop().table(Alias::new(table)).to_owned())); - let rename_query = build_rename_table(&temp_table, table); - - // Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for constraint in &table_def.constraints { - if let vespertide_core::TableConstraint::Index { name, columns } = constraint { - let index_name = - vespertide_naming::build_index_name(table, columns, name.as_deref()); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in columns { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut stmts = vec![create_query, insert_query, drop_query, rename_query]; - stmts.extend(index_queries); - return Ok(stmts); - } - - let mut stmts: Vec = Vec::new(); - - // If column type is an enum, create the type first (PostgreSQL only) - if let Some(create_type_sql) = build_create_enum_type_sql(table, &column.r#type) { - stmts.push(BuiltQuery::Raw(create_type_sql)); - } - - // If adding NOT NULL without default, we need special handling - let needs_backfill = !column.nullable && column.default.is_none() && fill_with.is_some(); - - if needs_backfill { - // Add as nullable first - let mut temp_col = column.clone(); - temp_col.nullable = true; - - 
stmts.push(BuiltQuery::AlterTable(Box::new( - build_add_column_alter_for_backend(backend, table, &temp_col), - ))); - - // Backfill with provided value - if let Some(fill) = normalize_fill_with(fill_with) { - let update_stmt = Query::update() - .table(Alias::new(table)) - .value(Alias::new(&column.name), Expr::cust(fill)) - .to_owned(); - stmts.push(BuiltQuery::Update(Box::new(update_stmt))); - } - - // Set NOT NULL - let not_null_col = build_sea_column_def_with_table(backend, table, column); - let alter_not_null = Table::alter() - .table(Alias::new(table)) - .modify_column(not_null_col) - .to_owned(); - stmts.push(BuiltQuery::AlterTable(Box::new(alter_not_null))); - } else { - stmts.push(BuiltQuery::AlterTable(Box::new( - build_add_column_alter_for_backend(backend, table, column), - ))); - } - - Ok(stmts) -} - -#[cfg(test)] -mod tests { - use super::*; - use insta::{assert_snapshot, with_settings}; - use rstest::rstest; - use vespertide_core::{ColumnType, SimpleColumnType, TableDef}; - - #[rstest] - #[case::add_column_with_backfill_postgres( - "add_column_with_backfill_postgres", - DatabaseBackend::Postgres, - &["ALTER TABLE \"users\" ADD COLUMN \"nickname\" text"] - )] - #[case::add_column_with_backfill_mysql( - "add_column_with_backfill_mysql", - DatabaseBackend::MySql, - &["ALTER TABLE `users` ADD COLUMN `nickname` text"] - )] - #[case::add_column_with_backfill_sqlite( - "add_column_with_backfill_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - #[case::add_column_simple_postgres( - "add_column_simple_postgres", - DatabaseBackend::Postgres, - &["ALTER TABLE \"users\" ADD COLUMN \"nickname\""] - )] - #[case::add_column_simple_mysql( - "add_column_simple_mysql", - DatabaseBackend::MySql, - &["ALTER TABLE `users` ADD COLUMN `nickname` text"] - )] - #[case::add_column_simple_sqlite( - "add_column_simple_sqlite", - DatabaseBackend::Sqlite, - &["ALTER TABLE \"users\" ADD COLUMN \"nickname\""] - )] - #[case::add_column_nullable_postgres( - "add_column_nullable_postgres", - DatabaseBackend::Postgres, - &["ALTER TABLE \"users\" ADD COLUMN \"email\" text"] - )] - #[case::add_column_nullable_mysql( - "add_column_nullable_mysql", - DatabaseBackend::MySql, - &["ALTER TABLE `users` ADD COLUMN `email` text"] - )] - #[case::add_column_nullable_sqlite( - "add_column_nullable_sqlite", - DatabaseBackend::Sqlite, - &["ALTER TABLE \"users\" ADD COLUMN \"email\" text"] - )] - fn test_add_column( - #[case] title: &str, - #[case] backend: DatabaseBackend, - #[case] expected: &[&str], - ) { - let column = ColumnDef { - name: if title.contains("age") { - "age" - } else if title.contains("nullable") { - "email" - } else { - "nickname" - } - .into(), - r#type: if title.contains("age") { - ColumnType::Simple(SimpleColumnType::Integer) - } else { - ColumnType::Simple(SimpleColumnType::Text) - }, - nullable: !title.contains("backfill"), - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let fill_with = if title.contains("backfill") { - Some("0") - } else { - None - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = - build_add_column(&backend, "users", &column, fill_with, ¤t_schema).unwrap(); - let sql = result[0].build(backend); - for 
exp in expected { - assert!( - sql.contains(exp), - "Expected SQL to contain '{}', got: {}", - exp, - sql - ); - } - - with_settings!({ snapshot_suffix => format!("add_column_{}", title) }, { - assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::>().join("\n")); - }); - } - - #[test] - fn test_add_column_sqlite_table_not_found() { - let column = ColumnDef { - name: "nickname".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![]; // Empty schema - table not found - let result = build_add_column( - &DatabaseBackend::Sqlite, - "users", - &column, - None, - ¤t_schema, - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'users' not found in current schema")); - } - - #[test] - fn test_add_column_sqlite_with_default() { - let column = ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: Some("18".into()), - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_add_column( - &DatabaseBackend::Sqlite, - "users", - &column, - None, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should use default value (18) for fill - assert!(sql.contains("18")); - } - - #[test] - fn test_add_column_sqlite_without_fill_or_default() { - let column = ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_add_column( - &DatabaseBackend::Sqlite, - "users", - &column, - None, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should use NULL for fill - assert!(sql.contains("NULL")); - } - - #[test] - fn test_add_column_sqlite_with_indexes() { - use vespertide_core::TableConstraint; - - let column = ColumnDef { - name: "nickname".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: 
vec![TableConstraint::Index { - name: Some("idx_id".into()), - columns: vec!["id".into()], - }], - }]; - let result = build_add_column( - &DatabaseBackend::Sqlite, - "users", - &column, - None, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should recreate index - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_id")); - } - - #[rstest] - #[case::add_column_with_enum_type_postgres(DatabaseBackend::Postgres)] - #[case::add_column_with_enum_type_mysql(DatabaseBackend::MySql)] - #[case::add_column_with_enum_type_sqlite(DatabaseBackend::Sqlite)] - fn test_add_column_with_enum_type(#[case] backend: DatabaseBackend) { - use insta::{assert_snapshot, with_settings}; - use vespertide_core::{ComplexColumnType, EnumValues}; - - // Test that adding an enum column creates the enum type first (PostgreSQL only) - let column = ColumnDef { - name: "status".into(), - r#type: ColumnType::Complex(ComplexColumnType::Enum { - name: "status_type".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_add_column(&backend, "users", &column, None, ¤t_schema); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - with_settings!({ snapshot_suffix => format!("add_column_with_enum_type_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::postgres(DatabaseBackend::Postgres)] - #[case::mysql(DatabaseBackend::MySql)] - #[case::sqlite(DatabaseBackend::Sqlite)] - fn test_add_column_enum_non_nullable_with_default(#[case] backend: DatabaseBackend) { - use insta::{assert_snapshot, with_settings}; - use vespertide_core::{ComplexColumnType, EnumValues}; - - // Test adding an enum column that is non-nullable with a default value - let column = ColumnDef { - name: "status".into(), - r#type: ColumnType::Complex(ComplexColumnType::Enum { - name: "user_status".into(), - values: EnumValues::String(vec![ - "active".into(), - "inactive".into(), - "pending".into(), - ]), - }), - nullable: false, - default: Some("active".into()), - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_add_column(&backend, "users", &column, None, ¤t_schema); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - with_settings!({ snapshot_suffix => format!("enum_non_nullable_with_default_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - 
#[case::postgres(DatabaseBackend::Postgres)] - #[case::mysql(DatabaseBackend::MySql)] - #[case::sqlite(DatabaseBackend::Sqlite)] - fn test_add_column_with_empty_string_default(#[case] backend: DatabaseBackend) { - use insta::{assert_snapshot, with_settings}; - - // Test adding a text column with empty string default - let column = ColumnDef { - name: "nickname".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: Some("".into()), // Empty string default - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_add_column(&backend, "users", &column, None, ¤t_schema); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - // Verify empty string becomes '' - assert!( - sql.contains("''"), - "Expected SQL to contain empty string literal '', got: {}", - sql - ); - - with_settings!({ snapshot_suffix => format!("empty_string_default_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::postgres(DatabaseBackend::Postgres)] - #[case::mysql(DatabaseBackend::MySql)] - #[case::sqlite(DatabaseBackend::Sqlite)] - fn test_add_column_with_fill_with_empty_string(#[case] backend: DatabaseBackend) { - use insta::{assert_snapshot, with_settings}; - - // Test adding a column with fill_with as empty string - let column = ColumnDef { - name: "nickname".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - // fill_with empty string should become '' - let result = build_add_column(&backend, "users", &column, Some(""), ¤t_schema); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - // Verify empty string becomes '' - assert!( - sql.contains("''"), - "Expected SQL to contain empty string literal '', got: {}", - sql - ); - - with_settings!({ snapshot_suffix => format!("fill_with_empty_string_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } -} +use sea_query::{Alias, Expr, Query, Table, TableAlterStatement}; + +use vespertide_core::{ColumnDef, TableDef}; + +use super::create_table::build_create_table_for_backend; +use super::helpers::{ + build_create_enum_type_sql, build_schema_statement, build_sea_column_def_with_table, + collect_sqlite_enum_check_clauses, normalize_enum_default, normalize_fill_with, +}; +use super::rename_table::build_rename_table; +use super::types::{BuiltQuery, DatabaseBackend, RawSql}; +use crate::error::QueryError; + +fn build_add_column_alter_for_backend( + backend: &DatabaseBackend, + table: &str, + column: &ColumnDef, +) -> TableAlterStatement { + let col_def = 
build_sea_column_def_with_table(backend, table, column); + Table::alter() + .table(Alias::new(table)) + .add_column(col_def) + .to_owned() +} + +/// Check if the column type is an enum +fn is_enum_column(column: &ColumnDef) -> bool { + matches!( + column.r#type, + vespertide_core::ColumnType::Complex(vespertide_core::ComplexColumnType::Enum { .. }) + ) +} + +pub fn build_add_column( + backend: &DatabaseBackend, + table: &str, + column: &ColumnDef, + fill_with: Option<&str>, + current_schema: &[TableDef], +) -> Result<Vec<BuiltQuery>, QueryError> { + // SQLite: NOT NULL additions or enum columns require table recreation + // (enum columns need CHECK constraint which requires table recreation in SQLite) + let sqlite_needs_recreation = + *backend == DatabaseBackend::Sqlite && (!column.nullable || is_enum_column(column)); + + if sqlite_needs_recreation { + let table_def = current_schema + .iter() + .find(|t| t.name == table) + .ok_or_else(|| QueryError::Other(format!( + "Table '{}' not found in current schema. SQLite requires current schema information to add columns.", + table + )))?; + + let mut new_columns = table_def.columns.clone(); + new_columns.push(column.clone()); + + let temp_table = format!("{}_temp", table); + let create_temp = build_create_table_for_backend( + backend, + &temp_table, + &new_columns, + &table_def.constraints, + ); + + // For SQLite, add CHECK constraints for enum columns + // Use original table name for constraint naming (table will be renamed back) + let enum_check_clauses = collect_sqlite_enum_check_clauses(table, &new_columns); + let create_query = if !enum_check_clauses.is_empty() { + let base_sql = build_schema_statement(&create_temp, *backend); + let mut modified_sql = base_sql; + if let Some(pos) = modified_sql.rfind(')') { + let check_sql = enum_check_clauses.join(", "); + modified_sql.insert_str(pos, &format!(", {}", check_sql)); + } + BuiltQuery::Raw(RawSql::per_backend( + modified_sql.clone(), + modified_sql.clone(), + modified_sql, + )) + } else { + BuiltQuery::CreateTable(Box::new(create_temp)) + }; + + // Copy existing data, filling new column + let mut select_query = Query::select(); + for col in &table_def.columns { + select_query = select_query.column(Alias::new(&col.name)).to_owned(); + } + let normalized_fill = normalize_fill_with(fill_with); + let fill_expr = if let Some(fill) = normalized_fill.as_deref() { + Expr::cust(normalize_enum_default(&column.r#type, fill)) + } else if let Some(def) = &column.default { + Expr::cust(normalize_enum_default(&column.r#type, &def.to_sql())) + } else { + Expr::cust("NULL") + }; + select_query = select_query + .expr_as(fill_expr, Alias::new(&column.name)) + .from(Alias::new(table)) + .to_owned(); + + let mut columns_alias: Vec<Alias> = table_def + .columns + .iter() + .map(|c| Alias::new(&c.name)) + .collect(); + columns_alias.push(Alias::new(&column.name)); + let insert_stmt = Query::insert() + .into_table(Alias::new(&temp_table)) + .columns(columns_alias) + .select_from(select_query) + .unwrap() + .to_owned(); + let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); + + let drop_query = + BuiltQuery::DropTable(Box::new(Table::drop().table(Alias::new(table)).to_owned())); + let rename_query = build_rename_table(&temp_table, table); + + // Recreate indexes from Index constraints + let mut index_queries = Vec::new(); + for constraint in &table_def.constraints { + if let vespertide_core::TableConstraint::Index { name, columns } = constraint { + let index_name = + vespertide_naming::build_index_name(table, columns, 
name.as_deref()); + let mut idx_stmt = sea_query::Index::create(); + idx_stmt = idx_stmt.name(&index_name).to_owned(); + for col_name in columns { + idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); + } + idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); + index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); + } + } + + let mut stmts = vec![create_query, insert_query, drop_query, rename_query]; + stmts.extend(index_queries); + return Ok(stmts); + } + + let mut stmts: Vec = Vec::new(); + + // If column type is an enum, create the type first (PostgreSQL only) + if let Some(create_type_sql) = build_create_enum_type_sql(table, &column.r#type) { + stmts.push(BuiltQuery::Raw(create_type_sql)); + } + + // If adding NOT NULL without default, we need special handling + let needs_backfill = !column.nullable && column.default.is_none() && fill_with.is_some(); + + if needs_backfill { + // Add as nullable first + let mut temp_col = column.clone(); + temp_col.nullable = true; + + stmts.push(BuiltQuery::AlterTable(Box::new( + build_add_column_alter_for_backend(backend, table, &temp_col), + ))); + + // Backfill with provided value + if let Some(fill) = normalize_fill_with(fill_with) { + let update_stmt = Query::update() + .table(Alias::new(table)) + .value(Alias::new(&column.name), Expr::cust(fill)) + .to_owned(); + stmts.push(BuiltQuery::Update(Box::new(update_stmt))); + } + + // Set NOT NULL + let not_null_col = build_sea_column_def_with_table(backend, table, column); + let alter_not_null = Table::alter() + .table(Alias::new(table)) + .modify_column(not_null_col) + .to_owned(); + stmts.push(BuiltQuery::AlterTable(Box::new(alter_not_null))); + } else { + stmts.push(BuiltQuery::AlterTable(Box::new( + build_add_column_alter_for_backend(backend, table, column), + ))); + } + + Ok(stmts) +} + +#[cfg(test)] +mod tests { + use super::*; + use insta::{assert_snapshot, with_settings}; + use rstest::rstest; + use vespertide_core::{ColumnType, SimpleColumnType, TableDef}; + + #[rstest] + #[case::add_column_with_backfill_postgres( + "add_column_with_backfill_postgres", + DatabaseBackend::Postgres, + &["ALTER TABLE \"users\" ADD COLUMN \"nickname\" text"] + )] + #[case::add_column_with_backfill_mysql( + "add_column_with_backfill_mysql", + DatabaseBackend::MySql, + &["ALTER TABLE `users` ADD COLUMN `nickname` text"] + )] + #[case::add_column_with_backfill_sqlite( + "add_column_with_backfill_sqlite", + DatabaseBackend::Sqlite, + &["CREATE TABLE \"users_temp\""] + )] + #[case::add_column_simple_postgres( + "add_column_simple_postgres", + DatabaseBackend::Postgres, + &["ALTER TABLE \"users\" ADD COLUMN \"nickname\""] + )] + #[case::add_column_simple_mysql( + "add_column_simple_mysql", + DatabaseBackend::MySql, + &["ALTER TABLE `users` ADD COLUMN `nickname` text"] + )] + #[case::add_column_simple_sqlite( + "add_column_simple_sqlite", + DatabaseBackend::Sqlite, + &["ALTER TABLE \"users\" ADD COLUMN \"nickname\""] + )] + #[case::add_column_nullable_postgres( + "add_column_nullable_postgres", + DatabaseBackend::Postgres, + &["ALTER TABLE \"users\" ADD COLUMN \"email\" text"] + )] + #[case::add_column_nullable_mysql( + "add_column_nullable_mysql", + DatabaseBackend::MySql, + &["ALTER TABLE `users` ADD COLUMN `email` text"] + )] + #[case::add_column_nullable_sqlite( + "add_column_nullable_sqlite", + DatabaseBackend::Sqlite, + &["ALTER TABLE \"users\" ADD COLUMN \"email\" text"] + )] + fn test_add_column( + #[case] title: &str, + #[case] backend: DatabaseBackend, + #[case] expected: &[&str], + ) { + 
let column = ColumnDef { + name: if title.contains("age") { + "age" + } else if title.contains("nullable") { + "email" + } else { + "nickname" + } + .into(), + r#type: if title.contains("age") { + ColumnType::Simple(SimpleColumnType::Integer) + } else { + ColumnType::Simple(SimpleColumnType::Text) + }, + nullable: !title.contains("backfill"), + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let fill_with = if title.contains("backfill") { + Some("0") + } else { + None + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = + build_add_column(&backend, "users", &column, fill_with, ¤t_schema).unwrap(); + let sql = result[0].build(backend); + for exp in expected { + assert!( + sql.contains(exp), + "Expected SQL to contain '{}', got: {}", + exp, + sql + ); + } + + with_settings!({ snapshot_suffix => format!("add_column_{}", title) }, { + assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::>().join("\n")); + }); + } + + #[test] + fn test_add_column_sqlite_table_not_found() { + let column = ColumnDef { + name: "nickname".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![]; // Empty schema - table not found + let result = build_add_column( + &DatabaseBackend::Sqlite, + "users", + &column, + None, + ¤t_schema, + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Table 'users' not found in current schema")); + } + + #[test] + fn test_add_column_sqlite_with_default() { + let column = ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: Some("18".into()), + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_add_column( + &DatabaseBackend::Sqlite, + "users", + &column, + None, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should use default value (18) for fill + assert!(sql.contains("18")); + } + + #[test] + fn test_add_column_sqlite_without_fill_or_default() { + let column = ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: 
None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_add_column( + &DatabaseBackend::Sqlite, + "users", + &column, + None, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should use NULL for fill + assert!(sql.contains("NULL")); + } + + #[test] + fn test_add_column_sqlite_with_indexes() { + use vespertide_core::TableConstraint; + + let column = ColumnDef { + name: "nickname".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Index { + name: Some("idx_id".into()), + columns: vec!["id".into()], + }], + }]; + let result = build_add_column( + &DatabaseBackend::Sqlite, + "users", + &column, + None, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should recreate index + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_id")); + } + + #[rstest] + #[case::add_column_with_enum_type_postgres(DatabaseBackend::Postgres)] + #[case::add_column_with_enum_type_mysql(DatabaseBackend::MySql)] + #[case::add_column_with_enum_type_sqlite(DatabaseBackend::Sqlite)] + fn test_add_column_with_enum_type(#[case] backend: DatabaseBackend) { + use insta::{assert_snapshot, with_settings}; + use vespertide_core::{ComplexColumnType, EnumValues}; + + // Test that adding an enum column creates the enum type first (PostgreSQL only) + let column = ColumnDef { + name: "status".into(), + r#type: ColumnType::Complex(ComplexColumnType::Enum { + name: "status_type".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_add_column(&backend, "users", &column, None, ¤t_schema); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + with_settings!({ snapshot_suffix => format!("add_column_with_enum_type_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::postgres(DatabaseBackend::Postgres)] + #[case::mysql(DatabaseBackend::MySql)] + #[case::sqlite(DatabaseBackend::Sqlite)] + fn test_add_column_enum_non_nullable_with_default(#[case] backend: DatabaseBackend) { + use insta::{assert_snapshot, with_settings}; + use vespertide_core::{ComplexColumnType, EnumValues}; + + // Test adding an enum column that is non-nullable with a default value + let column = ColumnDef { + name: 
"status".into(), + r#type: ColumnType::Complex(ComplexColumnType::Enum { + name: "user_status".into(), + values: EnumValues::String(vec![ + "active".into(), + "inactive".into(), + "pending".into(), + ]), + }), + nullable: false, + default: Some("active".into()), + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_add_column(&backend, "users", &column, None, ¤t_schema); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + with_settings!({ snapshot_suffix => format!("enum_non_nullable_with_default_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::postgres(DatabaseBackend::Postgres)] + #[case::mysql(DatabaseBackend::MySql)] + #[case::sqlite(DatabaseBackend::Sqlite)] + fn test_add_column_with_empty_string_default(#[case] backend: DatabaseBackend) { + use insta::{assert_snapshot, with_settings}; + + // Test adding a text column with empty string default + let column = ColumnDef { + name: "nickname".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: Some("".into()), // Empty string default + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_add_column(&backend, "users", &column, None, ¤t_schema); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + // Verify empty string becomes '' + assert!( + sql.contains("''"), + "Expected SQL to contain empty string literal '', got: {}", + sql + ); + + with_settings!({ snapshot_suffix => format!("empty_string_default_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::postgres(DatabaseBackend::Postgres)] + #[case::mysql(DatabaseBackend::MySql)] + #[case::sqlite(DatabaseBackend::Sqlite)] + fn test_add_column_with_fill_with_empty_string(#[case] backend: DatabaseBackend) { + use insta::{assert_snapshot, with_settings}; + + // Test adding a column with fill_with as empty string + let column = ColumnDef { + name: "nickname".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + // fill_with empty string should become '' + let result = build_add_column(&backend, "users", 
&column, Some(""), ¤t_schema); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + // Verify empty string becomes '' + assert!( + sql.contains("''"), + "Expected SQL to contain empty string literal '', got: {}", + sql + ); + + with_settings!({ snapshot_suffix => format!("fill_with_empty_string_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } +} diff --git a/crates/vespertide-query/src/sql/add_constraint.rs b/crates/vespertide-query/src/sql/add_constraint.rs index 983ebfd..dc972bb 100644 --- a/crates/vespertide-query/src/sql/add_constraint.rs +++ b/crates/vespertide-query/src/sql/add_constraint.rs @@ -1,1217 +1,1229 @@ -use sea_query::{Alias, ForeignKey, Index, Query, Table}; - -use vespertide_core::{TableConstraint, TableDef}; - -use super::create_table::build_create_table_for_backend; -use super::helpers::{build_schema_statement, to_sea_fk_action}; -use super::rename_table::build_rename_table; -use super::types::{BuiltQuery, DatabaseBackend}; -use crate::error::QueryError; -use crate::sql::RawSql; - -/// Extract CHECK constraint clauses from a list of constraints -fn extract_check_clauses(constraints: &[TableConstraint]) -> Vec { - constraints - .iter() - .filter_map(|c| { - if let TableConstraint::Check { name, expr } = c { - Some(format!("CONSTRAINT \"{}\" CHECK ({})", name, expr)) - } else { - None - } - }) - .collect() -} - -/// Build CREATE TABLE query with CHECK constraints properly embedded -fn build_create_with_checks( - backend: &DatabaseBackend, - create_stmt: &sea_query::TableCreateStatement, - check_clauses: &[String], -) -> BuiltQuery { - if check_clauses.is_empty() { - BuiltQuery::CreateTable(Box::new(create_stmt.clone())) - } else { - let base_sql = build_schema_statement(create_stmt, *backend); - let mut modified_sql = base_sql; - if let Some(pos) = modified_sql.rfind(')') { - let check_sql = check_clauses.join(", "); - modified_sql.insert_str(pos, &format!(", {}", check_sql)); - } - BuiltQuery::Raw(RawSql::per_backend( - modified_sql.clone(), - modified_sql.clone(), - modified_sql, - )) - } -} - -pub fn build_add_constraint( - backend: &DatabaseBackend, - table: &str, - constraint: &TableConstraint, - current_schema: &[TableDef], -) -> Result, QueryError> { - match constraint { - TableConstraint::PrimaryKey { columns, .. } => { - if *backend == DatabaseBackend::Sqlite { - // SQLite does not support ALTER TABLE ... ADD PRIMARY KEY - // Use temporary table approach - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. SQLite requires current schema information to add constraints.", - table - )))?; - - // Create new constraints with the added primary key constraint - let mut new_constraints = table_def.constraints.clone(); - new_constraints.push(constraint.clone()); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. Create temporary table with new constraints - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &table_def.columns, - &new_constraints, - ); - - // Handle CHECK constraints (sea-query doesn't support them natively) - let check_clauses = extract_check_clauses(&new_constraints); - let create_query = - build_create_with_checks(backend, &create_temp_table, &check_clauses); - - // 2. 
Copy data - let column_aliases: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table - let rename_query = build_rename_table(&temp_table, table); - - // 5. Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for c in &table_def.constraints { - if let TableConstraint::Index { - name: idx_name, - columns: idx_cols, - } = c - { - let index_name = vespertide_naming::build_index_name( - table, - idx_cols, - idx_name.as_deref(), - ); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in idx_cols { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - Ok(queries) - } else { - // sea_query lacks ALTER TABLE ADD PRIMARY KEY; emit backend SQL - let pg_cols = columns - .iter() - .map(|c| format!("\"{}\"", c)) - .collect::>() - .join(", "); - let mysql_cols = columns - .iter() - .map(|c| format!("`{}`", c)) - .collect::>() - .join(", "); - let pg_sql = format!("ALTER TABLE \"{}\" ADD PRIMARY KEY ({})", table, pg_cols); - let mysql_sql = format!("ALTER TABLE `{}` ADD PRIMARY KEY ({})", table, mysql_cols); - Ok(vec![BuiltQuery::Raw(RawSql::per_backend( - pg_sql.clone(), - mysql_sql, - pg_sql, - ))]) - } - } - TableConstraint::Unique { name, columns } => { - // SQLite does not support ALTER TABLE ... ADD CONSTRAINT UNIQUE - // Always generate a proper name: uq_{table}_{key} or uq_{table}_{columns} - let index_name = - super::helpers::build_unique_constraint_name(table, columns, name.as_deref()); - let mut idx = Index::create() - .table(Alias::new(table)) - .name(&index_name) - .unique() - .to_owned(); - for col in columns { - idx = idx.col(Alias::new(col)).to_owned(); - } - Ok(vec![BuiltQuery::CreateIndex(Box::new(idx))]) - } - TableConstraint::ForeignKey { - name, - columns, - ref_table, - ref_columns, - on_delete, - on_update, - } => { - // SQLite does not support ALTER TABLE ... ADD CONSTRAINT FOREIGN KEY - if *backend == DatabaseBackend::Sqlite { - // Use temporary table approach for SQLite - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. SQLite requires current schema information to add constraints.", - table - )))?; - - // Create new constraints with the added foreign key constraint - let mut new_constraints = table_def.constraints.clone(); - new_constraints.push(constraint.clone()); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. 
Create temporary table with new constraints - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &table_def.columns, - &new_constraints, - ); - - // Handle CHECK constraints (sea-query doesn't support them natively) - let check_clauses = extract_check_clauses(&new_constraints); - let create_query = - build_create_with_checks(backend, &create_temp_table, &check_clauses); - - // 2. Copy data (all columns) - let column_aliases: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table to original name - let rename_query = build_rename_table(&temp_table, table); - - // 5. Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for c in &table_def.constraints { - if let TableConstraint::Index { - name: idx_name, - columns: idx_cols, - } = c - { - let index_name = vespertide_naming::build_index_name( - table, - idx_cols, - idx_name.as_deref(), - ); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in idx_cols { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - Ok(queries) - } else { - // Build foreign key using ForeignKey::create - let fk_name = - vespertide_naming::build_foreign_key_name(table, columns, name.as_deref()); - let mut fk = ForeignKey::create(); - fk = fk.name(&fk_name).to_owned(); - fk = fk.from_tbl(Alias::new(table)).to_owned(); - for col in columns { - fk = fk.from_col(Alias::new(col)).to_owned(); - } - fk = fk.to_tbl(Alias::new(ref_table)).to_owned(); - for col in ref_columns { - fk = fk.to_col(Alias::new(col)).to_owned(); - } - if let Some(action) = on_delete { - fk = fk.on_delete(to_sea_fk_action(action)).to_owned(); - } - if let Some(action) = on_update { - fk = fk.on_update(to_sea_fk_action(action)).to_owned(); - } - Ok(vec![BuiltQuery::CreateForeignKey(Box::new(fk))]) - } - } - TableConstraint::Index { name, columns } => { - // Index constraints are simple CREATE INDEX statements for all backends - let index_name = vespertide_naming::build_index_name(table, columns, name.as_deref()); - let mut idx = Index::create() - .table(Alias::new(table)) - .name(&index_name) - .to_owned(); - for col in columns { - idx = idx.col(Alias::new(col)).to_owned(); - } - Ok(vec![BuiltQuery::CreateIndex(Box::new(idx))]) - } - TableConstraint::Check { name, expr } => { - // SQLite does not support ALTER TABLE ... 
ADD CONSTRAINT CHECK - if *backend == DatabaseBackend::Sqlite { - // Use temporary table approach for SQLite - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. SQLite requires current schema information to add constraints.", - table - )))?; - - // Create new constraints with the added check constraint - let mut new_constraints = table_def.constraints.clone(); - new_constraints.push(constraint.clone()); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. Create temporary table with new constraints - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &table_def.columns, - &new_constraints, - ); - - // Handle CHECK constraints (sea-query doesn't support them natively) - let check_clauses = extract_check_clauses(&new_constraints); - let create_query = - build_create_with_checks(backend, &create_temp_table, &check_clauses); - - // 2. Copy data (all columns) - let column_aliases: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table to original name - let rename_query = build_rename_table(&temp_table, table); - - // 5. 
Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for c in &table_def.constraints { - if let TableConstraint::Index { - name: idx_name, - columns: idx_cols, - } = c - { - let index_name = vespertide_naming::build_index_name( - table, - idx_cols, - idx_name.as_deref(), - ); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in idx_cols { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - Ok(queries) - } else { - let pg_sql = format!( - "ALTER TABLE \"{}\" ADD CONSTRAINT \"{}\" CHECK ({})", - table, name, expr - ); - let mysql_sql = format!( - "ALTER TABLE `{}` ADD CONSTRAINT `{}` CHECK ({})", - table, name, expr - ); - Ok(vec![BuiltQuery::Raw(RawSql::per_backend( - pg_sql.clone(), - mysql_sql, - pg_sql, - ))]) - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::sql::types::DatabaseBackend; - use insta::{assert_snapshot, with_settings}; - use rstest::rstest; - use vespertide_core::{ - ColumnDef, ColumnType, ReferenceAction, SimpleColumnType, TableConstraint, TableDef, - }; - - #[rstest] - #[case::add_constraint_primary_key_postgres( - "add_constraint_primary_key_postgres", - DatabaseBackend::Postgres, - &["ALTER TABLE \"users\" ADD PRIMARY KEY (\"id\")"] - )] - #[case::add_constraint_primary_key_mysql( - "add_constraint_primary_key_mysql", - DatabaseBackend::MySql, - &["ALTER TABLE `users` ADD PRIMARY KEY (`id`)"] - )] - #[case::add_constraint_primary_key_sqlite( - "add_constraint_primary_key_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - #[case::add_constraint_unique_named_postgres( - "add_constraint_unique_named_postgres", - DatabaseBackend::Postgres, - &["CREATE UNIQUE INDEX \"uq_users__uq_email\" ON \"users\" (\"email\")"] - )] - #[case::add_constraint_unique_named_mysql( - "add_constraint_unique_named_mysql", - DatabaseBackend::MySql, - &["CREATE UNIQUE INDEX `uq_users__uq_email` ON `users` (`email`)"] - )] - #[case::add_constraint_unique_named_sqlite( - "add_constraint_unique_named_sqlite", - DatabaseBackend::Sqlite, - &["CREATE UNIQUE INDEX \"uq_users__uq_email\" ON \"users\" (\"email\")"] - )] - #[case::add_constraint_foreign_key_postgres( - "add_constraint_foreign_key_postgres", - DatabaseBackend::Postgres, - &["FOREIGN KEY (\"user_id\")", "REFERENCES \"users\" (\"id\")", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] - )] - #[case::add_constraint_foreign_key_mysql( - "add_constraint_foreign_key_mysql", - DatabaseBackend::MySql, - &["FOREIGN KEY (`user_id`)", "REFERENCES `users` (`id`)", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] - )] - #[case::add_constraint_foreign_key_sqlite( - "add_constraint_foreign_key_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - #[case::add_constraint_check_named_postgres( - "add_constraint_check_named_postgres", - DatabaseBackend::Postgres, - &["ADD CONSTRAINT \"chk_age\" CHECK (age > 0)"] - )] - #[case::add_constraint_check_named_mysql( - "add_constraint_check_named_mysql", - DatabaseBackend::MySql, - &["ADD CONSTRAINT `chk_age` CHECK (age > 0)"] - )] - #[case::add_constraint_check_named_sqlite( - "add_constraint_check_named_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - fn test_add_constraint( 
- #[case] title: &str, - #[case] backend: DatabaseBackend, - #[case] expected: &[&str], - ) { - let constraint = if title.contains("primary_key") { - TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - } - } else if title.contains("unique") { - TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - } - } else if title.contains("foreign_key") { - TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: Some(ReferenceAction::Cascade), - on_update: Some(ReferenceAction::Restrict), - } - } else { - TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - } - }; - - // For SQLite, we need to provide current schema - let current_schema = vec![TableDef { - name: "users".into(), - columns: if title.contains("foreign_key") { - vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ] - } else { - vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: if title.contains("check") { - "age".into() - } else { - "email".into() - }, - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ] - }, - constraints: vec![], - }]; - - let result = build_add_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result[0].build(backend); - for exp in expected { - assert!( - sql.contains(exp), - "Expected SQL to contain '{}', got: {}", - exp, - sql - ); - } - - with_settings!({ snapshot_suffix => format!("add_constraint_{}", title) }, { - assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::>().join("\n")); - }); - } - - #[test] - fn test_add_constraint_primary_key_sqlite_table_not_found() { - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let current_schema = vec![]; // Empty schema - table not found - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'users' not found in current schema")); - } - - #[test] - fn test_add_constraint_primary_key_sqlite_with_check_constraints() { - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Check { - name: "chk_id".into(), - expr: "id > 0".into(), - }], - }]; - let result = build_add_constraint( - 
&DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should include CHECK constraint in CREATE TABLE - assert!(sql.contains("CONSTRAINT \"chk_id\" CHECK")); - } - - #[test] - fn test_add_constraint_primary_key_sqlite_with_indexes() { - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Index { - name: Some("idx_id".into()), - columns: vec!["id".into()], - }], - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should recreate index - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_id")); - } - - #[test] - fn test_add_constraint_primary_key_sqlite_with_unique_constraint() { - // Note: Unique indexes are now TableConstraint::Unique, not Index - // Index constraints don't have a unique flag - use Unique constraint instead - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }], - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Unique constraint should be in CREATE TABLE statement (for SQLite temp table approach) - assert!(sql.contains("CREATE TABLE")); - } - - #[test] - fn test_add_constraint_foreign_key_sqlite_table_not_found() { - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![]; // Empty schema - table not found - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "posts", - &constraint, - ¤t_schema, - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'posts' not found in current schema")); - } - - #[test] - fn test_add_constraint_foreign_key_sqlite_with_check_constraints() { - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![TableDef { - name: "posts".into(), - columns: vec![ColumnDef { - name: 
"user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Check { - name: "chk_user_id".into(), - expr: "user_id > 0".into(), - }], - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "posts", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should include CHECK constraint in CREATE TABLE - assert!(sql.contains("CONSTRAINT \"chk_user_id\" CHECK")); - } - - #[test] - fn test_add_constraint_foreign_key_sqlite_with_indexes() { - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![TableDef { - name: "posts".into(), - columns: vec![ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Index { - name: Some("idx_user_id".into()), - columns: vec!["user_id".into()], - }], - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "posts", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should recreate index - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_user_id")); - } - - #[test] - fn test_add_constraint_foreign_key_sqlite_with_unique_constraint() { - // Note: Unique indexes are now TableConstraint::Unique - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![TableDef { - name: "posts".into(), - columns: vec![ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Unique { - name: Some("uq_user_id".into()), - columns: vec!["user_id".into()], - }], - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "posts", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Unique constraint should be in CREATE TABLE statement - assert!(sql.contains("CREATE TABLE")); - } - - #[test] - fn test_add_constraint_check_sqlite_table_not_found() { - let constraint = TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - }; - let current_schema = vec![]; // Empty schema - table not found - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'users' not found in current schema")); - } - - #[test] - fn 
test_add_constraint_check_sqlite_without_existing_check() { - // Test when there are no existing CHECK constraints (line 376) - let constraint = TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], // No existing CHECK constraints - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should create table with CHECK constraint - assert!(sql.contains("CREATE TABLE")); - assert!(sql.contains("CONSTRAINT \"chk_age\" CHECK")); - } - - #[test] - fn test_add_constraint_primary_key_sqlite_without_existing_check() { - // Test PrimaryKey addition when there are no existing CHECK constraints (line 84) - // This should hit the else branch: BuiltQuery::CreateTable(Box::new(create_temp_table)) - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], // No existing CHECK constraints - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should create table without CHECK constraints (using BuiltQuery::CreateTable) - assert!(sql.contains("CREATE TABLE")); - assert!(sql.contains("PRIMARY KEY")); - } - - #[test] - fn test_add_constraint_foreign_key_sqlite_without_existing_check() { - // Test ForeignKey addition when there are no existing CHECK constraints (line 238) - // This should hit the else branch: BuiltQuery::CreateTable(Box::new(create_temp_table)) - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![TableDef { - name: "posts".into(), - columns: vec![ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], // No existing CHECK constraints - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "posts", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should create table without CHECK constraints (using BuiltQuery::CreateTable) - assert!(sql.contains("CREATE TABLE")); - assert!(sql.contains("FOREIGN KEY")); - } - - #[test] - fn test_add_constraint_check_sqlite_with_indexes() { - let 
constraint = TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Index { - name: Some("idx_age".into()), - columns: vec!["age".into()], - }], - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Should recreate index - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_age")); - } - - #[test] - fn test_add_constraint_check_sqlite_with_unique_constraint() { - // Note: Unique indexes are now TableConstraint::Unique - let constraint = TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![TableConstraint::Unique { - name: Some("uq_age".into()), - columns: vec!["age".into()], - }], - }]; - let result = build_add_constraint( - &DatabaseBackend::Sqlite, - "users", - &constraint, - ¤t_schema, - ); - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Sqlite)) - .collect::>() - .join("\n"); - // Unique constraint should be in CREATE TABLE statement - assert!(sql.contains("CREATE TABLE")); - } - - #[test] - fn test_extract_check_clauses_with_mixed_constraints() { - // Test that extract_check_clauses filters out non-Check constraints - let constraints = vec![ - TableConstraint::Check { - name: "chk1".into(), - expr: "a > 0".into(), - }, - TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }, - TableConstraint::Check { - name: "chk2".into(), - expr: "b < 100".into(), - }, - TableConstraint::Unique { - name: Some("uq".into()), - columns: vec!["email".into()], - }, - ]; - let clauses = extract_check_clauses(&constraints); - assert_eq!(clauses.len(), 2); - assert!(clauses[0].contains("chk1")); - assert!(clauses[1].contains("chk2")); - } - - #[test] - fn test_extract_check_clauses_with_no_check_constraints() { - let constraints = vec![ - TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }, - TableConstraint::Unique { - name: None, - columns: vec!["email".into()], - }, - ]; - let clauses = extract_check_clauses(&constraints); - assert!(clauses.is_empty()); - } - - #[test] - fn test_build_create_with_checks_empty_clauses() { - use super::build_create_table_for_backend; - - let create_stmt = build_create_table_for_backend( - &DatabaseBackend::Sqlite, - "test_table", - &[ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - &[], - ); - - // Empty check_clauses should return CreateTable variant - let result = 
build_create_with_checks(&DatabaseBackend::Sqlite, &create_stmt, &[]); - let sql = result.build(DatabaseBackend::Sqlite); - assert!(sql.contains("CREATE TABLE")); - } - - #[test] - fn test_build_create_with_checks_with_clauses() { - use super::build_create_table_for_backend; - - let create_stmt = build_create_table_for_backend( - &DatabaseBackend::Sqlite, - "test_table", - &[ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - &[], - ); - - // Non-empty check_clauses should return Raw variant with embedded checks - let check_clauses = vec!["CONSTRAINT \"chk1\" CHECK (id > 0)".to_string()]; - let result = - build_create_with_checks(&DatabaseBackend::Sqlite, &create_stmt, &check_clauses); - let sql = result.build(DatabaseBackend::Sqlite); - assert!(sql.contains("CREATE TABLE")); - assert!(sql.contains("CONSTRAINT \"chk1\" CHECK (id > 0)")); - } -} +use sea_query::{Alias, ForeignKey, Index, Query, Table}; + +use vespertide_core::{TableConstraint, TableDef}; + +use super::create_table::build_create_table_for_backend; +use super::helpers::{build_schema_statement, to_sea_fk_action}; +use super::rename_table::build_rename_table; +use super::types::{BuiltQuery, DatabaseBackend}; +use crate::error::QueryError; +use crate::sql::RawSql; + +/// Extract CHECK constraint clauses from a list of constraints +fn extract_check_clauses(constraints: &[TableConstraint]) -> Vec { + constraints + .iter() + .filter_map(|c| { + if let TableConstraint::Check { name, expr } = c { + Some(format!("CONSTRAINT \"{}\" CHECK ({})", name, expr)) + } else { + None + } + }) + .collect() +} + +/// Build CREATE TABLE query with CHECK constraints properly embedded +fn build_create_with_checks( + backend: &DatabaseBackend, + create_stmt: &sea_query::TableCreateStatement, + check_clauses: &[String], +) -> BuiltQuery { + if check_clauses.is_empty() { + BuiltQuery::CreateTable(Box::new(create_stmt.clone())) + } else { + let base_sql = build_schema_statement(create_stmt, *backend); + let mut modified_sql = base_sql; + if let Some(pos) = modified_sql.rfind(')') { + let check_sql = check_clauses.join(", "); + modified_sql.insert_str(pos, &format!(", {}", check_sql)); + } + BuiltQuery::Raw(RawSql::per_backend( + modified_sql.clone(), + modified_sql.clone(), + modified_sql, + )) + } +} + +pub fn build_add_constraint( + backend: &DatabaseBackend, + table: &str, + constraint: &TableConstraint, + current_schema: &[TableDef], +) -> Result, QueryError> { + match constraint { + TableConstraint::PrimaryKey { columns, .. } => { + if *backend == DatabaseBackend::Sqlite { + // SQLite does not support ALTER TABLE ... ADD PRIMARY KEY + // Use temporary table approach + let table_def = current_schema + .iter() + .find(|t| t.name == table) + .ok_or_else(|| QueryError::Other(format!( + "Table '{}' not found in current schema. SQLite requires current schema information to add constraints.", + table + )))?; + + // Create new constraints with the added primary key constraint + let mut new_constraints = table_def.constraints.clone(); + new_constraints.push(constraint.clone()); + + // Generate temporary table name + let temp_table = format!("{}_temp", table); + + // 1. 
Create temporary table with new constraints + let create_temp_table = build_create_table_for_backend( + backend, + &temp_table, + &table_def.columns, + &new_constraints, + ); + + // Handle CHECK constraints (sea-query doesn't support them natively) + let check_clauses = extract_check_clauses(&new_constraints); + let create_query = + build_create_with_checks(backend, &create_temp_table, &check_clauses); + + // 2. Copy data + let column_aliases: Vec = table_def + .columns + .iter() + .map(|c| Alias::new(&c.name)) + .collect(); + let mut select_query = Query::select(); + for col_alias in &column_aliases { + select_query = select_query.column(col_alias.clone()).to_owned(); + } + select_query = select_query.from(Alias::new(table)).to_owned(); + + let insert_stmt = Query::insert() + .into_table(Alias::new(&temp_table)) + .columns(column_aliases.clone()) + .select_from(select_query) + .unwrap() + .to_owned(); + let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); + + // 3. Drop original table + let drop_table = Table::drop().table(Alias::new(table)).to_owned(); + let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); + + // 4. Rename temporary table + let rename_query = build_rename_table(&temp_table, table); + + // 5. Recreate indexes from Index constraints + let mut index_queries = Vec::new(); + for c in &table_def.constraints { + if let TableConstraint::Index { + name: idx_name, + columns: idx_cols, + } = c + { + let index_name = vespertide_naming::build_index_name( + table, + idx_cols, + idx_name.as_deref(), + ); + let mut idx_stmt = sea_query::Index::create(); + idx_stmt = idx_stmt.name(&index_name).to_owned(); + for col_name in idx_cols { + idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); + } + idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); + index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); + } + } + + let mut queries = vec![create_query, insert_query, drop_query, rename_query]; + queries.extend(index_queries); + Ok(queries) + } else { + // sea_query lacks ALTER TABLE ADD PRIMARY KEY; emit backend SQL + let pg_cols = columns + .iter() + .map(|c| format!("\"{}\"", c)) + .collect::>() + .join(", "); + let mysql_cols = columns + .iter() + .map(|c| format!("`{}`", c)) + .collect::>() + .join(", "); + let pg_sql = format!("ALTER TABLE \"{}\" ADD PRIMARY KEY ({})", table, pg_cols); + let mysql_sql = format!("ALTER TABLE `{}` ADD PRIMARY KEY ({})", table, mysql_cols); + Ok(vec![BuiltQuery::Raw(RawSql::per_backend( + pg_sql.clone(), + mysql_sql, + pg_sql, + ))]) + } + } + TableConstraint::Unique { name, columns } => { + // SQLite does not support ALTER TABLE ... ADD CONSTRAINT UNIQUE + // Always generate a proper name: uq_{table}_{key} or uq_{table}_{columns} + let index_name = + super::helpers::build_unique_constraint_name(table, columns, name.as_deref()); + let mut idx = Index::create() + .table(Alias::new(table)) + .name(&index_name) + .unique() + .to_owned(); + for col in columns { + idx = idx.col(Alias::new(col)).to_owned(); + } + Ok(vec![BuiltQuery::CreateIndex(Box::new(idx))]) + } + TableConstraint::ForeignKey { + name, + columns, + ref_table, + ref_columns, + on_delete, + on_update, + } => { + // SQLite does not support ALTER TABLE ... ADD CONSTRAINT FOREIGN KEY + if *backend == DatabaseBackend::Sqlite { + // Use temporary table approach for SQLite + let table_def = current_schema + .iter() + .find(|t| t.name == table) + .ok_or_else(|| QueryError::Other(format!( + "Table '{}' not found in current schema. 
SQLite requires current schema information to add constraints.", + table + )))?; + + // Create new constraints with the added foreign key constraint + let mut new_constraints = table_def.constraints.clone(); + new_constraints.push(constraint.clone()); + + // Generate temporary table name + let temp_table = format!("{}_temp", table); + + // 1. Create temporary table with new constraints + let create_temp_table = build_create_table_for_backend( + backend, + &temp_table, + &table_def.columns, + &new_constraints, + ); + + // Handle CHECK constraints (sea-query doesn't support them natively) + let check_clauses = extract_check_clauses(&new_constraints); + let create_query = + build_create_with_checks(backend, &create_temp_table, &check_clauses); + + // 2. Copy data (all columns) + let column_aliases: Vec = table_def + .columns + .iter() + .map(|c| Alias::new(&c.name)) + .collect(); + let mut select_query = Query::select(); + for col_alias in &column_aliases { + select_query = select_query.column(col_alias.clone()).to_owned(); + } + select_query = select_query.from(Alias::new(table)).to_owned(); + + let insert_stmt = Query::insert() + .into_table(Alias::new(&temp_table)) + .columns(column_aliases.clone()) + .select_from(select_query) + .unwrap() + .to_owned(); + let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); + + // 3. Drop original table + let drop_table = Table::drop().table(Alias::new(table)).to_owned(); + let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); + + // 4. Rename temporary table to original name + let rename_query = build_rename_table(&temp_table, table); + + // 5. Recreate indexes from Index constraints + let mut index_queries = Vec::new(); + for c in &table_def.constraints { + if let TableConstraint::Index { + name: idx_name, + columns: idx_cols, + } = c + { + let index_name = vespertide_naming::build_index_name( + table, + idx_cols, + idx_name.as_deref(), + ); + let mut idx_stmt = sea_query::Index::create(); + idx_stmt = idx_stmt.name(&index_name).to_owned(); + for col_name in idx_cols { + idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); + } + idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); + index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); + } + } + + let mut queries = vec![create_query, insert_query, drop_query, rename_query]; + queries.extend(index_queries); + Ok(queries) + } else { + // Build foreign key using ForeignKey::create + let fk_name = + vespertide_naming::build_foreign_key_name(table, columns, name.as_deref()); + let mut fk = ForeignKey::create(); + fk = fk.name(&fk_name).to_owned(); + fk = fk.from_tbl(Alias::new(table)).to_owned(); + for col in columns { + fk = fk.from_col(Alias::new(col)).to_owned(); + } + fk = fk.to_tbl(Alias::new(ref_table)).to_owned(); + for col in ref_columns { + fk = fk.to_col(Alias::new(col)).to_owned(); + } + if let Some(action) = on_delete { + fk = fk.on_delete(to_sea_fk_action(action)).to_owned(); + } + if let Some(action) = on_update { + fk = fk.on_update(to_sea_fk_action(action)).to_owned(); + } + Ok(vec![BuiltQuery::CreateForeignKey(Box::new(fk))]) + } + } + TableConstraint::Index { name, columns } => { + // Index constraints are simple CREATE INDEX statements for all backends + let index_name = vespertide_naming::build_index_name(table, columns, name.as_deref()); + let mut idx = Index::create() + .table(Alias::new(table)) + .name(&index_name) + .to_owned(); + for col in columns { + idx = idx.col(Alias::new(col)).to_owned(); + } + 
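// Unlike the PrimaryKey / ForeignKey / Check arms above, this arm never needs
+ // the SQLite temp-table rebuild: CREATE INDEX works the same way on all three
+ // backends, so a single statement is enough. Roughly (illustrative only; the
+ // exact name comes from vespertide_naming::build_index_name and the quoting
+ // from sea-query, so this is an assumed shape, not a verified snapshot):
+ //   CREATE INDEX "<derived index name>" ON "users" ("age")
+ 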
Ok(vec![BuiltQuery::CreateIndex(Box::new(idx))]) + } + TableConstraint::Check { name, expr } => { + // SQLite does not support ALTER TABLE ... ADD CONSTRAINT CHECK + if *backend == DatabaseBackend::Sqlite { + // Use temporary table approach for SQLite + let table_def = current_schema + .iter() + .find(|t| t.name == table) + .ok_or_else(|| QueryError::Other(format!( + "Table '{}' not found in current schema. SQLite requires current schema information to add constraints.", + table + )))?; + + // Create new constraints with the added check constraint + let mut new_constraints = table_def.constraints.clone(); + new_constraints.push(constraint.clone()); + + // Generate temporary table name + let temp_table = format!("{}_temp", table); + + // 1. Create temporary table with new constraints + let create_temp_table = build_create_table_for_backend( + backend, + &temp_table, + &table_def.columns, + &new_constraints, + ); + + // Handle CHECK constraints (sea-query doesn't support them natively) + let check_clauses = extract_check_clauses(&new_constraints); + let create_query = + build_create_with_checks(backend, &create_temp_table, &check_clauses); + + // 2. Copy data (all columns) + let column_aliases: Vec = table_def + .columns + .iter() + .map(|c| Alias::new(&c.name)) + .collect(); + let mut select_query = Query::select(); + for col_alias in &column_aliases { + select_query = select_query.column(col_alias.clone()).to_owned(); + } + select_query = select_query.from(Alias::new(table)).to_owned(); + + let insert_stmt = Query::insert() + .into_table(Alias::new(&temp_table)) + .columns(column_aliases.clone()) + .select_from(select_query) + .unwrap() + .to_owned(); + let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); + + // 3. Drop original table + let drop_table = Table::drop().table(Alias::new(table)).to_owned(); + let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); + + // 4. Rename temporary table to original name + let rename_query = build_rename_table(&temp_table, table); + + // 5. 
Recreate indexes from Index constraints + let mut index_queries = Vec::new(); + for c in &table_def.constraints { + if let TableConstraint::Index { + name: idx_name, + columns: idx_cols, + } = c + { + let index_name = vespertide_naming::build_index_name( + table, + idx_cols, + idx_name.as_deref(), + ); + let mut idx_stmt = sea_query::Index::create(); + idx_stmt = idx_stmt.name(&index_name).to_owned(); + for col_name in idx_cols { + idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); + } + idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); + index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); + } + } + + let mut queries = vec![create_query, insert_query, drop_query, rename_query]; + queries.extend(index_queries); + Ok(queries) + } else { + let pg_sql = format!( + "ALTER TABLE \"{}\" ADD CONSTRAINT \"{}\" CHECK ({})", + table, name, expr + ); + let mysql_sql = format!( + "ALTER TABLE `{}` ADD CONSTRAINT `{}` CHECK ({})", + table, name, expr + ); + Ok(vec![BuiltQuery::Raw(RawSql::per_backend( + pg_sql.clone(), + mysql_sql, + pg_sql, + ))]) + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::sql::types::DatabaseBackend; + use insta::{assert_snapshot, with_settings}; + use rstest::rstest; + use vespertide_core::{ + ColumnDef, ColumnType, ReferenceAction, SimpleColumnType, TableConstraint, TableDef, + }; + + #[rstest] + #[case::add_constraint_primary_key_postgres( + "add_constraint_primary_key_postgres", + DatabaseBackend::Postgres, + &["ALTER TABLE \"users\" ADD PRIMARY KEY (\"id\")"] + )] + #[case::add_constraint_primary_key_mysql( + "add_constraint_primary_key_mysql", + DatabaseBackend::MySql, + &["ALTER TABLE `users` ADD PRIMARY KEY (`id`)"] + )] + #[case::add_constraint_primary_key_sqlite( + "add_constraint_primary_key_sqlite", + DatabaseBackend::Sqlite, + &["CREATE TABLE \"users_temp\""] + )] + #[case::add_constraint_unique_named_postgres( + "add_constraint_unique_named_postgres", + DatabaseBackend::Postgres, + &["CREATE UNIQUE INDEX \"uq_users__uq_email\" ON \"users\" (\"email\")"] + )] + #[case::add_constraint_unique_named_mysql( + "add_constraint_unique_named_mysql", + DatabaseBackend::MySql, + &["CREATE UNIQUE INDEX `uq_users__uq_email` ON `users` (`email`)"] + )] + #[case::add_constraint_unique_named_sqlite( + "add_constraint_unique_named_sqlite", + DatabaseBackend::Sqlite, + &["CREATE UNIQUE INDEX \"uq_users__uq_email\" ON \"users\" (\"email\")"] + )] + #[case::add_constraint_foreign_key_postgres( + "add_constraint_foreign_key_postgres", + DatabaseBackend::Postgres, + &["FOREIGN KEY (\"user_id\")", "REFERENCES \"users\" (\"id\")", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] + )] + #[case::add_constraint_foreign_key_mysql( + "add_constraint_foreign_key_mysql", + DatabaseBackend::MySql, + &["FOREIGN KEY (`user_id`)", "REFERENCES `users` (`id`)", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] + )] + #[case::add_constraint_foreign_key_sqlite( + "add_constraint_foreign_key_sqlite", + DatabaseBackend::Sqlite, + &["CREATE TABLE \"users_temp\""] + )] + #[case::add_constraint_check_named_postgres( + "add_constraint_check_named_postgres", + DatabaseBackend::Postgres, + &["ADD CONSTRAINT \"chk_age\" CHECK (age > 0)"] + )] + #[case::add_constraint_check_named_mysql( + "add_constraint_check_named_mysql", + DatabaseBackend::MySql, + &["ADD CONSTRAINT `chk_age` CHECK (age > 0)"] + )] + #[case::add_constraint_check_named_sqlite( + "add_constraint_check_named_sqlite", + DatabaseBackend::Sqlite, + &["CREATE TABLE \"users_temp\""] + )] + fn test_add_constraint( 
+ #[case] title: &str, + #[case] backend: DatabaseBackend, + #[case] expected: &[&str], + ) { + let constraint = if title.contains("primary_key") { + TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + } + } else if title.contains("unique") { + TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + } + } else if title.contains("foreign_key") { + TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: Some(ReferenceAction::Cascade), + on_update: Some(ReferenceAction::Restrict), + } + } else { + TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + } + }; + + // For SQLite, we need to provide current schema + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: if title.contains("foreign_key") { + vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ] + } else { + vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: if title.contains("check") { + "age".into() + } else { + "email".into() + }, + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ] + }, + constraints: vec![], + }]; + + let result = build_add_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result[0].build(backend); + for exp in expected { + assert!( + sql.contains(exp), + "Expected SQL to contain '{}', got: {}", + exp, + sql + ); + } + + with_settings!({ snapshot_suffix => format!("add_constraint_{}", title) }, { + assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::>().join("\n")); + }); + } + + #[test] + fn test_add_constraint_primary_key_sqlite_table_not_found() { + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let current_schema = vec![]; // Empty schema - table not found + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + ¤t_schema, + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Table 'users' not found in current schema")); + } + + #[test] + fn test_add_constraint_primary_key_sqlite_with_check_constraints() { + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Check { + name: "chk_id".into(), + expr: "id > 0".into(), + }], + }]; + let 
result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should include CHECK constraint in CREATE TABLE + assert!(sql.contains("CONSTRAINT \"chk_id\" CHECK")); + } + + #[test] + fn test_add_constraint_primary_key_sqlite_with_indexes() { + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Index { + name: Some("idx_id".into()), + columns: vec!["id".into()], + }], + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should recreate index + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_id")); + } + + #[test] + fn test_add_constraint_primary_key_sqlite_with_unique_constraint() { + // Note: Unique indexes are now TableConstraint::Unique, not Index + // Index constraints don't have a unique flag - use Unique constraint instead + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }], + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Unique constraint should be in CREATE TABLE statement (for SQLite temp table approach) + assert!(sql.contains("CREATE TABLE")); + } + + #[test] + fn test_add_constraint_foreign_key_sqlite_table_not_found() { + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![]; // Empty schema - table not found + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "posts", + &constraint, + ¤t_schema, + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Table 'posts' not found in current schema")); + } + + #[test] + fn test_add_constraint_foreign_key_sqlite_with_check_constraints() { + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef 
{ + name: "posts".into(), + description: None, + columns: vec![ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Check { + name: "chk_user_id".into(), + expr: "user_id > 0".into(), + }], + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "posts", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should include CHECK constraint in CREATE TABLE + assert!(sql.contains("CONSTRAINT \"chk_user_id\" CHECK")); + } + + #[test] + fn test_add_constraint_foreign_key_sqlite_with_indexes() { + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef { + name: "posts".into(), + description: None, + columns: vec![ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Index { + name: Some("idx_user_id".into()), + columns: vec!["user_id".into()], + }], + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "posts", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should recreate index + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_user_id")); + } + + #[test] + fn test_add_constraint_foreign_key_sqlite_with_unique_constraint() { + // Note: Unique indexes are now TableConstraint::Unique + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef { + name: "posts".into(), + description: None, + columns: vec![ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Unique { + name: Some("uq_user_id".into()), + columns: vec!["user_id".into()], + }], + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "posts", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Unique constraint should be in CREATE TABLE statement + assert!(sql.contains("CREATE TABLE")); + } + + #[test] + fn test_add_constraint_check_sqlite_table_not_found() { + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let current_schema = vec![]; // Empty schema - table not found + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + ¤t_schema, + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + 
assert!(err_msg.contains("Table 'users' not found in current schema")); + } + + #[test] + fn test_add_constraint_check_sqlite_without_existing_check() { + // Test when there are no existing CHECK constraints (line 376) + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], // No existing CHECK constraints + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + &current_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::<Vec<_>>() + .join("\n"); + // Should create table with CHECK constraint + assert!(sql.contains("CREATE TABLE")); + assert!(sql.contains("CONSTRAINT \"chk_age\" CHECK")); + } + + #[test] + fn test_add_constraint_primary_key_sqlite_without_existing_check() { + // Test PrimaryKey addition when there are no existing CHECK constraints (line 84) + // This should hit the else branch: BuiltQuery::CreateTable(Box::new(create_temp_table)) + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], // No existing CHECK constraints + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + &current_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::<Vec<_>>() + .join("\n"); + // Should create table without CHECK constraints (using BuiltQuery::CreateTable) + assert!(sql.contains("CREATE TABLE")); + assert!(sql.contains("PRIMARY KEY")); + } + + #[test] + fn test_add_constraint_foreign_key_sqlite_without_existing_check() { + // Test ForeignKey addition when there are no existing CHECK constraints (line 238) + // This should hit the else branch: BuiltQuery::CreateTable(Box::new(create_temp_table)) + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef { + name: "posts".into(), + description: None, + columns: vec![ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], // No existing CHECK constraints + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "posts", + &constraint, + &current_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::<Vec<_>>() + .join("\n"); + // Should create table without CHECK constraints (using BuiltQuery::CreateTable) + 
assert!(sql.contains("CREATE TABLE")); + assert!(sql.contains("FOREIGN KEY")); + } + + #[test] + fn test_add_constraint_check_sqlite_with_indexes() { + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Index { + name: Some("idx_age".into()), + columns: vec!["age".into()], + }], + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Should recreate index + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_age")); + } + + #[test] + fn test_add_constraint_check_sqlite_with_unique_constraint() { + // Note: Unique indexes are now TableConstraint::Unique + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![TableConstraint::Unique { + name: Some("uq_age".into()), + columns: vec!["age".into()], + }], + }]; + let result = build_add_constraint( + &DatabaseBackend::Sqlite, + "users", + &constraint, + ¤t_schema, + ); + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Sqlite)) + .collect::>() + .join("\n"); + // Unique constraint should be in CREATE TABLE statement + assert!(sql.contains("CREATE TABLE")); + } + + #[test] + fn test_extract_check_clauses_with_mixed_constraints() { + // Test that extract_check_clauses filters out non-Check constraints + let constraints = vec![ + TableConstraint::Check { + name: "chk1".into(), + expr: "a > 0".into(), + }, + TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }, + TableConstraint::Check { + name: "chk2".into(), + expr: "b < 100".into(), + }, + TableConstraint::Unique { + name: Some("uq".into()), + columns: vec!["email".into()], + }, + ]; + let clauses = extract_check_clauses(&constraints); + assert_eq!(clauses.len(), 2); + assert!(clauses[0].contains("chk1")); + assert!(clauses[1].contains("chk2")); + } + + #[test] + fn test_extract_check_clauses_with_no_check_constraints() { + let constraints = vec![ + TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }, + TableConstraint::Unique { + name: None, + columns: vec!["email".into()], + }, + ]; + let clauses = extract_check_clauses(&constraints); + assert!(clauses.is_empty()); + } + + #[test] + fn test_build_create_with_checks_empty_clauses() { + use super::build_create_table_for_backend; + + let create_stmt = build_create_table_for_backend( + &DatabaseBackend::Sqlite, + "test_table", + &[ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: 
None, + unique: None, + index: None, + foreign_key: None, + }], + &[], + ); + + // Empty check_clauses should return CreateTable variant + let result = build_create_with_checks(&DatabaseBackend::Sqlite, &create_stmt, &[]); + let sql = result.build(DatabaseBackend::Sqlite); + assert!(sql.contains("CREATE TABLE")); + } + + #[test] + fn test_build_create_with_checks_with_clauses() { + use super::build_create_table_for_backend; + + let create_stmt = build_create_table_for_backend( + &DatabaseBackend::Sqlite, + "test_table", + &[ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + &[], + ); + + // Non-empty check_clauses should return Raw variant with embedded checks + let check_clauses = vec!["CONSTRAINT \"chk1\" CHECK (id > 0)".to_string()]; + let result = + build_create_with_checks(&DatabaseBackend::Sqlite, &create_stmt, &check_clauses); + let sql = result.build(DatabaseBackend::Sqlite); + assert!(sql.contains("CREATE TABLE")); + assert!(sql.contains("CONSTRAINT \"chk1\" CHECK (id > 0)")); + } +} diff --git a/crates/vespertide-query/src/sql/create_table.rs b/crates/vespertide-query/src/sql/create_table.rs index b7dfc11..ffa76ba 100644 --- a/crates/vespertide-query/src/sql/create_table.rs +++ b/crates/vespertide-query/src/sql/create_table.rs @@ -147,6 +147,7 @@ pub fn build_create_table( // Normalize the table to convert inline constraints to table-level // This ensures we don't have duplicate constraints if both inline and table-level are defined let table_def = vespertide_core::TableDef { + description: None, name: table.to_string(), columns: columns.to_vec(), constraints: constraints.to_vec(), diff --git a/crates/vespertide-query/src/sql/mod.rs b/crates/vespertide-query/src/sql/mod.rs index 0141e46..67abec5 100644 --- a/crates/vespertide-query/src/sql/mod.rs +++ b/crates/vespertide-query/src/sql/mod.rs @@ -1,1374 +1,1385 @@ -pub mod add_column; -pub mod add_constraint; -pub mod create_table; -pub mod delete_column; -pub mod delete_table; -pub mod helpers; -pub mod modify_column_comment; -pub mod modify_column_default; -pub mod modify_column_nullable; -pub mod modify_column_type; -pub mod raw_sql; -pub mod remove_constraint; -pub mod rename_column; -pub mod rename_table; -pub mod types; - -pub use helpers::*; -pub use types::{BuiltQuery, DatabaseBackend, RawSql}; - -use crate::error::QueryError; -use vespertide_core::{MigrationAction, TableDef}; - -use self::{ - add_column::build_add_column, add_constraint::build_add_constraint, - create_table::build_create_table, delete_column::build_delete_column, - delete_table::build_delete_table, modify_column_comment::build_modify_column_comment, - modify_column_default::build_modify_column_default, - modify_column_nullable::build_modify_column_nullable, - modify_column_type::build_modify_column_type, raw_sql::build_raw_sql, - remove_constraint::build_remove_constraint, rename_column::build_rename_column, - rename_table::build_rename_table, -}; - -pub fn build_action_queries( - backend: &DatabaseBackend, - action: &MigrationAction, - current_schema: &[TableDef], -) -> Result, QueryError> { - match action { - MigrationAction::CreateTable { - table, - columns, - constraints, - } => build_create_table(backend, table, columns, constraints), - - MigrationAction::DeleteTable { table } => Ok(vec![build_delete_table(table)]), - - MigrationAction::AddColumn { - table, - column, - fill_with, - 
} => build_add_column(backend, table, column, fill_with.as_deref(), current_schema), - - MigrationAction::RenameColumn { table, from, to } => { - Ok(vec![build_rename_column(table, from, to)]) - } - - MigrationAction::DeleteColumn { table, column } => { - // Find the column type from current schema for enum DROP TYPE support - let column_type = current_schema - .iter() - .find(|t| t.name == *table) - .and_then(|t| t.columns.iter().find(|c| c.name == *column)) - .map(|c| &c.r#type); - Ok(build_delete_column(table, column, column_type)) - } - - MigrationAction::ModifyColumnType { - table, - column, - new_type, - } => build_modify_column_type(backend, table, column, new_type, current_schema), - - MigrationAction::ModifyColumnNullable { - table, - column, - nullable, - fill_with, - } => build_modify_column_nullable( - backend, - table, - column, - *nullable, - fill_with.as_deref(), - current_schema, - ), - - MigrationAction::ModifyColumnDefault { - table, - column, - new_default, - } => build_modify_column_default( - backend, - table, - column, - new_default.as_deref(), - current_schema, - ), - - MigrationAction::ModifyColumnComment { - table, - column, - new_comment, - } => build_modify_column_comment( - backend, - table, - column, - new_comment.as_deref(), - current_schema, - ), - - MigrationAction::RenameTable { from, to } => Ok(vec![build_rename_table(from, to)]), - - MigrationAction::RawSql { sql } => Ok(vec![build_raw_sql(sql.clone())]), - - MigrationAction::AddConstraint { table, constraint } => { - build_add_constraint(backend, table, constraint, current_schema) - } - - MigrationAction::RemoveConstraint { table, constraint } => { - build_remove_constraint(backend, table, constraint, current_schema) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use insta::{assert_snapshot, with_settings}; - use rstest::rstest; - use vespertide_core::schema::primary_key::PrimaryKeySyntax; - use vespertide_core::{ - ColumnDef, ColumnType, MigrationAction, ReferenceAction, SimpleColumnType, TableConstraint, - }; - - fn col(name: &str, ty: ColumnType) -> ColumnDef { - ColumnDef { - name: name.to_string(), - r#type: ty, - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - } - } - - #[test] - fn test_backend_specific_quoting() { - let action = MigrationAction::CreateTable { - table: "users".into(), - columns: vec![col("id", ColumnType::Simple(SimpleColumnType::Integer))], - constraints: vec![], - }; - let result = build_action_queries(&DatabaseBackend::Postgres, &action, &[]).unwrap(); - - // PostgreSQL uses double quotes - let pg_sql = result[0].build(DatabaseBackend::Postgres); - assert!(pg_sql.contains("\"users\"")); - - // MySQL uses backticks - let mysql_sql = result[0].build(DatabaseBackend::MySql); - assert!(mysql_sql.contains("`users`")); - - // SQLite uses double quotes - let sqlite_sql = result[0].build(DatabaseBackend::Sqlite); - assert!(sqlite_sql.contains("\"users\"")); - } - - #[rstest] - #[case::create_table_with_default_postgres( - "create_table_with_default_postgres", - MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "status".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: Some("'active'".into()), - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }, - DatabaseBackend::Postgres, - &["DEFAULT", "'active'"] - )] - #[case::create_table_with_default_mysql( - 
"create_table_with_default_mysql", - MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "status".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: Some("'active'".into()), - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }, - DatabaseBackend::Postgres, - &["DEFAULT", "'active'"] - )] - #[case::create_table_with_default_sqlite( - "create_table_with_default_sqlite", - MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "status".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: Some("'active'".into()), - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }, - DatabaseBackend::Postgres, - &["DEFAULT", "'active'"] - )] - #[case::create_table_with_inline_primary_key_postgres( - "create_table_with_inline_primary_key_postgres", - MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: Some(PrimaryKeySyntax::Bool(true)), - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }, - DatabaseBackend::Postgres, - &["PRIMARY KEY"] - )] - #[case::create_table_with_inline_primary_key_mysql( - "create_table_with_inline_primary_key_mysql", - MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: Some(PrimaryKeySyntax::Bool(true)), - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }, - DatabaseBackend::Postgres, - &["PRIMARY KEY"] - )] - #[case::create_table_with_inline_primary_key_sqlite( - "create_table_with_inline_primary_key_sqlite", - MigrationAction::CreateTable { - table: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: Some(PrimaryKeySyntax::Bool(true)), - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }, - DatabaseBackend::Postgres, - &["PRIMARY KEY"] - )] - #[case::create_table_with_fk_postgres( - "create_table_with_fk_postgres", - MigrationAction::CreateTable { - table: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col("user_id", ColumnType::Simple(SimpleColumnType::Integer)), - ], - constraints: vec![TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: Some(ReferenceAction::Cascade), - on_update: Some(ReferenceAction::Restrict), - }], - }, - DatabaseBackend::Postgres, - &["REFERENCES \"users\" (\"id\")", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] - )] - #[case::create_table_with_fk_mysql( - "create_table_with_fk_mysql", - MigrationAction::CreateTable { - table: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col("user_id", ColumnType::Simple(SimpleColumnType::Integer)), - ], - constraints: vec![TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: 
"users".into(), - ref_columns: vec!["id".into()], - on_delete: Some(ReferenceAction::Cascade), - on_update: Some(ReferenceAction::Restrict), - }], - }, - DatabaseBackend::Postgres, - &["REFERENCES \"users\" (\"id\")", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] - )] - #[case::create_table_with_fk_sqlite( - "create_table_with_fk_sqlite", - MigrationAction::CreateTable { - table: "posts".into(), - columns: vec![ - col("id", ColumnType::Simple(SimpleColumnType::Integer)), - col("user_id", ColumnType::Simple(SimpleColumnType::Integer)), - ], - constraints: vec![TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: Some(ReferenceAction::Cascade), - on_update: Some(ReferenceAction::Restrict), - }], - }, - DatabaseBackend::Postgres, - &["REFERENCES \"users\" (\"id\")", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] - )] - fn test_build_migration_action( - #[case] title: &str, - #[case] action: MigrationAction, - #[case] backend: DatabaseBackend, - #[case] expected: &[&str], - ) { - let result = build_action_queries(&backend, &action, &[]).unwrap(); - let sql = result[0].build(backend); - for exp in expected { - assert!( - sql.contains(exp), - "Expected SQL to contain '{}', got: {}", - exp, - sql - ); - } - - with_settings!({ snapshot_suffix => format!("build_migration_action_{}", title) }, { - assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::>().join("\n")); - }); - } - - #[rstest] - #[case::rename_column_postgres(DatabaseBackend::Postgres)] - #[case::rename_column_mysql(DatabaseBackend::MySql)] - #[case::rename_column_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_rename_column(#[case] backend: DatabaseBackend) { - // Test MigrationAction::RenameColumn (lines 51-52) - let action = MigrationAction::RenameColumn { - table: "users".into(), - from: "old_name".into(), - to: "new_name".into(), - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - assert_eq!(result.len(), 1); - let sql = result[0].build(backend); - assert!(sql.contains("RENAME")); - assert!(sql.contains("old_name")); - assert!(sql.contains("new_name")); - - with_settings!({ snapshot_suffix => format!("rename_column_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::delete_column_postgres(DatabaseBackend::Postgres)] - #[case::delete_column_mysql(DatabaseBackend::MySql)] - #[case::delete_column_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_delete_column(#[case] backend: DatabaseBackend) { - // Test MigrationAction::DeleteColumn (lines 55-56) - let action = MigrationAction::DeleteColumn { - table: "users".into(), - column: "email".into(), - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - assert_eq!(result.len(), 1); - let sql = result[0].build(backend); - assert!(sql.contains("DROP COLUMN")); - assert!(sql.contains("email")); - - with_settings!({ snapshot_suffix => format!("delete_column_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::modify_column_type_postgres(DatabaseBackend::Postgres)] - #[case::modify_column_type_mysql(DatabaseBackend::MySql)] - #[case::modify_column_type_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_modify_column_type(#[case] backend: DatabaseBackend) { - // Test MigrationAction::ModifyColumnType (lines 60-63) - let action = MigrationAction::ModifyColumnType { - table: "users".into(), - column: "age".into(), - new_type: 
ColumnType::Simple(SimpleColumnType::BigInt), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - assert!(!result.is_empty()); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - assert!(sql.contains("ALTER TABLE")); - - with_settings!({ snapshot_suffix => format!("modify_column_type_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_index_constraint_postgres(DatabaseBackend::Postgres)] - #[case::remove_index_constraint_mysql(DatabaseBackend::MySql)] - #[case::remove_index_constraint_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_remove_index_constraint(#[case] backend: DatabaseBackend) { - // Test MigrationAction::RemoveConstraint with Index variant - let action = MigrationAction::RemoveConstraint { - table: "users".into(), - constraint: TableConstraint::Index { - name: Some("idx_email".into()), - columns: vec!["email".into()], - }, - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - assert_eq!(result.len(), 1); - let sql = result[0].build(backend); - assert!(sql.contains("DROP INDEX")); - assert!(sql.contains("idx_email")); - - with_settings!({ snapshot_suffix => format!("remove_index_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::rename_table_postgres(DatabaseBackend::Postgres)] - #[case::rename_table_mysql(DatabaseBackend::MySql)] - #[case::rename_table_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_rename_table(#[case] backend: DatabaseBackend) { - // Test MigrationAction::RenameTable (line 69) - let action = MigrationAction::RenameTable { - from: "old_table".into(), - to: "new_table".into(), - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - assert_eq!(result.len(), 1); - let sql = result[0].build(backend); - assert!(sql.contains("RENAME")); - assert!(sql.contains("old_table")); - assert!(sql.contains("new_table")); - - with_settings!({ snapshot_suffix => format!("rename_table_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::add_constraint_postgres(DatabaseBackend::Postgres)] - #[case::add_constraint_mysql(DatabaseBackend::MySql)] - #[case::add_constraint_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_add_constraint(#[case] backend: DatabaseBackend) { - // Test MigrationAction::AddConstraint (lines 73-74) - let action = MigrationAction::AddConstraint { - table: "users".into(), - constraint: TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![], - }]; - let result = 
build_action_queries(&backend, &action, ¤t_schema).unwrap(); - assert!(!result.is_empty()); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - assert!(sql.contains("UNIQUE") || sql.contains("uq_email")); - - with_settings!({ snapshot_suffix => format!("add_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_constraint_postgres(DatabaseBackend::Postgres)] - #[case::remove_constraint_mysql(DatabaseBackend::MySql)] - #[case::remove_constraint_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_remove_constraint(#[case] backend: DatabaseBackend) { - // Test MigrationAction::RemoveConstraint (lines 77-78) - let action = MigrationAction::RemoveConstraint { - table: "users".into(), - constraint: TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }], - }]; - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - assert!(!result.is_empty()); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - assert!(sql.contains("DROP") || sql.contains("CONSTRAINT")); - - with_settings!({ snapshot_suffix => format!("remove_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::add_column_postgres(DatabaseBackend::Postgres)] - #[case::add_column_mysql(DatabaseBackend::MySql)] - #[case::add_column_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_add_column(#[case] backend: DatabaseBackend) { - // Test MigrationAction::AddColumn (lines 46-49) - let action = MigrationAction::AddColumn { - table: "users".into(), - column: Box::new(ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }), - fill_with: None, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - assert!(!result.is_empty()); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - assert!(sql.contains("ALTER TABLE")); - assert!(sql.contains("ADD COLUMN") || sql.contains("ADD")); - - with_settings!({ snapshot_suffix => format!("add_column_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::add_index_constraint_postgres(DatabaseBackend::Postgres)] - #[case::add_index_constraint_mysql(DatabaseBackend::MySql)] - #[case::add_index_constraint_sqlite(DatabaseBackend::Sqlite)] - fn 
test_build_action_queries_add_index_constraint(#[case] backend: DatabaseBackend) { - // Test MigrationAction::AddConstraint with Index variant - let action = MigrationAction::AddConstraint { - table: "users".into(), - constraint: TableConstraint::Index { - name: Some("idx_email".into()), - columns: vec!["email".into()], - }, - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - assert_eq!(result.len(), 1); - let sql = result[0].build(backend); - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_email")); - - with_settings!({ snapshot_suffix => format!("add_index_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::raw_sql_postgres(DatabaseBackend::Postgres)] - #[case::raw_sql_mysql(DatabaseBackend::MySql)] - #[case::raw_sql_sqlite(DatabaseBackend::Sqlite)] - fn test_build_action_queries_raw_sql(#[case] backend: DatabaseBackend) { - // Test MigrationAction::RawSql (line 71) - let action = MigrationAction::RawSql { - sql: "SELECT 1;".into(), - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - assert_eq!(result.len(), 1); - let sql = result[0].build(backend); - assert_eq!(sql, "SELECT 1;"); - - with_settings!({ snapshot_suffix => format!("raw_sql_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - // Comprehensive index naming tests - #[rstest] - #[case::add_index_with_custom_name_postgres( - DatabaseBackend::Postgres, - "hello", - vec!["email", "password"] - )] - #[case::add_index_with_custom_name_mysql( - DatabaseBackend::MySql, - "hello", - vec!["email", "password"] - )] - #[case::add_index_with_custom_name_sqlite( - DatabaseBackend::Sqlite, - "hello", - vec!["email", "password"] - )] - #[case::add_index_single_column_postgres( - DatabaseBackend::Postgres, - "email_idx", - vec!["email"] - )] - #[case::add_index_single_column_mysql( - DatabaseBackend::MySql, - "email_idx", - vec!["email"] - )] - #[case::add_index_single_column_sqlite( - DatabaseBackend::Sqlite, - "email_idx", - vec!["email"] - )] - fn test_add_index_with_custom_name( - #[case] backend: DatabaseBackend, - #[case] index_name: &str, - #[case] columns: Vec<&str>, - ) { - // Test that custom index names follow ix_table__name pattern - let action = MigrationAction::AddConstraint { - table: "user".into(), - constraint: TableConstraint::Index { - name: Some(index_name.into()), - columns: columns.iter().map(|s| s.to_string()).collect(), - }, - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - let sql = result[0].build(backend); - - // Should use ix_table__name pattern - let expected_name = format!("ix_user__{}", index_name); - assert!( - sql.contains(&expected_name), - "Expected index name '{}' in SQL: {}", - expected_name, - sql - ); - - with_settings!({ snapshot_suffix => format!("add_index_custom_{}_{:?}", index_name, backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::add_unnamed_index_single_column_postgres( - DatabaseBackend::Postgres, - vec!["email"] - )] - #[case::add_unnamed_index_single_column_mysql( - DatabaseBackend::MySql, - vec!["email"] - )] - #[case::add_unnamed_index_single_column_sqlite( - DatabaseBackend::Sqlite, - vec!["email"] - )] - #[case::add_unnamed_index_multiple_columns_postgres( - DatabaseBackend::Postgres, - vec!["email", "password"] - )] - #[case::add_unnamed_index_multiple_columns_mysql( - DatabaseBackend::MySql, - vec!["email", "password"] - )] - #[case::add_unnamed_index_multiple_columns_sqlite( - DatabaseBackend::Sqlite, - vec!["email", 
"password"] - )] - fn test_add_unnamed_index(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { - // Test that unnamed indexes follow ix_table__col1_col2 pattern - let action = MigrationAction::AddConstraint { - table: "user".into(), - constraint: TableConstraint::Index { - name: None, - columns: columns.iter().map(|s| s.to_string()).collect(), - }, - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - let sql = result[0].build(backend); - - // Should use ix_table__col1_col2... pattern - let expected_name = format!("ix_user__{}", columns.join("_")); - assert!( - sql.contains(&expected_name), - "Expected index name '{}' in SQL: {}", - expected_name, - sql - ); - - with_settings!({ snapshot_suffix => format!("add_unnamed_index_{}_{:?}", columns.join("_"), backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_index_with_custom_name_postgres( - DatabaseBackend::Postgres, - "hello", - vec!["email", "password"] - )] - #[case::remove_index_with_custom_name_mysql( - DatabaseBackend::MySql, - "hello", - vec!["email", "password"] - )] - #[case::remove_index_with_custom_name_sqlite( - DatabaseBackend::Sqlite, - "hello", - vec!["email", "password"] - )] - fn test_remove_index_with_custom_name( - #[case] backend: DatabaseBackend, - #[case] index_name: &str, - #[case] columns: Vec<&str>, - ) { - // Test that removing custom index uses ix_table__name pattern - let action = MigrationAction::RemoveConstraint { - table: "user".into(), - constraint: TableConstraint::Index { - name: Some(index_name.into()), - columns: columns.iter().map(|s| s.to_string()).collect(), - }, - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - let sql = result[0].build(backend); - - // Should use ix_table__name pattern - let expected_name = format!("ix_user__{}", index_name); - assert!( - sql.contains(&expected_name), - "Expected index name '{}' in SQL: {}", - expected_name, - sql - ); - - with_settings!({ snapshot_suffix => format!("remove_index_custom_{}_{:?}", index_name, backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_unnamed_index_single_column_postgres( - DatabaseBackend::Postgres, - vec!["email"] - )] - #[case::remove_unnamed_index_single_column_mysql( - DatabaseBackend::MySql, - vec!["email"] - )] - #[case::remove_unnamed_index_single_column_sqlite( - DatabaseBackend::Sqlite, - vec!["email"] - )] - #[case::remove_unnamed_index_multiple_columns_postgres( - DatabaseBackend::Postgres, - vec!["email", "password"] - )] - #[case::remove_unnamed_index_multiple_columns_mysql( - DatabaseBackend::MySql, - vec!["email", "password"] - )] - #[case::remove_unnamed_index_multiple_columns_sqlite( - DatabaseBackend::Sqlite, - vec!["email", "password"] - )] - fn test_remove_unnamed_index(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { - // Test that removing unnamed indexes uses ix_table__col1_col2 pattern - let action = MigrationAction::RemoveConstraint { - table: "user".into(), - constraint: TableConstraint::Index { - name: None, - columns: columns.iter().map(|s| s.to_string()).collect(), - }, - }; - let result = build_action_queries(&backend, &action, &[]).unwrap(); - let sql = result[0].build(backend); - - // Should use ix_table__col1_col2... 
pattern - let expected_name = format!("ix_user__{}", columns.join("_")); - assert!( - sql.contains(&expected_name), - "Expected index name '{}' in SQL: {}", - expected_name, - sql - ); - - with_settings!({ snapshot_suffix => format!("remove_unnamed_index_{}_{:?}", columns.join("_"), backend) }, { - assert_snapshot!(sql); - }); - } - - // Comprehensive unique constraint naming tests - #[rstest] - #[case::add_unique_with_custom_name_postgres( - DatabaseBackend::Postgres, - "email_unique", - vec!["email"] - )] - #[case::add_unique_with_custom_name_mysql( - DatabaseBackend::MySql, - "email_unique", - vec!["email"] - )] - #[case::add_unique_with_custom_name_sqlite( - DatabaseBackend::Sqlite, - "email_unique", - vec!["email"] - )] - fn test_add_unique_with_custom_name( - #[case] backend: DatabaseBackend, - #[case] constraint_name: &str, - #[case] columns: Vec<&str>, - ) { - // Test that custom unique constraint names follow uq_table__name pattern - let action = MigrationAction::AddConstraint { - table: "user".into(), - constraint: TableConstraint::Unique { - name: Some(constraint_name.into()), - columns: columns.iter().map(|s| s.to_string()).collect(), - }, - }; - - let current_schema = vec![TableDef { - name: "user".into(), - columns: vec![ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should use uq_table__name pattern - let expected_name = format!("uq_user__{}", constraint_name); - assert!( - sql.contains(&expected_name), - "Expected unique constraint name '{}' in SQL: {}", - expected_name, - sql - ); - - with_settings!({ snapshot_suffix => format!("add_unique_custom_{}_{:?}", constraint_name, backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::add_unnamed_unique_single_column_postgres( - DatabaseBackend::Postgres, - vec!["email"] - )] - #[case::add_unnamed_unique_single_column_mysql( - DatabaseBackend::MySql, - vec!["email"] - )] - #[case::add_unnamed_unique_single_column_sqlite( - DatabaseBackend::Sqlite, - vec!["email"] - )] - #[case::add_unnamed_unique_multiple_columns_postgres( - DatabaseBackend::Postgres, - vec!["email", "username"] - )] - #[case::add_unnamed_unique_multiple_columns_mysql( - DatabaseBackend::MySql, - vec!["email", "username"] - )] - #[case::add_unnamed_unique_multiple_columns_sqlite( - DatabaseBackend::Sqlite, - vec!["email", "username"] - )] - fn test_add_unnamed_unique(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { - // Test that unnamed unique constraints follow uq_table__col1_col2 pattern - let action = MigrationAction::AddConstraint { - table: "user".into(), - constraint: TableConstraint::Unique { - name: None, - columns: columns.iter().map(|s| s.to_string()).collect(), - }, - }; - - let schema_columns: Vec = columns - .iter() - .map(|col| ColumnDef { - name: col.to_string(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }) - .collect(); - - let current_schema = vec![TableDef { - name: "user".into(), - columns: schema_columns, - constraints: vec![], - }]; - - let result = build_action_queries(&backend, &action, 
¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should use uq_table__col1_col2... pattern - let expected_name = format!("uq_user__{}", columns.join("_")); - assert!( - sql.contains(&expected_name), - "Expected unique constraint name '{}' in SQL: {}", - expected_name, - sql - ); - - with_settings!({ snapshot_suffix => format!("add_unnamed_unique_{}_{:?}", columns.join("_"), backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_unique_with_custom_name_postgres( - DatabaseBackend::Postgres, - "email_unique", - vec!["email"] - )] - #[case::remove_unique_with_custom_name_mysql( - DatabaseBackend::MySql, - "email_unique", - vec!["email"] - )] - #[case::remove_unique_with_custom_name_sqlite( - DatabaseBackend::Sqlite, - "email_unique", - vec!["email"] - )] - fn test_remove_unique_with_custom_name( - #[case] backend: DatabaseBackend, - #[case] constraint_name: &str, - #[case] columns: Vec<&str>, - ) { - // Test that removing custom unique constraint uses uq_table__name pattern - let constraint = TableConstraint::Unique { - name: Some(constraint_name.into()), - columns: columns.iter().map(|s| s.to_string()).collect(), - }; - - let current_schema = vec![TableDef { - name: "user".into(), - columns: vec![ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![constraint.clone()], - }]; - - let action = MigrationAction::RemoveConstraint { - table: "user".into(), - constraint, - }; - - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should use uq_table__name pattern (for Postgres/MySQL, not SQLite which rebuilds table) - if backend != DatabaseBackend::Sqlite { - let expected_name = format!("uq_user__{}", constraint_name); - assert!( - sql.contains(&expected_name), - "Expected unique constraint name '{}' in SQL: {}", - expected_name, - sql - ); - } - - with_settings!({ snapshot_suffix => format!("remove_unique_custom_{}_{:?}", constraint_name, backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_unnamed_unique_single_column_postgres( - DatabaseBackend::Postgres, - vec!["email"] - )] - #[case::remove_unnamed_unique_single_column_mysql( - DatabaseBackend::MySql, - vec!["email"] - )] - #[case::remove_unnamed_unique_single_column_sqlite( - DatabaseBackend::Sqlite, - vec!["email"] - )] - #[case::remove_unnamed_unique_multiple_columns_postgres( - DatabaseBackend::Postgres, - vec!["email", "username"] - )] - #[case::remove_unnamed_unique_multiple_columns_mysql( - DatabaseBackend::MySql, - vec!["email", "username"] - )] - #[case::remove_unnamed_unique_multiple_columns_sqlite( - DatabaseBackend::Sqlite, - vec!["email", "username"] - )] - fn test_remove_unnamed_unique(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { - // Test that removing unnamed unique constraints uses uq_table__col1_col2 pattern - let constraint = TableConstraint::Unique { - name: None, - columns: columns.iter().map(|s| s.to_string()).collect(), - }; - - let schema_columns: Vec = columns - .iter() - .map(|col| ColumnDef { - name: col.to_string(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - 
foreign_key: None, - }) - .collect(); - - let current_schema = vec![TableDef { - name: "user".into(), - columns: schema_columns, - constraints: vec![constraint.clone()], - }]; - - let action = MigrationAction::RemoveConstraint { - table: "user".into(), - constraint, - }; - - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should use uq_table__col1_col2... pattern (for Postgres/MySQL, not SQLite which rebuilds table) - if backend != DatabaseBackend::Sqlite { - let expected_name = format!("uq_user__{}", columns.join("_")); - assert!( - sql.contains(&expected_name), - "Expected unique constraint name '{}' in SQL: {}", - expected_name, - sql - ); - } - - with_settings!({ snapshot_suffix => format!("remove_unnamed_unique_{}_{:?}", columns.join("_"), backend) }, { - assert_snapshot!(sql); - }); - } - - /// Test build_action_queries for ModifyColumnNullable - #[rstest] - #[case::postgres_modify_nullable(DatabaseBackend::Postgres)] - #[case::mysql_modify_nullable(DatabaseBackend::MySql)] - #[case::sqlite_modify_nullable(DatabaseBackend::Sqlite)] - fn test_build_action_queries_modify_column_nullable(#[case] backend: DatabaseBackend) { - let action = MigrationAction::ModifyColumnNullable { - table: "users".into(), - column: "email".into(), - nullable: false, - fill_with: Some("'unknown'".into()), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - assert!(!result.is_empty()); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should contain UPDATE for fill_with and ALTER for nullable change - assert!(sql.contains("UPDATE")); - assert!(sql.contains("unknown")); - - let suffix = format!( - "{}_modify_nullable", - match backend { - DatabaseBackend::Postgres => "postgres", - DatabaseBackend::MySql => "mysql", - DatabaseBackend::Sqlite => "sqlite", - } - ); - - with_settings!({ snapshot_suffix => suffix }, { - assert_snapshot!(sql); - }); - } - - /// Test build_action_queries for ModifyColumnDefault - #[rstest] - #[case::postgres_modify_default(DatabaseBackend::Postgres)] - #[case::mysql_modify_default(DatabaseBackend::MySql)] - #[case::sqlite_modify_default(DatabaseBackend::Sqlite)] - fn test_build_action_queries_modify_column_default(#[case] backend: DatabaseBackend) { - let action = MigrationAction::ModifyColumnDefault { - table: "users".into(), - column: "status".into(), - new_default: Some("'active'".into()), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "status".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - assert!(!result.is_empty()); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should contain DEFAULT and 'active' - assert!(sql.contains("DEFAULT") || sql.contains("active")); - - let suffix = format!( - "{}_modify_default", - 
match backend { - DatabaseBackend::Postgres => "postgres", - DatabaseBackend::MySql => "mysql", - DatabaseBackend::Sqlite => "sqlite", - } - ); - - with_settings!({ snapshot_suffix => suffix }, { - assert_snapshot!(sql); - }); - } - - /// Test build_action_queries for ModifyColumnComment - #[rstest] - #[case::postgres_modify_comment(DatabaseBackend::Postgres)] - #[case::mysql_modify_comment(DatabaseBackend::MySql)] - #[case::sqlite_modify_comment(DatabaseBackend::Sqlite)] - fn test_build_action_queries_modify_column_comment(#[case] backend: DatabaseBackend) { - let action = MigrationAction::ModifyColumnComment { - table: "users".into(), - column: "email".into(), - new_comment: Some("User email address".into()), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Postgres and MySQL should have comment, SQLite returns empty - if backend != DatabaseBackend::Sqlite { - assert!(sql.contains("COMMENT") || sql.contains("User email address")); - } - - let suffix = format!( - "{}_modify_comment", - match backend { - DatabaseBackend::Postgres => "postgres", - DatabaseBackend::MySql => "mysql", - DatabaseBackend::Sqlite => "sqlite", - } - ); - - with_settings!({ snapshot_suffix => suffix }, { - assert_snapshot!(sql); - }); - } -} +pub mod add_column; +pub mod add_constraint; +pub mod create_table; +pub mod delete_column; +pub mod delete_table; +pub mod helpers; +pub mod modify_column_comment; +pub mod modify_column_default; +pub mod modify_column_nullable; +pub mod modify_column_type; +pub mod raw_sql; +pub mod remove_constraint; +pub mod rename_column; +pub mod rename_table; +pub mod types; + +pub use helpers::*; +pub use types::{BuiltQuery, DatabaseBackend, RawSql}; + +use crate::error::QueryError; +use vespertide_core::{MigrationAction, TableDef}; + +use self::{ + add_column::build_add_column, add_constraint::build_add_constraint, + create_table::build_create_table, delete_column::build_delete_column, + delete_table::build_delete_table, modify_column_comment::build_modify_column_comment, + modify_column_default::build_modify_column_default, + modify_column_nullable::build_modify_column_nullable, + modify_column_type::build_modify_column_type, raw_sql::build_raw_sql, + remove_constraint::build_remove_constraint, rename_column::build_rename_column, + rename_table::build_rename_table, +}; + +pub fn build_action_queries( + backend: &DatabaseBackend, + action: &MigrationAction, + current_schema: &[TableDef], +) -> Result, QueryError> { + match action { + MigrationAction::CreateTable { + table, + columns, + constraints, + } => build_create_table(backend, table, columns, constraints), + + MigrationAction::DeleteTable { table } => Ok(vec![build_delete_table(table)]), + + MigrationAction::AddColumn { + table, + column, + fill_with, + } => build_add_column(backend, table, column, fill_with.as_deref(), current_schema), + + MigrationAction::RenameColumn { table, from, to } => { + Ok(vec![build_rename_column(table, from, to)]) + } + + MigrationAction::DeleteColumn { table, column } => { + // Find the column type from current schema for enum DROP TYPE support + let 
column_type = current_schema + .iter() + .find(|t| t.name == *table) + .and_then(|t| t.columns.iter().find(|c| c.name == *column)) + .map(|c| &c.r#type); + Ok(build_delete_column(table, column, column_type)) + } + + MigrationAction::ModifyColumnType { + table, + column, + new_type, + } => build_modify_column_type(backend, table, column, new_type, current_schema), + + MigrationAction::ModifyColumnNullable { + table, + column, + nullable, + fill_with, + } => build_modify_column_nullable( + backend, + table, + column, + *nullable, + fill_with.as_deref(), + current_schema, + ), + + MigrationAction::ModifyColumnDefault { + table, + column, + new_default, + } => build_modify_column_default( + backend, + table, + column, + new_default.as_deref(), + current_schema, + ), + + MigrationAction::ModifyColumnComment { + table, + column, + new_comment, + } => build_modify_column_comment( + backend, + table, + column, + new_comment.as_deref(), + current_schema, + ), + + MigrationAction::RenameTable { from, to } => Ok(vec![build_rename_table(from, to)]), + + MigrationAction::RawSql { sql } => Ok(vec![build_raw_sql(sql.clone())]), + + MigrationAction::AddConstraint { table, constraint } => { + build_add_constraint(backend, table, constraint, current_schema) + } + + MigrationAction::RemoveConstraint { table, constraint } => { + build_remove_constraint(backend, table, constraint, current_schema) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use insta::{assert_snapshot, with_settings}; + use rstest::rstest; + use vespertide_core::schema::primary_key::PrimaryKeySyntax; + use vespertide_core::{ + ColumnDef, ColumnType, MigrationAction, ReferenceAction, SimpleColumnType, TableConstraint, + }; + + fn col(name: &str, ty: ColumnType) -> ColumnDef { + ColumnDef { + name: name.to_string(), + r#type: ty, + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + } + } + + #[test] + fn test_backend_specific_quoting() { + let action = MigrationAction::CreateTable { + table: "users".into(), + columns: vec![col("id", ColumnType::Simple(SimpleColumnType::Integer))], + constraints: vec![], + }; + let result = build_action_queries(&DatabaseBackend::Postgres, &action, &[]).unwrap(); + + // PostgreSQL uses double quotes + let pg_sql = result[0].build(DatabaseBackend::Postgres); + assert!(pg_sql.contains("\"users\"")); + + // MySQL uses backticks + let mysql_sql = result[0].build(DatabaseBackend::MySql); + assert!(mysql_sql.contains("`users`")); + + // SQLite uses double quotes + let sqlite_sql = result[0].build(DatabaseBackend::Sqlite); + assert!(sqlite_sql.contains("\"users\"")); + } + + #[rstest] + #[case::create_table_with_default_postgres( + "create_table_with_default_postgres", + MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "status".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: Some("'active'".into()), + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }, + DatabaseBackend::Postgres, + &["DEFAULT", "'active'"] + )] + #[case::create_table_with_default_mysql( + "create_table_with_default_mysql", + MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "status".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: Some("'active'".into()), + comment: None, + primary_key: None, + unique: None, + index: None, + 
foreign_key: None, + }], + constraints: vec![], + }, + DatabaseBackend::Postgres, + &["DEFAULT", "'active'"] + )] + #[case::create_table_with_default_sqlite( + "create_table_with_default_sqlite", + MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "status".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: Some("'active'".into()), + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }, + DatabaseBackend::Sqlite, + &["DEFAULT", "'active'"] + )] + #[case::create_table_with_inline_primary_key_postgres( + "create_table_with_inline_primary_key_postgres", + MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: Some(PrimaryKeySyntax::Bool(true)), + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }, + DatabaseBackend::Postgres, + &["PRIMARY KEY"] + )] + #[case::create_table_with_inline_primary_key_mysql( + "create_table_with_inline_primary_key_mysql", + MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: Some(PrimaryKeySyntax::Bool(true)), + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }, + DatabaseBackend::MySql, + &["PRIMARY KEY"] + )] + #[case::create_table_with_inline_primary_key_sqlite( + "create_table_with_inline_primary_key_sqlite", + MigrationAction::CreateTable { + table: "users".into(), + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: Some(PrimaryKeySyntax::Bool(true)), + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }, + DatabaseBackend::Sqlite, + &["PRIMARY KEY"] + )] + #[case::create_table_with_fk_postgres( + "create_table_with_fk_postgres", + MigrationAction::CreateTable { + table: "posts".into(), + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col("user_id", ColumnType::Simple(SimpleColumnType::Integer)), + ], + constraints: vec![TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: Some(ReferenceAction::Cascade), + on_update: Some(ReferenceAction::Restrict), + }], + }, + DatabaseBackend::Postgres, + &["REFERENCES \"users\" (\"id\")", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] + )] + #[case::create_table_with_fk_mysql( + "create_table_with_fk_mysql", + MigrationAction::CreateTable { + table: "posts".into(), + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col("user_id", ColumnType::Simple(SimpleColumnType::Integer)), + ], + constraints: vec![TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: Some(ReferenceAction::Cascade), + on_update: Some(ReferenceAction::Restrict), + }], + }, + DatabaseBackend::MySql, + &["REFERENCES `users` (`id`)", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] + )] + #[case::create_table_with_fk_sqlite( + "create_table_with_fk_sqlite",
+ MigrationAction::CreateTable { + table: "posts".into(), + columns: vec![ + col("id", ColumnType::Simple(SimpleColumnType::Integer)), + col("user_id", ColumnType::Simple(SimpleColumnType::Integer)), + ], + constraints: vec![TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: Some(ReferenceAction::Cascade), + on_update: Some(ReferenceAction::Restrict), + }], + }, + DatabaseBackend::Sqlite, + &["REFERENCES \"users\" (\"id\")", "ON DELETE CASCADE", "ON UPDATE RESTRICT"] + )] + fn test_build_migration_action( + #[case] title: &str, + #[case] action: MigrationAction, + #[case] backend: DatabaseBackend, + #[case] expected: &[&str], + ) { + let result = build_action_queries(&backend, &action, &[]).unwrap(); + let sql = result[0].build(backend); + for exp in expected { + assert!( + sql.contains(exp), + "Expected SQL to contain '{}', got: {}", + exp, + sql + ); + } + + with_settings!({ snapshot_suffix => format!("build_migration_action_{}", title) }, { + assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::<Vec<_>>().join("\n")); + }); + } + + #[rstest] + #[case::rename_column_postgres(DatabaseBackend::Postgres)] + #[case::rename_column_mysql(DatabaseBackend::MySql)] + #[case::rename_column_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_rename_column(#[case] backend: DatabaseBackend) { + // Test MigrationAction::RenameColumn (lines 51-52) + let action = MigrationAction::RenameColumn { + table: "users".into(), + from: "old_name".into(), + to: "new_name".into(), + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + assert_eq!(result.len(), 1); + let sql = result[0].build(backend); + assert!(sql.contains("RENAME")); + assert!(sql.contains("old_name")); + assert!(sql.contains("new_name")); + + with_settings!({ snapshot_suffix => format!("rename_column_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::delete_column_postgres(DatabaseBackend::Postgres)] + #[case::delete_column_mysql(DatabaseBackend::MySql)] + #[case::delete_column_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_delete_column(#[case] backend: DatabaseBackend) { + // Test MigrationAction::DeleteColumn (lines 55-56) + let action = MigrationAction::DeleteColumn { + table: "users".into(), + column: "email".into(), + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + assert_eq!(result.len(), 1); + let sql = result[0].build(backend); + assert!(sql.contains("DROP COLUMN")); + assert!(sql.contains("email")); + + with_settings!({ snapshot_suffix => format!("delete_column_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::modify_column_type_postgres(DatabaseBackend::Postgres)] + #[case::modify_column_type_mysql(DatabaseBackend::MySql)] + #[case::modify_column_type_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_modify_column_type(#[case] backend: DatabaseBackend) { + // Test MigrationAction::ModifyColumnType (lines 60-63) + let action = MigrationAction::ModifyColumnType { + table: "users".into(), + column: "age".into(), + new_type: ColumnType::Simple(SimpleColumnType::BigInt), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index:
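// The col(...) helper used in the foreign-key cases above is defined outside this hunk.
// A plausible reconstruction, inferred from the ColumnDef literals in these tests; the
// nullable: false default here is an assumption, not taken from the source.
fn col(name: &str, r#type: ColumnType) -> ColumnDef {
    ColumnDef {
        name: name.into(),
        r#type,
        nullable: false,
        default: None,
        comment: None,
        primary_key: None,
        unique: None,
        index: None,
        foreign_key: None,
    }
}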
None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + assert!(!result.is_empty()); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + assert!(sql.contains("ALTER TABLE")); + + with_settings!({ snapshot_suffix => format!("modify_column_type_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_index_constraint_postgres(DatabaseBackend::Postgres)] + #[case::remove_index_constraint_mysql(DatabaseBackend::MySql)] + #[case::remove_index_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_remove_index_constraint(#[case] backend: DatabaseBackend) { + // Test MigrationAction::RemoveConstraint with Index variant + let action = MigrationAction::RemoveConstraint { + table: "users".into(), + constraint: TableConstraint::Index { + name: Some("idx_email".into()), + columns: vec!["email".into()], + }, + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + assert_eq!(result.len(), 1); + let sql = result[0].build(backend); + assert!(sql.contains("DROP INDEX")); + assert!(sql.contains("idx_email")); + + with_settings!({ snapshot_suffix => format!("remove_index_constraint_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::rename_table_postgres(DatabaseBackend::Postgres)] + #[case::rename_table_mysql(DatabaseBackend::MySql)] + #[case::rename_table_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_rename_table(#[case] backend: DatabaseBackend) { + // Test MigrationAction::RenameTable (line 69) + let action = MigrationAction::RenameTable { + from: "old_table".into(), + to: "new_table".into(), + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + assert_eq!(result.len(), 1); + let sql = result[0].build(backend); + assert!(sql.contains("RENAME")); + assert!(sql.contains("old_table")); + assert!(sql.contains("new_table")); + + with_settings!({ snapshot_suffix => format!("rename_table_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::add_constraint_postgres(DatabaseBackend::Postgres)] + #[case::add_constraint_mysql(DatabaseBackend::MySql)] + #[case::add_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_add_constraint(#[case] backend: DatabaseBackend) { + // Test MigrationAction::AddConstraint (lines 73-74) + let action = MigrationAction::AddConstraint { + table: "users".into(), + constraint: TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![], + }]; + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + assert!(!result.is_empty()); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + assert!(sql.contains("UNIQUE") || sql.contains("uq_email")); + + with_settings!({ snapshot_suffix => format!("add_constraint_{:?}", backend) }, 
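// A minimal usage sketch of the index-drop path exercised above, assuming the same
// crate-internal API these tests call (build_action_queries plus BuiltQuery::build);
// as in the test, an empty current schema is enough for dropping an index.
fn print_drop_index_sql_per_backend() {
    let action = MigrationAction::RemoveConstraint {
        table: "users".into(),
        constraint: TableConstraint::Index {
            name: Some("idx_email".into()),
            columns: vec!["email".into()],
        },
    };
    for backend in [
        DatabaseBackend::Postgres,
        DatabaseBackend::MySql,
        DatabaseBackend::Sqlite,
    ] {
        let queries = build_action_queries(&backend, &action, &[]).unwrap();
        let sql = queries[0].build(backend);
        println!("{:?}: {}", backend, sql);
    }
}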
{ + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_constraint_postgres(DatabaseBackend::Postgres)] + #[case::remove_constraint_mysql(DatabaseBackend::MySql)] + #[case::remove_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_remove_constraint(#[case] backend: DatabaseBackend) { + // Test MigrationAction::RemoveConstraint (lines 77-78) + let action = MigrationAction::RemoveConstraint { + table: "users".into(), + constraint: TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }], + }]; + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + assert!(!result.is_empty()); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + assert!(sql.contains("DROP") || sql.contains("CONSTRAINT")); + + with_settings!({ snapshot_suffix => format!("remove_constraint_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::add_column_postgres(DatabaseBackend::Postgres)] + #[case::add_column_mysql(DatabaseBackend::MySql)] + #[case::add_column_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_add_column(#[case] backend: DatabaseBackend) { + // Test MigrationAction::AddColumn (lines 46-49) + let action = MigrationAction::AddColumn { + table: "users".into(), + column: Box::new(ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }), + fill_with: None, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + assert!(!result.is_empty()); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + assert!(sql.contains("ALTER TABLE")); + assert!(sql.contains("ADD COLUMN") || sql.contains("ADD")); + + with_settings!({ snapshot_suffix => format!("add_column_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::add_index_constraint_postgres(DatabaseBackend::Postgres)] + #[case::add_index_constraint_mysql(DatabaseBackend::MySql)] + #[case::add_index_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_add_index_constraint(#[case] backend: DatabaseBackend) { + // Test MigrationAction::AddConstraint with Index variant + let action = MigrationAction::AddConstraint { + table: "users".into(), + constraint: TableConstraint::Index { + name: Some("idx_email".into()), + columns: 
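// The statement-joining idiom used by the multi-query assertions in this module, written
// out as a small helper for clarity: build every statement for the chosen backend, collect
// into a Vec, and join with newlines before snapshotting.
fn join_sql(queries: &[BuiltQuery], backend: DatabaseBackend) -> String {
    queries
        .iter()
        .map(|q| q.build(backend))
        .collect::<Vec<_>>()
        .join("\n")
}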
vec!["email".into()], + }, + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + assert_eq!(result.len(), 1); + let sql = result[0].build(backend); + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_email")); + + with_settings!({ snapshot_suffix => format!("add_index_constraint_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::raw_sql_postgres(DatabaseBackend::Postgres)] + #[case::raw_sql_mysql(DatabaseBackend::MySql)] + #[case::raw_sql_sqlite(DatabaseBackend::Sqlite)] + fn test_build_action_queries_raw_sql(#[case] backend: DatabaseBackend) { + // Test MigrationAction::RawSql (line 71) + let action = MigrationAction::RawSql { + sql: "SELECT 1;".into(), + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + assert_eq!(result.len(), 1); + let sql = result[0].build(backend); + assert_eq!(sql, "SELECT 1;"); + + with_settings!({ snapshot_suffix => format!("raw_sql_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + // Comprehensive index naming tests + #[rstest] + #[case::add_index_with_custom_name_postgres( + DatabaseBackend::Postgres, + "hello", + vec!["email", "password"] + )] + #[case::add_index_with_custom_name_mysql( + DatabaseBackend::MySql, + "hello", + vec!["email", "password"] + )] + #[case::add_index_with_custom_name_sqlite( + DatabaseBackend::Sqlite, + "hello", + vec!["email", "password"] + )] + #[case::add_index_single_column_postgres( + DatabaseBackend::Postgres, + "email_idx", + vec!["email"] + )] + #[case::add_index_single_column_mysql( + DatabaseBackend::MySql, + "email_idx", + vec!["email"] + )] + #[case::add_index_single_column_sqlite( + DatabaseBackend::Sqlite, + "email_idx", + vec!["email"] + )] + fn test_add_index_with_custom_name( + #[case] backend: DatabaseBackend, + #[case] index_name: &str, + #[case] columns: Vec<&str>, + ) { + // Test that custom index names follow ix_table__name pattern + let action = MigrationAction::AddConstraint { + table: "user".into(), + constraint: TableConstraint::Index { + name: Some(index_name.into()), + columns: columns.iter().map(|s| s.to_string()).collect(), + }, + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + let sql = result[0].build(backend); + + // Should use ix_table__name pattern + let expected_name = format!("ix_user__{}", index_name); + assert!( + sql.contains(&expected_name), + "Expected index name '{}' in SQL: {}", + expected_name, + sql + ); + + with_settings!({ snapshot_suffix => format!("add_index_custom_{}_{:?}", index_name, backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::add_unnamed_index_single_column_postgres( + DatabaseBackend::Postgres, + vec!["email"] + )] + #[case::add_unnamed_index_single_column_mysql( + DatabaseBackend::MySql, + vec!["email"] + )] + #[case::add_unnamed_index_single_column_sqlite( + DatabaseBackend::Sqlite, + vec!["email"] + )] + #[case::add_unnamed_index_multiple_columns_postgres( + DatabaseBackend::Postgres, + vec!["email", "password"] + )] + #[case::add_unnamed_index_multiple_columns_mysql( + DatabaseBackend::MySql, + vec!["email", "password"] + )] + #[case::add_unnamed_index_multiple_columns_sqlite( + DatabaseBackend::Sqlite, + vec!["email", "password"] + )] + fn test_add_unnamed_index(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { + // Test that unnamed indexes follow ix_table__col1_col2 pattern + let action = MigrationAction::AddConstraint { + table: "user".into(), + constraint: TableConstraint::Index { + name: None, + 
columns: columns.iter().map(|s| s.to_string()).collect(), + }, + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + let sql = result[0].build(backend); + + // Should use ix_table__col1_col2... pattern + let expected_name = format!("ix_user__{}", columns.join("_")); + assert!( + sql.contains(&expected_name), + "Expected index name '{}' in SQL: {}", + expected_name, + sql + ); + + with_settings!({ snapshot_suffix => format!("add_unnamed_index_{}_{:?}", columns.join("_"), backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_index_with_custom_name_postgres( + DatabaseBackend::Postgres, + "hello", + vec!["email", "password"] + )] + #[case::remove_index_with_custom_name_mysql( + DatabaseBackend::MySql, + "hello", + vec!["email", "password"] + )] + #[case::remove_index_with_custom_name_sqlite( + DatabaseBackend::Sqlite, + "hello", + vec!["email", "password"] + )] + fn test_remove_index_with_custom_name( + #[case] backend: DatabaseBackend, + #[case] index_name: &str, + #[case] columns: Vec<&str>, + ) { + // Test that removing custom index uses ix_table__name pattern + let action = MigrationAction::RemoveConstraint { + table: "user".into(), + constraint: TableConstraint::Index { + name: Some(index_name.into()), + columns: columns.iter().map(|s| s.to_string()).collect(), + }, + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + let sql = result[0].build(backend); + + // Should use ix_table__name pattern + let expected_name = format!("ix_user__{}", index_name); + assert!( + sql.contains(&expected_name), + "Expected index name '{}' in SQL: {}", + expected_name, + sql + ); + + with_settings!({ snapshot_suffix => format!("remove_index_custom_{}_{:?}", index_name, backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_unnamed_index_single_column_postgres( + DatabaseBackend::Postgres, + vec!["email"] + )] + #[case::remove_unnamed_index_single_column_mysql( + DatabaseBackend::MySql, + vec!["email"] + )] + #[case::remove_unnamed_index_single_column_sqlite( + DatabaseBackend::Sqlite, + vec!["email"] + )] + #[case::remove_unnamed_index_multiple_columns_postgres( + DatabaseBackend::Postgres, + vec!["email", "password"] + )] + #[case::remove_unnamed_index_multiple_columns_mysql( + DatabaseBackend::MySql, + vec!["email", "password"] + )] + #[case::remove_unnamed_index_multiple_columns_sqlite( + DatabaseBackend::Sqlite, + vec!["email", "password"] + )] + fn test_remove_unnamed_index(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { + // Test that removing unnamed indexes uses ix_table__col1_col2 pattern + let action = MigrationAction::RemoveConstraint { + table: "user".into(), + constraint: TableConstraint::Index { + name: None, + columns: columns.iter().map(|s| s.to_string()).collect(), + }, + }; + let result = build_action_queries(&backend, &action, &[]).unwrap(); + let sql = result[0].build(backend); + + // Should use ix_table__col1_col2... 
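// Hedged sketch of the naming rule these assertions encode: ix_<table>__<name> when a
// custom name is given, ix_<table>__<col1>_<col2> otherwise. The real
// vespertide_naming::build_index_name may differ in edge cases; this only mirrors what the
// expected_name checks above and below require.
fn sketch_index_name(table: &str, columns: &[String], name: Option<&str>) -> String {
    match name {
        Some(custom) => format!("ix_{}__{}", table, custom),
        None => format!("ix_{}__{}", table, columns.join("_")),
    }
}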
pattern + let expected_name = format!("ix_user__{}", columns.join("_")); + assert!( + sql.contains(&expected_name), + "Expected index name '{}' in SQL: {}", + expected_name, + sql + ); + + with_settings!({ snapshot_suffix => format!("remove_unnamed_index_{}_{:?}", columns.join("_"), backend) }, { + assert_snapshot!(sql); + }); + } + + // Comprehensive unique constraint naming tests + #[rstest] + #[case::add_unique_with_custom_name_postgres( + DatabaseBackend::Postgres, + "email_unique", + vec!["email"] + )] + #[case::add_unique_with_custom_name_mysql( + DatabaseBackend::MySql, + "email_unique", + vec!["email"] + )] + #[case::add_unique_with_custom_name_sqlite( + DatabaseBackend::Sqlite, + "email_unique", + vec!["email"] + )] + fn test_add_unique_with_custom_name( + #[case] backend: DatabaseBackend, + #[case] constraint_name: &str, + #[case] columns: Vec<&str>, + ) { + // Test that custom unique constraint names follow uq_table__name pattern + let action = MigrationAction::AddConstraint { + table: "user".into(), + constraint: TableConstraint::Unique { + name: Some(constraint_name.into()), + columns: columns.iter().map(|s| s.to_string()).collect(), + }, + }; + + let current_schema = vec![TableDef { + name: "user".into(), + description: None, + columns: vec![ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should use uq_table__name pattern + let expected_name = format!("uq_user__{}", constraint_name); + assert!( + sql.contains(&expected_name), + "Expected unique constraint name '{}' in SQL: {}", + expected_name, + sql + ); + + with_settings!({ snapshot_suffix => format!("add_unique_custom_{}_{:?}", constraint_name, backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::add_unnamed_unique_single_column_postgres( + DatabaseBackend::Postgres, + vec!["email"] + )] + #[case::add_unnamed_unique_single_column_mysql( + DatabaseBackend::MySql, + vec!["email"] + )] + #[case::add_unnamed_unique_single_column_sqlite( + DatabaseBackend::Sqlite, + vec!["email"] + )] + #[case::add_unnamed_unique_multiple_columns_postgres( + DatabaseBackend::Postgres, + vec!["email", "username"] + )] + #[case::add_unnamed_unique_multiple_columns_mysql( + DatabaseBackend::MySql, + vec!["email", "username"] + )] + #[case::add_unnamed_unique_multiple_columns_sqlite( + DatabaseBackend::Sqlite, + vec!["email", "username"] + )] + fn test_add_unnamed_unique(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { + // Test that unnamed unique constraints follow uq_table__col1_col2 pattern + let action = MigrationAction::AddConstraint { + table: "user".into(), + constraint: TableConstraint::Unique { + name: None, + columns: columns.iter().map(|s| s.to_string()).collect(), + }, + }; + + let schema_columns: Vec = columns + .iter() + .map(|col| ColumnDef { + name: col.to_string(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }) + .collect(); + + let current_schema = vec![TableDef { + name: "user".into(), + description: None, + columns: schema_columns, + constraints: vec![], + }]; + + let result = 
build_action_queries(&backend, &action, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should use uq_table__col1_col2... pattern + let expected_name = format!("uq_user__{}", columns.join("_")); + assert!( + sql.contains(&expected_name), + "Expected unique constraint name '{}' in SQL: {}", + expected_name, + sql + ); + + with_settings!({ snapshot_suffix => format!("add_unnamed_unique_{}_{:?}", columns.join("_"), backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_unique_with_custom_name_postgres( + DatabaseBackend::Postgres, + "email_unique", + vec!["email"] + )] + #[case::remove_unique_with_custom_name_mysql( + DatabaseBackend::MySql, + "email_unique", + vec!["email"] + )] + #[case::remove_unique_with_custom_name_sqlite( + DatabaseBackend::Sqlite, + "email_unique", + vec!["email"] + )] + fn test_remove_unique_with_custom_name( + #[case] backend: DatabaseBackend, + #[case] constraint_name: &str, + #[case] columns: Vec<&str>, + ) { + // Test that removing custom unique constraint uses uq_table__name pattern + let constraint = TableConstraint::Unique { + name: Some(constraint_name.into()), + columns: columns.iter().map(|s| s.to_string()).collect(), + }; + + let current_schema = vec![TableDef { + name: "user".into(), + description: None, + columns: vec![ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![constraint.clone()], + }]; + + let action = MigrationAction::RemoveConstraint { + table: "user".into(), + constraint, + }; + + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should use uq_table__name pattern (for Postgres/MySQL, not SQLite which rebuilds table) + if backend != DatabaseBackend::Sqlite { + let expected_name = format!("uq_user__{}", constraint_name); + assert!( + sql.contains(&expected_name), + "Expected unique constraint name '{}' in SQL: {}", + expected_name, + sql + ); + } + + with_settings!({ snapshot_suffix => format!("remove_unique_custom_{}_{:?}", constraint_name, backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_unnamed_unique_single_column_postgres( + DatabaseBackend::Postgres, + vec!["email"] + )] + #[case::remove_unnamed_unique_single_column_mysql( + DatabaseBackend::MySql, + vec!["email"] + )] + #[case::remove_unnamed_unique_single_column_sqlite( + DatabaseBackend::Sqlite, + vec!["email"] + )] + #[case::remove_unnamed_unique_multiple_columns_postgres( + DatabaseBackend::Postgres, + vec!["email", "username"] + )] + #[case::remove_unnamed_unique_multiple_columns_mysql( + DatabaseBackend::MySql, + vec!["email", "username"] + )] + #[case::remove_unnamed_unique_multiple_columns_sqlite( + DatabaseBackend::Sqlite, + vec!["email", "username"] + )] + fn test_remove_unnamed_unique(#[case] backend: DatabaseBackend, #[case] columns: Vec<&str>) { + // Test that removing unnamed unique constraints uses uq_table__col1_col2 pattern + let constraint = TableConstraint::Unique { + name: None, + columns: columns.iter().map(|s| s.to_string()).collect(), + }; + + let schema_columns: Vec = columns + .iter() + .map(|col| ColumnDef { + name: col.to_string(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: 
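// The unique-constraint counterpart of the index naming rule: uq_<table>__<name> when a
// name is given, uq_<table>__<col1>_<col2> otherwise. A sketch of the convention the
// assertions check, not the actual vespertide_naming implementation; the SQLite cases skip
// the name assertion because constraint removal there goes through a table rebuild.
fn sketch_unique_name(table: &str, columns: &[String], name: Option<&str>) -> String {
    match name {
        Some(custom) => format!("uq_{}__{}", table, custom),
        None => format!("uq_{}__{}", table, columns.join("_")),
    }
}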
None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }) + .collect(); + + let current_schema = vec![TableDef { + name: "user".into(), + description: None, + columns: schema_columns, + constraints: vec![constraint.clone()], + }]; + + let action = MigrationAction::RemoveConstraint { + table: "user".into(), + constraint, + }; + + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should use uq_table__col1_col2... pattern (for Postgres/MySQL, not SQLite which rebuilds table) + if backend != DatabaseBackend::Sqlite { + let expected_name = format!("uq_user__{}", columns.join("_")); + assert!( + sql.contains(&expected_name), + "Expected unique constraint name '{}' in SQL: {}", + expected_name, + sql + ); + } + + with_settings!({ snapshot_suffix => format!("remove_unnamed_unique_{}_{:?}", columns.join("_"), backend) }, { + assert_snapshot!(sql); + }); + } + + /// Test build_action_queries for ModifyColumnNullable + #[rstest] + #[case::postgres_modify_nullable(DatabaseBackend::Postgres)] + #[case::mysql_modify_nullable(DatabaseBackend::MySql)] + #[case::sqlite_modify_nullable(DatabaseBackend::Sqlite)] + fn test_build_action_queries_modify_column_nullable(#[case] backend: DatabaseBackend) { + let action = MigrationAction::ModifyColumnNullable { + table: "users".into(), + column: "email".into(), + nullable: false, + fill_with: Some("'unknown'".into()), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + assert!(!result.is_empty()); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should contain UPDATE for fill_with and ALTER for nullable change + assert!(sql.contains("UPDATE")); + assert!(sql.contains("unknown")); + + let suffix = format!( + "{}_modify_nullable", + match backend { + DatabaseBackend::Postgres => "postgres", + DatabaseBackend::MySql => "mysql", + DatabaseBackend::Sqlite => "sqlite", + } + ); + + with_settings!({ snapshot_suffix => suffix }, { + assert_snapshot!(sql); + }); + } + + /// Test build_action_queries for ModifyColumnDefault + #[rstest] + #[case::postgres_modify_default(DatabaseBackend::Postgres)] + #[case::mysql_modify_default(DatabaseBackend::MySql)] + #[case::sqlite_modify_default(DatabaseBackend::Sqlite)] + fn test_build_action_queries_modify_column_default(#[case] backend: DatabaseBackend) { + let action = MigrationAction::ModifyColumnDefault { + table: "users".into(), + column: "status".into(), + new_default: Some("'active'".into()), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "status".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + assert!(!result.is_empty()); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should contain DEFAULT 
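// A hedged illustration of the two-step shape the nullable test above asserts: a back-fill
// UPDATE driven by fill_with, followed by the NOT NULL change. The literal SQL below is a
// Postgres-style guess at what such statements look like; the generated output can differ
// per backend (SQLite in particular cannot alter a column's nullability in place).
fn sketch_set_not_null(table: &str, column: &str, fill_with: &str) -> Vec<String> {
    vec![
        format!(r#"UPDATE "{table}" SET "{column}" = {fill_with} WHERE "{column}" IS NULL"#),
        format!(r#"ALTER TABLE "{table}" ALTER COLUMN "{column}" SET NOT NULL"#),
    ]
}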
and 'active' + assert!(sql.contains("DEFAULT") || sql.contains("active")); + + let suffix = format!( + "{}_modify_default", + match backend { + DatabaseBackend::Postgres => "postgres", + DatabaseBackend::MySql => "mysql", + DatabaseBackend::Sqlite => "sqlite", + } + ); + + with_settings!({ snapshot_suffix => suffix }, { + assert_snapshot!(sql); + }); + } + + /// Test build_action_queries for ModifyColumnComment + #[rstest] + #[case::postgres_modify_comment(DatabaseBackend::Postgres)] + #[case::mysql_modify_comment(DatabaseBackend::MySql)] + #[case::sqlite_modify_comment(DatabaseBackend::Sqlite)] + fn test_build_action_queries_modify_column_comment(#[case] backend: DatabaseBackend) { + let action = MigrationAction::ModifyColumnComment { + table: "users".into(), + column: "email".into(), + new_comment: Some("User email address".into()), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_action_queries(&backend, &action, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Postgres and MySQL should have comment, SQLite returns empty + if backend != DatabaseBackend::Sqlite { + assert!(sql.contains("COMMENT") || sql.contains("User email address")); + } + + let suffix = format!( + "{}_modify_comment", + match backend { + DatabaseBackend::Postgres => "postgres", + DatabaseBackend::MySql => "mysql", + DatabaseBackend::Sqlite => "sqlite", + } + ); + + with_settings!({ snapshot_suffix => suffix }, { + assert_snapshot!(sql); + }); + } +} diff --git a/crates/vespertide-query/src/sql/modify_column_comment.rs b/crates/vespertide-query/src/sql/modify_column_comment.rs index 7dbab9d..71517bc 100644 --- a/crates/vespertide-query/src/sql/modify_column_comment.rs +++ b/crates/vespertide-query/src/sql/modify_column_comment.rs @@ -118,6 +118,7 @@ mod tests { ) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } diff --git a/crates/vespertide-query/src/sql/modify_column_default.rs b/crates/vespertide-query/src/sql/modify_column_default.rs index 3d83aae..6e955dd 100644 --- a/crates/vespertide-query/src/sql/modify_column_default.rs +++ b/crates/vespertide-query/src/sql/modify_column_default.rs @@ -174,6 +174,7 @@ mod tests { ) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } diff --git a/crates/vespertide-query/src/sql/modify_column_nullable.rs b/crates/vespertide-query/src/sql/modify_column_nullable.rs index eefb431..c53dbc7 100644 --- a/crates/vespertide-query/src/sql/modify_column_nullable.rs +++ b/crates/vespertide-query/src/sql/modify_column_nullable.rs @@ -200,6 +200,7 @@ mod tests { ) -> TableDef { TableDef { name: name.to_string(), + description: None, columns, constraints, } diff --git a/crates/vespertide-query/src/sql/modify_column_type.rs b/crates/vespertide-query/src/sql/modify_column_type.rs index b540eae..929cfd2 100644 --- a/crates/vespertide-query/src/sql/modify_column_type.rs +++ b/crates/vespertide-query/src/sql/modify_column_type.rs @@ -1,765 +1,770 @@ -use sea_query::{Alias, ColumnDef as SeaColumnDef, Query, Table}; - -use vespertide_core::{ColumnType, ComplexColumnType, TableDef}; - -use 
super::create_table::build_create_table_for_backend; -use super::helpers::{apply_column_type_with_table, build_create_enum_type_sql}; -use super::rename_table::build_rename_table; -use super::types::{BuiltQuery, DatabaseBackend}; -use crate::error::QueryError; - -pub fn build_modify_column_type( - backend: &DatabaseBackend, - table: &str, - column: &str, - new_type: &ColumnType, - current_schema: &[TableDef], -) -> Result, QueryError> { - // SQLite does not support direct column type modification, so use temporary table approach - if *backend == DatabaseBackend::Sqlite { - // Current schema information is required - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. SQLite requires current schema information to modify column types.", - table - )))?; - - // Create new column definitions with the modified column - let mut new_columns = table_def.columns.clone(); - let col_index = new_columns - .iter() - .position(|c| c.name == column) - .ok_or_else(|| { - QueryError::Other(format!( - "Column '{}' not found in table '{}'", - column, table - )) - })?; - - new_columns[col_index].r#type = new_type.clone(); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. Create temporary table with new column types - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &new_columns, - &table_def.constraints, - ); - let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table)); - - // 2. Copy data (all columns) - Use INSERT INTO ... SELECT - let column_aliases: Vec = new_columns.iter().map(|c| Alias::new(&c.name)).collect(); - - // Build SELECT query - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - // Build INSERT query - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table to original name - let rename_query = build_rename_table(&temp_table, table); - - // 5. Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for constraint in &table_def.constraints { - if let vespertide_core::TableConstraint::Index { name, columns } = constraint { - let index_name = - vespertide_naming::build_index_name(table, columns, name.as_deref()); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in columns { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - - Ok(queries) - } else { - // PostgreSQL, MySQL, etc. 
can use ALTER TABLE directly - let mut queries = Vec::new(); - - // Get the old column type to check if we need special enum handling - let old_type = current_schema - .iter() - .find(|t| t.name == table) - .and_then(|t| t.columns.iter().find(|c| c.name == column)) - .map(|c| &c.r#type); - - // Check if this is an enum-to-enum migration that needs special handling (PostgreSQL only) - let needs_enum_migration = if *backend == DatabaseBackend::Postgres { - matches!( - (old_type, new_type), - ( - Some(ColumnType::Complex(ComplexColumnType::Enum { name: old_name, values: old_values })), - ColumnType::Complex(ComplexColumnType::Enum { name: new_name, values: new_values }) - ) if old_name == new_name && old_values != new_values - ) - } else { - false - }; - - if needs_enum_migration { - // Use the safe temp type + USING + RENAME approach for enum value changes - if let ( - Some(ColumnType::Complex(ComplexColumnType::Enum { - name: enum_name, .. - })), - ColumnType::Complex(ComplexColumnType::Enum { - values: new_values, .. - }), - ) = (old_type, new_type) - { - // Use table-prefixed enum type names - let type_name = super::helpers::build_enum_type_name(table, enum_name); - let temp_type_name = format!("{}_new", type_name); - - // 1. CREATE TYPE {table}_{enum}_new AS ENUM (new values) - let create_temp_values = new_values.to_sql_values().join(", "); - queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( - format!( - "CREATE TYPE \"{}\" AS ENUM ({})", - temp_type_name, create_temp_values - ), - String::new(), - String::new(), - ))); - - // 2. ALTER TABLE ... ALTER COLUMN ... TYPE {table}_{enum}_new USING {column}::text::{table}_{enum}_new - queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( - format!( - "ALTER TABLE \"{}\" ALTER COLUMN \"{}\" TYPE \"{}\" USING \"{}\"::text::\"{}\"", - table, column, temp_type_name, column, temp_type_name - ), - String::new(), - String::new(), - ))); - - // 3. DROP TYPE {table}_{enum} - queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( - format!("DROP TYPE \"{}\"", type_name), - String::new(), - String::new(), - ))); - - // 4. ALTER TYPE {table}_{enum}_new RENAME TO {table}_{enum} - queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( - format!( - "ALTER TYPE \"{}\" RENAME TO \"{}\"", - temp_type_name, type_name - ), - String::new(), - String::new(), - ))); - } - } else { - // Standard column type modification - - // If new type is an enum and different from old, create the type first (PostgreSQL only) - if let ColumnType::Complex(ComplexColumnType::Enum { name: new_name, .. }) = new_type { - // Determine if we need to create a new enum type - // - If old type was a different enum, we need to create the new one - // - If old type was not an enum, we need to create the enum type - let should_create = if let Some(ColumnType::Complex(ComplexColumnType::Enum { - name: old_name, - .. 
- })) = old_type - { - old_name != new_name - } else { - // Either old_type is None or it wasn't an enum - need to create enum type - true - }; - - if should_create - && let Some(create_type_sql) = build_create_enum_type_sql(table, new_type) - { - queries.push(BuiltQuery::Raw(create_type_sql)); - } - } - - let mut col = SeaColumnDef::new(Alias::new(column)); - apply_column_type_with_table(&mut col, new_type, table); - - let stmt = Table::alter() - .table(Alias::new(table)) - .modify_column(col) - .to_owned(); - queries.push(BuiltQuery::AlterTable(Box::new(stmt))); - - // If old type was an enum and new type is different, drop the old enum type - if let Some(ColumnType::Complex(ComplexColumnType::Enum { name: old_name, .. })) = - old_type - { - let should_drop = match new_type { - ColumnType::Complex(ComplexColumnType::Enum { name: new_name, .. }) => { - old_name != new_name - } - _ => true, // New type is not an enum - }; - - if should_drop { - // Use table-prefixed enum type name - let old_type_name = super::helpers::build_enum_type_name(table, old_name); - queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( - format!("DROP TYPE IF EXISTS \"{}\"", old_type_name), - String::new(), - String::new(), - ))); - } - } - } - - Ok(queries) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use insta::{assert_snapshot, with_settings}; - use rstest::rstest; - use vespertide_core::{ - ColumnDef, ColumnType, ComplexColumnType, EnumValues, SimpleColumnType, TableDef, - }; - - #[rstest] - #[case::modify_column_type_postgres( - "modify_column_type_postgres", - DatabaseBackend::Postgres, - &["ALTER TABLE \"users\"", "\"age\""] - )] - #[case::modify_column_type_mysql( - "modify_column_type_mysql", - DatabaseBackend::MySql, - &["ALTER TABLE `users` MODIFY COLUMN `age` varchar(50)"] - )] - #[case::modify_column_type_sqlite( - "modify_column_type_sqlite", - DatabaseBackend::Sqlite, - &[] - )] - fn test_modify_column_type( - #[case] title: &str, - #[case] backend: DatabaseBackend, - #[case] expected: &[&str], - ) { - // For SQLite, we need to provide current schema - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![], - }]; - - let result = build_modify_column_type( - &backend, - "users", - "age", - &ColumnType::Complex(ComplexColumnType::Varchar { length: 50 }), - ¤t_schema, - ); - - // SQLite may return multiple queries - let sql = result - .unwrap() - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - for exp in expected { - assert!( - sql.contains(exp), - "Expected SQL to contain '{}', got: {}", - exp, - sql - ); - } - println!("sql: {}", sql); - - with_settings!({ snapshot_suffix => format!("modify_column_type_{}", title) }, { - assert_snapshot!(sql); - }); - } - - #[test] - fn test_modify_column_type_table_not_found() { - let result = build_modify_column_type( - &DatabaseBackend::Sqlite, - "nonexistent_table", - "age", - &ColumnType::Simple(SimpleColumnType::BigInt), - &[], - ); - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("Table 
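// The two error tests nearby cover the SQLite guard rails (missing table, missing column).
// A small sketch of how a caller might surface those errors; it relies only on QueryError
// implementing Display, which the .to_string() assertions above already use.
fn sketch_report_missing_schema() {
    if let Err(e) = build_modify_column_type(
        &DatabaseBackend::Sqlite,
        "users",
        "age",
        &ColumnType::Simple(SimpleColumnType::BigInt),
        &[], // no current schema available, so this call is expected to fail
    ) {
        eprintln!("cannot rewrite column type: {}", e);
    }
}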
'nonexistent_table' not found") - ); - } - - #[test] - fn test_modify_column_type_column_not_found() { - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - let result = build_modify_column_type( - &DatabaseBackend::Sqlite, - "users", - "nonexistent_column", - &ColumnType::Simple(SimpleColumnType::BigInt), - ¤t_schema, - ); - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("Column 'nonexistent_column' not found") - ); - } - - #[rstest] - #[case::modify_column_type_with_index_postgres( - "modify_column_type_with_index_postgres", - DatabaseBackend::Postgres - )] - #[case::modify_column_type_with_index_mysql( - "modify_column_type_with_index_mysql", - DatabaseBackend::MySql - )] - #[case::modify_column_type_with_index_sqlite( - "modify_column_type_with_index_sqlite", - DatabaseBackend::Sqlite - )] - fn test_modify_column_type_with_index(#[case] title: &str, #[case] backend: DatabaseBackend) { - // Test modify column type with indexes - use vespertide_core::TableConstraint; - - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![TableConstraint::Index { - name: Some("idx_age".into()), - columns: vec!["age".into()], - }], - }]; - - let result = build_modify_column_type( - &backend, - "users", - "age", - &ColumnType::Simple(SimpleColumnType::BigInt), - ¤t_schema, - ) - .unwrap(); - - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - // For SQLite, should recreate index - if matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_age")); - } - - with_settings!({ snapshot_suffix => format!("modify_column_type_with_index_{}", title) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::modify_column_type_with_unique_constraint_postgres( - "modify_column_type_with_unique_constraint_postgres", - DatabaseBackend::Postgres - )] - #[case::modify_column_type_with_unique_constraint_mysql( - "modify_column_type_with_unique_constraint_mysql", - DatabaseBackend::MySql - )] - #[case::modify_column_type_with_unique_constraint_sqlite( - "modify_column_type_with_unique_constraint_sqlite", - DatabaseBackend::Sqlite - )] - fn test_modify_column_type_with_unique_constraint( - #[case] title: &str, - #[case] backend: DatabaseBackend, - ) { - // Test modify column type with unique constraint - use vespertide_core::TableConstraint; - - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "email".into(), - r#type: 
ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }], - }]; - - let result = build_modify_column_type( - &backend, - "users", - "email", - &ColumnType::Complex(ComplexColumnType::Varchar { length: 255 }), - ¤t_schema, - ) - .unwrap(); - - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - // For SQLite, unique constraint should be in CREATE TABLE statement - if matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("CREATE TABLE")); - } - - with_settings!({ snapshot_suffix => format!("modify_column_type_with_unique_constraint_{}", title) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::enum_values_changed_postgres( - "enum_values_changed_postgres", - DatabaseBackend::Postgres, - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into(), "pending".into()]), - }) - )] - #[case::enum_values_changed_mysql( - "enum_values_changed_mysql", - DatabaseBackend::MySql, - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into(), "pending".into()]), - }) - )] - #[case::enum_values_changed_sqlite( - "enum_values_changed_sqlite", - DatabaseBackend::Sqlite, - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into(), "pending".into()]), - }) - )] - #[case::enum_same_values_postgres( - "enum_same_values_postgres", - DatabaseBackend::Postgres, - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::enum_same_values_mysql( - "enum_same_values_mysql", - DatabaseBackend::MySql, - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::enum_same_values_sqlite( - "enum_same_values_sqlite", - DatabaseBackend::Sqlite, - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::enum_name_changed_postgres( - "enum_name_changed_postgres", - DatabaseBackend::Postgres, - ColumnType::Complex(ComplexColumnType::Enum { - name: "old_status".into(), - values: 
EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "new_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::enum_name_changed_mysql( - "enum_name_changed_mysql", - DatabaseBackend::MySql, - ColumnType::Complex(ComplexColumnType::Enum { - name: "old_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "new_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::enum_name_changed_sqlite( - "enum_name_changed_sqlite", - DatabaseBackend::Sqlite, - ColumnType::Complex(ComplexColumnType::Enum { - name: "old_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Complex(ComplexColumnType::Enum { - name: "new_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::text_to_enum_postgres( - "text_to_enum_postgres", - DatabaseBackend::Postgres, - ColumnType::Simple(SimpleColumnType::Text), - ColumnType::Complex(ComplexColumnType::Enum { - name: "user_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::text_to_enum_mysql( - "text_to_enum_mysql", - DatabaseBackend::MySql, - ColumnType::Simple(SimpleColumnType::Text), - ColumnType::Complex(ComplexColumnType::Enum { - name: "user_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::text_to_enum_sqlite( - "text_to_enum_sqlite", - DatabaseBackend::Sqlite, - ColumnType::Simple(SimpleColumnType::Text), - ColumnType::Complex(ComplexColumnType::Enum { - name: "user_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }) - )] - #[case::enum_to_text_postgres( - "enum_to_text_postgres", - DatabaseBackend::Postgres, - ColumnType::Complex(ComplexColumnType::Enum { - name: "user_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Simple(SimpleColumnType::Text) - )] - #[case::enum_to_text_mysql( - "enum_to_text_mysql", - DatabaseBackend::MySql, - ColumnType::Complex(ComplexColumnType::Enum { - name: "user_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Simple(SimpleColumnType::Text) - )] - #[case::enum_to_text_sqlite( - "enum_to_text_sqlite", - DatabaseBackend::Sqlite, - ColumnType::Complex(ComplexColumnType::Enum { - name: "user_status".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - ColumnType::Simple(SimpleColumnType::Text) - )] - fn test_modify_enum_types( - #[case] title: &str, - #[case] backend: DatabaseBackend, - #[case] old_type: ColumnType, - #[case] new_type: ColumnType, - ) { - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "status".into(), - r#type: old_type, - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![], - }]; - - let result = - build_modify_column_type(&backend, "users", "status", &new_type, ¤t_schema) - .unwrap(); - - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join(";\n"); - - with_settings!({ snapshot_suffix => format!("modify_enum_types_{}", title) }, { - assert_snapshot!(sql); - }); - } - - 
#[test] - fn test_modify_column_type_to_enum_with_empty_schema() { - // Test the None branch in line 195-200 - // When current_schema is empty, old_type will be None - use vespertide_core::ComplexColumnType; - - let result = build_modify_column_type( - &DatabaseBackend::Postgres, - "users", - "status", - &ColumnType::Complex(ComplexColumnType::Enum { - name: "status_type".into(), - values: EnumValues::String(vec!["active".into(), "inactive".into()]), - }), - &[], // Empty schema - old_type will be None - ); - - assert!(result.is_ok()); - let queries = result.unwrap(); - let sql = queries - .iter() - .map(|q| q.build(DatabaseBackend::Postgres)) - .collect::>() - .join(";\n"); - - // Should create the enum type since old_type is None - assert!(sql.contains("CREATE TYPE")); - assert!(sql.contains("status_type")); - assert!(sql.contains("ALTER TABLE")); - } -} +use sea_query::{Alias, ColumnDef as SeaColumnDef, Query, Table}; + +use vespertide_core::{ColumnType, ComplexColumnType, TableDef}; + +use super::create_table::build_create_table_for_backend; +use super::helpers::{apply_column_type_with_table, build_create_enum_type_sql}; +use super::rename_table::build_rename_table; +use super::types::{BuiltQuery, DatabaseBackend}; +use crate::error::QueryError; + +pub fn build_modify_column_type( + backend: &DatabaseBackend, + table: &str, + column: &str, + new_type: &ColumnType, + current_schema: &[TableDef], +) -> Result, QueryError> { + // SQLite does not support direct column type modification, so use temporary table approach + if *backend == DatabaseBackend::Sqlite { + // Current schema information is required + let table_def = current_schema + .iter() + .find(|t| t.name == table) + .ok_or_else(|| QueryError::Other(format!( + "Table '{}' not found in current schema. SQLite requires current schema information to modify column types.", + table + )))?; + + // Create new column definitions with the modified column + let mut new_columns = table_def.columns.clone(); + let col_index = new_columns + .iter() + .position(|c| c.name == column) + .ok_or_else(|| { + QueryError::Other(format!( + "Column '{}' not found in table '{}'", + column, table + )) + })?; + + new_columns[col_index].r#type = new_type.clone(); + + // Generate temporary table name + let temp_table = format!("{}_temp", table); + + // 1. Create temporary table with new column types + let create_temp_table = build_create_table_for_backend( + backend, + &temp_table, + &new_columns, + &table_def.constraints, + ); + let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table)); + + // 2. Copy data (all columns) - Use INSERT INTO ... SELECT + let column_aliases: Vec = new_columns.iter().map(|c| Alias::new(&c.name)).collect(); + + // Build SELECT query + let mut select_query = Query::select(); + for col_alias in &column_aliases { + select_query = select_query.column(col_alias.clone()).to_owned(); + } + select_query = select_query.from(Alias::new(table)).to_owned(); + + // Build INSERT query + let insert_stmt = Query::insert() + .into_table(Alias::new(&temp_table)) + .columns(column_aliases.clone()) + .select_from(select_query) + .unwrap() + .to_owned(); + + let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); + + // 3. Drop original table + let drop_table = Table::drop().table(Alias::new(table)).to_owned(); + let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); + + // 4. Rename temporary table to original name + let rename_query = build_rename_table(&temp_table, table); + + // 5. 
Recreate indexes from Index constraints + let mut index_queries = Vec::new(); + for constraint in &table_def.constraints { + if let vespertide_core::TableConstraint::Index { name, columns } = constraint { + let index_name = + vespertide_naming::build_index_name(table, columns, name.as_deref()); + let mut idx_stmt = sea_query::Index::create(); + idx_stmt = idx_stmt.name(&index_name).to_owned(); + for col_name in columns { + idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); + } + idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); + index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); + } + } + + let mut queries = vec![create_query, insert_query, drop_query, rename_query]; + queries.extend(index_queries); + + Ok(queries) + } else { + // PostgreSQL, MySQL, etc. can use ALTER TABLE directly + let mut queries = Vec::new(); + + // Get the old column type to check if we need special enum handling + let old_type = current_schema + .iter() + .find(|t| t.name == table) + .and_then(|t| t.columns.iter().find(|c| c.name == column)) + .map(|c| &c.r#type); + + // Check if this is an enum-to-enum migration that needs special handling (PostgreSQL only) + let needs_enum_migration = if *backend == DatabaseBackend::Postgres { + matches!( + (old_type, new_type), + ( + Some(ColumnType::Complex(ComplexColumnType::Enum { name: old_name, values: old_values })), + ColumnType::Complex(ComplexColumnType::Enum { name: new_name, values: new_values }) + ) if old_name == new_name && old_values != new_values + ) + } else { + false + }; + + if needs_enum_migration { + // Use the safe temp type + USING + RENAME approach for enum value changes + if let ( + Some(ColumnType::Complex(ComplexColumnType::Enum { + name: enum_name, .. + })), + ColumnType::Complex(ComplexColumnType::Enum { + values: new_values, .. + }), + ) = (old_type, new_type) + { + // Use table-prefixed enum type names + let type_name = super::helpers::build_enum_type_name(table, enum_name); + let temp_type_name = format!("{}_new", type_name); + + // 1. CREATE TYPE {table}_{enum}_new AS ENUM (new values) + let create_temp_values = new_values.to_sql_values().join(", "); + queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( + format!( + "CREATE TYPE \"{}\" AS ENUM ({})", + temp_type_name, create_temp_values + ), + String::new(), + String::new(), + ))); + + // 2. ALTER TABLE ... ALTER COLUMN ... TYPE {table}_{enum}_new USING {column}::text::{table}_{enum}_new + queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( + format!( + "ALTER TABLE \"{}\" ALTER COLUMN \"{}\" TYPE \"{}\" USING \"{}\"::text::\"{}\"", + table, column, temp_type_name, column, temp_type_name + ), + String::new(), + String::new(), + ))); + + // 3. DROP TYPE {table}_{enum} + queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( + format!("DROP TYPE \"{}\"", type_name), + String::new(), + String::new(), + ))); + + // 4. ALTER TYPE {table}_{enum}_new RENAME TO {table}_{enum} + queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( + format!( + "ALTER TYPE \"{}\" RENAME TO \"{}\"", + temp_type_name, type_name + ), + String::new(), + String::new(), + ))); + } + } else { + // Standard column type modification + + // If new type is an enum and different from old, create the type first (PostgreSQL only) + if let ColumnType::Complex(ComplexColumnType::Enum { name: new_name, .. 
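// Minimal usage sketch of the SQLite table-rebuild path above, assuming the types already
// imported in this file. The printed sequence should follow the numbered steps in that
// branch: CREATE the *_temp table, INSERT ... SELECT the data, DROP the old table, RENAME
// the temp table back, then recreate any Index constraints. The schema slice must contain
// a "users" table with an "age" column, or the call returns an error instead.
fn sketch_sqlite_type_change(schema: &[TableDef]) -> Result<(), QueryError> {
    let queries = build_modify_column_type(
        &DatabaseBackend::Sqlite,
        "users",
        "age",
        &ColumnType::Simple(SimpleColumnType::BigInt),
        schema,
    )?;
    for q in &queries {
        println!("{}", q.build(DatabaseBackend::Sqlite));
    }
    Ok(())
}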
}) = new_type { + // Determine if we need to create a new enum type + // - If old type was a different enum, we need to create the new one + // - If old type was not an enum, we need to create the enum type + let should_create = if let Some(ColumnType::Complex(ComplexColumnType::Enum { + name: old_name, + .. + })) = old_type + { + old_name != new_name + } else { + // Either old_type is None or it wasn't an enum - need to create enum type + true + }; + + if should_create + && let Some(create_type_sql) = build_create_enum_type_sql(table, new_type) + { + queries.push(BuiltQuery::Raw(create_type_sql)); + } + } + + let mut col = SeaColumnDef::new(Alias::new(column)); + apply_column_type_with_table(&mut col, new_type, table); + + let stmt = Table::alter() + .table(Alias::new(table)) + .modify_column(col) + .to_owned(); + queries.push(BuiltQuery::AlterTable(Box::new(stmt))); + + // If old type was an enum and new type is different, drop the old enum type + if let Some(ColumnType::Complex(ComplexColumnType::Enum { name: old_name, .. })) = + old_type + { + let should_drop = match new_type { + ColumnType::Complex(ComplexColumnType::Enum { name: new_name, .. }) => { + old_name != new_name + } + _ => true, // New type is not an enum + }; + + if should_drop { + // Use table-prefixed enum type name + let old_type_name = super::helpers::build_enum_type_name(table, old_name); + queries.push(BuiltQuery::Raw(super::types::RawSql::per_backend( + format!("DROP TYPE IF EXISTS \"{}\"", old_type_name), + String::new(), + String::new(), + ))); + } + } + } + + Ok(queries) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use insta::{assert_snapshot, with_settings}; + use rstest::rstest; + use vespertide_core::{ + ColumnDef, ColumnType, ComplexColumnType, EnumValues, SimpleColumnType, TableDef, + }; + + #[rstest] + #[case::modify_column_type_postgres( + "modify_column_type_postgres", + DatabaseBackend::Postgres, + &["ALTER TABLE \"users\"", "\"age\""] + )] + #[case::modify_column_type_mysql( + "modify_column_type_mysql", + DatabaseBackend::MySql, + &["ALTER TABLE `users` MODIFY COLUMN `age` varchar(50)"] + )] + #[case::modify_column_type_sqlite( + "modify_column_type_sqlite", + DatabaseBackend::Sqlite, + &[] + )] + fn test_modify_column_type( + #[case] title: &str, + #[case] backend: DatabaseBackend, + #[case] expected: &[&str], + ) { + // For SQLite, we need to provide current schema + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![], + }]; + + let result = build_modify_column_type( + &backend, + "users", + "age", + &ColumnType::Complex(ComplexColumnType::Varchar { length: 50 }), + ¤t_schema, + ); + + // SQLite may return multiple queries + let sql = result + .unwrap() + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + for exp in expected { + assert!( + sql.contains(exp), + "Expected SQL to contain '{}', got: {}", + exp, + sql + ); + } + println!("sql: {}", sql); + + with_settings!({ snapshot_suffix => format!("modify_column_type_{}", title) }, { + assert_snapshot!(sql); + 
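The tests in this module all share the same rstest plus insta shape: each #[case] carries a title that is routed into snapshot_suffix, so every backend gets its own snapshot file while the assertion body stays shared. A minimal, self-contained version of that pattern, with hypothetical case values:

// Minimal illustration of the rstest + insta pattern used in this module;
// the case titles and SQL strings are hypothetical.
#[cfg(test)]
mod snapshot_pattern_example {
    use insta::{assert_snapshot, with_settings};
    use rstest::rstest;

    #[rstest]
    #[case::postgres("postgres", "ALTER TABLE \"users\" ALTER COLUMN \"age\" TYPE varchar(50)")]
    #[case::mysql("mysql", "ALTER TABLE `users` MODIFY COLUMN `age` varchar(50)")]
    fn snapshot_per_case(#[case] title: &str, #[case] sql: &str) {
        // One snapshot file per case, distinguished by the suffix.
        with_settings!({ snapshot_suffix => format!("example_{}", title) }, {
            assert_snapshot!(sql);
        });
    }
}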
}); + } + + #[test] + fn test_modify_column_type_table_not_found() { + let result = build_modify_column_type( + &DatabaseBackend::Sqlite, + "nonexistent_table", + "age", + &ColumnType::Simple(SimpleColumnType::BigInt), + &[], + ); + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("Table 'nonexistent_table' not found") + ); + } + + #[test] + fn test_modify_column_type_column_not_found() { + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + let result = build_modify_column_type( + &DatabaseBackend::Sqlite, + "users", + "nonexistent_column", + &ColumnType::Simple(SimpleColumnType::BigInt), + ¤t_schema, + ); + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("Column 'nonexistent_column' not found") + ); + } + + #[rstest] + #[case::modify_column_type_with_index_postgres( + "modify_column_type_with_index_postgres", + DatabaseBackend::Postgres + )] + #[case::modify_column_type_with_index_mysql( + "modify_column_type_with_index_mysql", + DatabaseBackend::MySql + )] + #[case::modify_column_type_with_index_sqlite( + "modify_column_type_with_index_sqlite", + DatabaseBackend::Sqlite + )] + fn test_modify_column_type_with_index(#[case] title: &str, #[case] backend: DatabaseBackend) { + // Test modify column type with indexes + use vespertide_core::TableConstraint; + + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![TableConstraint::Index { + name: Some("idx_age".into()), + columns: vec!["age".into()], + }], + }]; + + let result = build_modify_column_type( + &backend, + "users", + "age", + &ColumnType::Simple(SimpleColumnType::BigInt), + ¤t_schema, + ) + .unwrap(); + + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + // For SQLite, should recreate index + if matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_age")); + } + + with_settings!({ snapshot_suffix => format!("modify_column_type_with_index_{}", title) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::modify_column_type_with_unique_constraint_postgres( + "modify_column_type_with_unique_constraint_postgres", + DatabaseBackend::Postgres + )] + #[case::modify_column_type_with_unique_constraint_mysql( + "modify_column_type_with_unique_constraint_mysql", + DatabaseBackend::MySql + )] + #[case::modify_column_type_with_unique_constraint_sqlite( + "modify_column_type_with_unique_constraint_sqlite", + DatabaseBackend::Sqlite + )] + fn test_modify_column_type_with_unique_constraint( + #[case] title: &str, + #[case] backend: DatabaseBackend, + ) { + // Test modify column type with unique constraint + use vespertide_core::TableConstraint; + + let 
current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }], + }]; + + let result = build_modify_column_type( + &backend, + "users", + "email", + &ColumnType::Complex(ComplexColumnType::Varchar { length: 255 }), + ¤t_schema, + ) + .unwrap(); + + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + // For SQLite, unique constraint should be in CREATE TABLE statement + if matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("CREATE TABLE")); + } + + with_settings!({ snapshot_suffix => format!("modify_column_type_with_unique_constraint_{}", title) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::enum_values_changed_postgres( + "enum_values_changed_postgres", + DatabaseBackend::Postgres, + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into(), "pending".into()]), + }) + )] + #[case::enum_values_changed_mysql( + "enum_values_changed_mysql", + DatabaseBackend::MySql, + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into(), "pending".into()]), + }) + )] + #[case::enum_values_changed_sqlite( + "enum_values_changed_sqlite", + DatabaseBackend::Sqlite, + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into(), "pending".into()]), + }) + )] + #[case::enum_same_values_postgres( + "enum_same_values_postgres", + DatabaseBackend::Postgres, + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::enum_same_values_mysql( + "enum_same_values_mysql", + DatabaseBackend::MySql, + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::enum_same_values_sqlite( + "enum_same_values_sqlite", + DatabaseBackend::Sqlite, + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), 
"inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::enum_name_changed_postgres( + "enum_name_changed_postgres", + DatabaseBackend::Postgres, + ColumnType::Complex(ComplexColumnType::Enum { + name: "old_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "new_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::enum_name_changed_mysql( + "enum_name_changed_mysql", + DatabaseBackend::MySql, + ColumnType::Complex(ComplexColumnType::Enum { + name: "old_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "new_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::enum_name_changed_sqlite( + "enum_name_changed_sqlite", + DatabaseBackend::Sqlite, + ColumnType::Complex(ComplexColumnType::Enum { + name: "old_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Complex(ComplexColumnType::Enum { + name: "new_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::text_to_enum_postgres( + "text_to_enum_postgres", + DatabaseBackend::Postgres, + ColumnType::Simple(SimpleColumnType::Text), + ColumnType::Complex(ComplexColumnType::Enum { + name: "user_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::text_to_enum_mysql( + "text_to_enum_mysql", + DatabaseBackend::MySql, + ColumnType::Simple(SimpleColumnType::Text), + ColumnType::Complex(ComplexColumnType::Enum { + name: "user_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::text_to_enum_sqlite( + "text_to_enum_sqlite", + DatabaseBackend::Sqlite, + ColumnType::Simple(SimpleColumnType::Text), + ColumnType::Complex(ComplexColumnType::Enum { + name: "user_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }) + )] + #[case::enum_to_text_postgres( + "enum_to_text_postgres", + DatabaseBackend::Postgres, + ColumnType::Complex(ComplexColumnType::Enum { + name: "user_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Simple(SimpleColumnType::Text) + )] + #[case::enum_to_text_mysql( + "enum_to_text_mysql", + DatabaseBackend::MySql, + ColumnType::Complex(ComplexColumnType::Enum { + name: "user_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Simple(SimpleColumnType::Text) + )] + #[case::enum_to_text_sqlite( + "enum_to_text_sqlite", + DatabaseBackend::Sqlite, + ColumnType::Complex(ComplexColumnType::Enum { + name: "user_status".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + ColumnType::Simple(SimpleColumnType::Text) + )] + fn test_modify_enum_types( + #[case] title: &str, + #[case] backend: DatabaseBackend, + #[case] old_type: ColumnType, + #[case] new_type: ColumnType, + ) { + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "status".into(), + r#type: old_type, + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + 
index: None, + foreign_key: None, + }], + constraints: vec![], + }]; + + let result = + build_modify_column_type(&backend, "users", "status", &new_type, ¤t_schema) + .unwrap(); + + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join(";\n"); + + with_settings!({ snapshot_suffix => format!("modify_enum_types_{}", title) }, { + assert_snapshot!(sql); + }); + } + + #[test] + fn test_modify_column_type_to_enum_with_empty_schema() { + // Test the None branch in line 195-200 + // When current_schema is empty, old_type will be None + use vespertide_core::ComplexColumnType; + + let result = build_modify_column_type( + &DatabaseBackend::Postgres, + "users", + "status", + &ColumnType::Complex(ComplexColumnType::Enum { + name: "status_type".into(), + values: EnumValues::String(vec!["active".into(), "inactive".into()]), + }), + &[], // Empty schema - old_type will be None + ); + + assert!(result.is_ok()); + let queries = result.unwrap(); + let sql = queries + .iter() + .map(|q| q.build(DatabaseBackend::Postgres)) + .collect::>() + .join(";\n"); + + // Should create the enum type since old_type is None + assert!(sql.contains("CREATE TYPE")); + assert!(sql.contains("status_type")); + assert!(sql.contains("ALTER TABLE")); + } +} diff --git a/crates/vespertide-query/src/sql/remove_constraint.rs b/crates/vespertide-query/src/sql/remove_constraint.rs index 4c487dd..e078b0c 100644 --- a/crates/vespertide-query/src/sql/remove_constraint.rs +++ b/crates/vespertide-query/src/sql/remove_constraint.rs @@ -1,1590 +1,1605 @@ -use sea_query::{Alias, ForeignKey, Query, Table}; - -use vespertide_core::{TableConstraint, TableDef}; - -use super::create_table::build_create_table_for_backend; -use super::rename_table::build_rename_table; -use super::types::{BuiltQuery, DatabaseBackend}; -use crate::error::QueryError; -use crate::sql::RawSql; - -pub fn build_remove_constraint( - backend: &DatabaseBackend, - table: &str, - constraint: &TableConstraint, - current_schema: &[TableDef], -) -> Result, QueryError> { - match constraint { - TableConstraint::PrimaryKey { .. } => { - if *backend == DatabaseBackend::Sqlite { - // SQLite does not support dropping primary key constraints, use temp table approach - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. SQLite requires current schema information to remove constraints.", - table - )))?; - - // Remove the primary key constraint - let mut new_constraints = table_def.constraints.clone(); - new_constraints.retain(|c| !matches!(c, TableConstraint::PrimaryKey { .. })); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. Create temporary table without primary key constraint - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &table_def.columns, - &new_constraints, - ); - let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table)); - - // 2. 
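A hedged usage sketch of this primary-key removal path, written as if it sat inside this file's test module (so build_remove_constraint, DatabaseBackend, and the vespertide_core types are already in scope) and assuming the TableDef shape after this change, including the new description field. It only restates behaviour visible in this diff: one raw ALTER on PostgreSQL versus a multi-statement rebuild on SQLite.

fn primary_key_drop_demo() {
    let pk = TableConstraint::PrimaryKey {
        columns: vec!["id".into()],
        auto_increment: false,
    };
    let schema = vec![TableDef {
        name: "users".into(),
        description: None,
        columns: vec![ColumnDef {
            name: "id".into(),
            r#type: ColumnType::Simple(SimpleColumnType::Integer),
            nullable: false,
            default: None,
            comment: None,
            primary_key: None,
            unique: None,
            index: None,
            foreign_key: None,
        }],
        constraints: vec![pk.clone()],
    }];

    // PostgreSQL: a single raw statement, roughly
    //   ALTER TABLE "users" DROP CONSTRAINT "users_pkey"
    let pg = build_remove_constraint(&DatabaseBackend::Postgres, "users", &pk, &schema).unwrap();
    assert_eq!(pg.len(), 1);
    let sql = pg
        .iter()
        .map(|q| q.build(DatabaseBackend::Postgres))
        .collect::<Vec<_>>()
        .join(";\n");
    assert!(sql.contains("DROP CONSTRAINT"));

    // SQLite: CREATE TABLE "users_temp", INSERT ... SELECT, DROP TABLE, RENAME,
    // plus any recreated indexes.
    let sqlite = build_remove_constraint(&DatabaseBackend::Sqlite, "users", &pk, &schema).unwrap();
    assert!(sqlite.len() >= 4);
}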
Copy data (all columns) - let column_aliases: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table to original name - let rename_query = build_rename_table(&temp_table, table); - - // 5. Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for constraint in &table_def.constraints { - if let TableConstraint::Index { - name: idx_name, - columns: idx_cols, - } = constraint - { - let index_name = vespertide_naming::build_index_name( - table, - idx_cols, - idx_name.as_deref(), - ); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in idx_cols { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - Ok(queries) - } else { - // Other backends: use raw SQL - let pg_sql = format!( - "ALTER TABLE \"{}\" DROP CONSTRAINT \"{}_pkey\"", - table, table - ); - let mysql_sql = format!("ALTER TABLE `{}` DROP PRIMARY KEY", table); - Ok(vec![BuiltQuery::Raw(RawSql::per_backend( - pg_sql.clone(), - mysql_sql, - pg_sql, - ))]) - } - } - TableConstraint::Unique { name, columns } => { - // SQLite does not support ALTER TABLE ... DROP CONSTRAINT UNIQUE - if *backend == DatabaseBackend::Sqlite { - // Use temporary table approach for SQLite - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. SQLite requires current schema information to remove constraints.", - table - )))?; - - // Create new constraints without the removed unique constraint - let mut new_constraints = table_def.constraints.clone(); - new_constraints.retain(|c| { - match (c, constraint) { - ( - TableConstraint::Unique { - name: c_name, - columns: c_cols, - }, - TableConstraint::Unique { - name: r_name, - columns: r_cols, - }, - ) => { - // Remove if names match, or if no name and columns match - if let (Some(cn), Some(rn)) = (c_name, r_name) { - cn != rn - } else { - c_cols != r_cols - } - } - _ => true, - } - }); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. Create temporary table without the removed constraint - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &table_def.columns, - &new_constraints, - ); - let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table)); - - // 2. 
Copy data (all columns) - let column_aliases: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table to original name - let rename_query = build_rename_table(&temp_table, table); - - // 5. Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for c in &table_def.constraints { - if let TableConstraint::Index { - name: idx_name, - columns: idx_cols, - } = c - { - let index_name = vespertide_naming::build_index_name( - table, - idx_cols, - idx_name.as_deref(), - ); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in idx_cols { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - Ok(queries) - } else { - // For unique constraints, PostgreSQL uses DROP CONSTRAINT, MySQL uses DROP INDEX - // sea_query 0.32 doesn't support dropping unique constraint via Table::alter() directly - // We'll use Index::drop() which generates DROP INDEX for both backends - // However, PostgreSQL expects DROP CONSTRAINT, so we need to use Table::alter() - // Since drop_constraint() doesn't exist, we'll use Index::drop() for now - // Note: This may not match PostgreSQL's DROP CONSTRAINT syntax - let constraint_name = vespertide_naming::build_unique_constraint_name( - table, - columns, - name.as_deref(), - ); - // Try using Table::alter() with drop_constraint if available - // If not, use Index::drop() as fallback - // For PostgreSQL, we need DROP CONSTRAINT, but sea_query doesn't support this - // We'll use raw SQL for PostgreSQL and Index::drop() for MySQL - let pg_sql = format!( - "ALTER TABLE \"{}\" DROP CONSTRAINT \"{}\"", - table, constraint_name - ); - let mysql_sql = format!("ALTER TABLE `{}` DROP INDEX `{}`", table, constraint_name); - Ok(vec![BuiltQuery::Raw(RawSql::per_backend( - pg_sql.clone(), - mysql_sql, - pg_sql, - ))]) - } - } - TableConstraint::ForeignKey { name, columns, .. } => { - // SQLite does not support ALTER TABLE ... DROP CONSTRAINT FOREIGN KEY - if *backend == DatabaseBackend::Sqlite { - // Use temporary table approach for SQLite - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. SQLite requires current schema information to remove constraints.", - table - )))?; - - // Create new constraints without the removed foreign key constraint - let mut new_constraints = table_def.constraints.clone(); - new_constraints.retain(|c| { - match (c, constraint) { - ( - TableConstraint::ForeignKey { - name: c_name, - columns: c_cols, - .. 
- }, - TableConstraint::ForeignKey { - name: r_name, - columns: r_cols, - .. - }, - ) => { - // Remove if names match, or if no name and columns match - if let (Some(cn), Some(rn)) = (c_name, r_name) { - cn != rn - } else { - c_cols != r_cols - } - } - _ => true, - } - }); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. Create temporary table without the removed constraint - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &table_def.columns, - &new_constraints, - ); - let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table)); - - // 2. Copy data (all columns) - let column_aliases: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table to original name - let rename_query = build_rename_table(&temp_table, table); - - // 5. Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for c in &table_def.constraints { - if let TableConstraint::Index { - name: idx_name, - columns: idx_cols, - } = c - { - let index_name = vespertide_naming::build_index_name( - table, - idx_cols, - idx_name.as_deref(), - ); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in idx_cols { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - Ok(queries) - } else { - // Build foreign key drop using ForeignKey::drop() - let constraint_name = - vespertide_naming::build_foreign_key_name(table, columns, name.as_deref()); - let fk_drop = ForeignKey::drop() - .name(&constraint_name) - .table(Alias::new(table)) - .to_owned(); - Ok(vec![BuiltQuery::DropForeignKey(Box::new(fk_drop))]) - } - } - TableConstraint::Index { name, columns } => { - // Index constraints are simple DROP INDEX statements for all backends - let index_name = if let Some(n) = name { - // Use naming convention for named indexes - vespertide_naming::build_index_name(table, columns, Some(n)) - } else { - // Generate name from table and columns for unnamed indexes - vespertide_naming::build_index_name(table, columns, None) - }; - let idx_drop = sea_query::Index::drop() - .table(Alias::new(table)) - .name(&index_name) - .to_owned(); - Ok(vec![BuiltQuery::DropIndex(Box::new(idx_drop))]) - } - TableConstraint::Check { name, .. } => { - // SQLite does not support ALTER TABLE ... DROP CONSTRAINT CHECK - if *backend == DatabaseBackend::Sqlite { - // Use temporary table approach for SQLite - let table_def = current_schema - .iter() - .find(|t| t.name == table) - .ok_or_else(|| QueryError::Other(format!( - "Table '{}' not found in current schema. 
SQLite requires current schema information to remove constraints.", - table - )))?; - - // Create new constraints without the removed check constraint - let mut new_constraints = table_def.constraints.clone(); - new_constraints.retain(|c| match (c, constraint) { - ( - TableConstraint::Check { name: c_name, .. }, - TableConstraint::Check { name: r_name, .. }, - ) => c_name != r_name, - _ => true, - }); - - // Generate temporary table name - let temp_table = format!("{}_temp", table); - - // 1. Create temporary table without the removed constraint - let create_temp_table = build_create_table_for_backend( - backend, - &temp_table, - &table_def.columns, - &new_constraints, - ); - let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table)); - - // 2. Copy data (all columns) - let column_aliases: Vec = table_def - .columns - .iter() - .map(|c| Alias::new(&c.name)) - .collect(); - let mut select_query = Query::select(); - for col_alias in &column_aliases { - select_query = select_query.column(col_alias.clone()).to_owned(); - } - select_query = select_query.from(Alias::new(table)).to_owned(); - - let insert_stmt = Query::insert() - .into_table(Alias::new(&temp_table)) - .columns(column_aliases.clone()) - .select_from(select_query) - .unwrap() - .to_owned(); - let insert_query = BuiltQuery::Insert(Box::new(insert_stmt)); - - // 3. Drop original table - let drop_table = Table::drop().table(Alias::new(table)).to_owned(); - let drop_query = BuiltQuery::DropTable(Box::new(drop_table)); - - // 4. Rename temporary table to original name - let rename_query = build_rename_table(&temp_table, table); - - // 5. Recreate indexes from Index constraints - let mut index_queries = Vec::new(); - for c in &table_def.constraints { - if let TableConstraint::Index { - name: idx_name, - columns: idx_cols, - } = c - { - let index_name = vespertide_naming::build_index_name( - table, - idx_cols, - idx_name.as_deref(), - ); - let mut idx_stmt = sea_query::Index::create(); - idx_stmt = idx_stmt.name(&index_name).to_owned(); - for col_name in idx_cols { - idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned(); - } - idx_stmt = idx_stmt.table(Alias::new(table)).to_owned(); - index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt))); - } - } - - let mut queries = vec![create_query, insert_query, drop_query, rename_query]; - queries.extend(index_queries); - Ok(queries) - } else { - let pg_sql = format!("ALTER TABLE \"{}\" DROP CONSTRAINT \"{}\"", table, name); - let mysql_sql = format!("ALTER TABLE `{}` DROP CHECK `{}`", table, name); - Ok(vec![BuiltQuery::Raw(RawSql::per_backend( - pg_sql.clone(), - mysql_sql, - pg_sql, - ))]) - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::sql::types::DatabaseBackend; - use insta::{assert_snapshot, with_settings}; - use rstest::rstest; - use vespertide_core::{ColumnDef, ColumnType, SimpleColumnType, TableConstraint, TableDef}; - - #[rstest] - #[case::remove_constraint_primary_key_postgres( - "remove_constraint_primary_key_postgres", - DatabaseBackend::Postgres, - &["DROP CONSTRAINT \"users_pkey\""] - )] - #[case::remove_constraint_primary_key_mysql( - "remove_constraint_primary_key_mysql", - DatabaseBackend::MySql, - &["DROP PRIMARY KEY"] - )] - #[case::remove_constraint_primary_key_sqlite( - "remove_constraint_primary_key_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - #[case::remove_constraint_unique_named_postgres( - "remove_constraint_unique_named_postgres", - DatabaseBackend::Postgres, - &["DROP CONSTRAINT 
\"uq_users__uq_email\""] - )] - #[case::remove_constraint_unique_named_mysql( - "remove_constraint_unique_named_mysql", - DatabaseBackend::MySql, - &["DROP INDEX `uq_users__uq_email`"] - )] - #[case::remove_constraint_unique_named_sqlite( - "remove_constraint_unique_named_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - #[case::remove_constraint_foreign_key_named_postgres( - "remove_constraint_foreign_key_named_postgres", - DatabaseBackend::Postgres, - &["DROP CONSTRAINT \"fk_users__fk_user\""] - )] - #[case::remove_constraint_foreign_key_named_mysql( - "remove_constraint_foreign_key_named_mysql", - DatabaseBackend::MySql, - &["DROP FOREIGN KEY `fk_users__fk_user`"] - )] - #[case::remove_constraint_foreign_key_named_sqlite( - "remove_constraint_foreign_key_named_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - #[case::remove_constraint_check_named_postgres( - "remove_constraint_check_named_postgres", - DatabaseBackend::Postgres, - &["DROP CONSTRAINT \"chk_age\""] - )] - #[case::remove_constraint_check_named_mysql( - "remove_constraint_check_named_mysql", - DatabaseBackend::MySql, - &["DROP CHECK `chk_age`"] - )] - #[case::remove_constraint_check_named_sqlite( - "remove_constraint_check_named_sqlite", - DatabaseBackend::Sqlite, - &["CREATE TABLE \"users_temp\""] - )] - fn test_remove_constraint( - #[case] title: &str, - #[case] backend: DatabaseBackend, - #[case] expected: &[&str], - ) { - let constraint = if title.contains("primary_key") { - TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - } - } else if title.contains("unique") { - TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - } - } else if title.contains("foreign_key") { - TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - } - } else { - TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - } - }; - - // For SQLite, we need to provide current schema with the constraint to be removed - let current_schema = vec![TableDef { - name: "users".into(), - columns: if title.contains("check") { - vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ] - } else if title.contains("foreign_key") { - vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ] - } else { - // primary key / unique cases - vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }] - }, - constraints: 
vec![constraint.clone()], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result[0].build(backend); - for exp in expected { - assert!( - sql.contains(exp), - "Expected SQL to contain '{}', got: {}", - exp, - sql - ); - } - - with_settings!({ snapshot_suffix => format!("remove_constraint_{}", title) }, { - assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::>().join("\n")); - }); - } - - #[test] - fn test_remove_constraint_primary_key_sqlite_table_not_found() { - // Test error when table is not found (line 25) - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let result = build_remove_constraint( - &DatabaseBackend::Sqlite, - "nonexistent_table", - &constraint, - &[], // Empty schema - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); - } - - #[rstest] - #[case::remove_primary_key_with_index_postgres(DatabaseBackend::Postgres)] - #[case::remove_primary_key_with_index_mysql(DatabaseBackend::MySql)] - #[case::remove_primary_key_with_index_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_primary_key_with_index(#[case] backend: DatabaseBackend) { - // Test PrimaryKey removal with indexes - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![ - constraint.clone(), - TableConstraint::Index { - name: Some("idx_id".into()), - columns: vec!["id".into()], - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("ix_users__idx_id")); - } - - with_settings!({ snapshot_suffix => format!("remove_primary_key_with_index_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_primary_key_with_unique_constraint_postgres(DatabaseBackend::Postgres)] - #[case::remove_primary_key_with_unique_constraint_mysql(DatabaseBackend::MySql)] - #[case::remove_primary_key_with_unique_constraint_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_primary_key_with_unique_constraint(#[case] backend: DatabaseBackend) { - // Test PrimaryKey removal with unique constraint - let constraint = TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }], - constraints: vec![ - constraint.clone(), - TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - 
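The fully qualified names asserted in these cases, such as uq_users__uq_email and fk_users__fk_user, appear to follow a {prefix}_{table}__{name} scheme produced by the vespertide-naming helpers. A small sketch follows; the expected outputs are taken from the assertions in these tests rather than from the helper crate itself, so treat both the argument types and the literals as assumptions.

// Naming-scheme sketch; outputs are copied from the expectations asserted in
// these tests, and the helper signatures are inferred from the call sites.
fn naming_scheme_demo() {
    let email = vec!["email".to_string()];
    let user_id = vec!["user_id".to_string()];
    let id = vec!["id".to_string()];

    assert_eq!(
        vespertide_naming::build_unique_constraint_name("users", &email, Some("uq_email")),
        "uq_users__uq_email"
    );
    assert_eq!(
        vespertide_naming::build_foreign_key_name("users", &user_id, Some("fk_user")),
        "fk_users__fk_user"
    );
    assert_eq!(
        vespertide_naming::build_index_name("users", &id, Some("idx_id")),
        "ix_users__idx_id"
    );
}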
.join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - // Unique constraint should be in the temp table definition - assert!(sql.contains("CREATE TABLE")); - } - - with_settings!({ snapshot_suffix => format!("remove_primary_key_with_unique_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[test] - fn test_remove_constraint_unique_sqlite_table_not_found() { - // Test error when table is not found (line 112) - let constraint = TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }; - let result = build_remove_constraint( - &DatabaseBackend::Sqlite, - "nonexistent_table", - &constraint, - &[], // Empty schema - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); - } - - #[rstest] - #[case::remove_unique_without_name_postgres(DatabaseBackend::Postgres)] - #[case::remove_unique_without_name_mysql(DatabaseBackend::MySql)] - #[case::remove_unique_without_name_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_unique_without_name(#[case] backend: DatabaseBackend) { - // Test Unique removal without name (lines 134, 137, 210) - let constraint = TableConstraint::Unique { - name: None, - columns: vec!["email".into()], - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![constraint.clone()], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should generate default constraint name - if !matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("users_email_key") || sql.contains("email")); - } - - with_settings!({ snapshot_suffix => format!("remove_unique_without_name_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_unique_with_index_postgres(DatabaseBackend::Postgres)] - #[case::remove_unique_with_index_mysql(DatabaseBackend::MySql)] - #[case::remove_unique_with_index_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_unique_with_index(#[case] backend: DatabaseBackend) { - // Test Unique removal with indexes - let constraint = TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![ - constraint.clone(), - TableConstraint::Index { - name: Some("idx_id".into()), - columns: vec!["id".into()], - }, - ], - }]; - - let result = - 
build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("ix_users__idx_id")); - } - - with_settings!({ snapshot_suffix => format!("remove_unique_with_index_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_unique_with_other_unique_constraint_postgres(DatabaseBackend::Postgres)] - #[case::remove_unique_with_other_unique_constraint_mysql(DatabaseBackend::MySql)] - #[case::remove_unique_with_other_unique_constraint_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_unique_with_other_unique_constraint( - #[case] backend: DatabaseBackend, - ) { - // Test Unique removal with another unique constraint - let constraint = TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![ - constraint.clone(), - TableConstraint::Unique { - name: Some("uq_name".into()), - columns: vec!["name".into()], - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - // The remaining unique constraint should be preserved - assert!(sql.contains("CREATE TABLE")); - } - - with_settings!({ snapshot_suffix => format!("remove_unique_with_other_unique_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[test] - fn test_remove_constraint_foreign_key_sqlite_table_not_found() { - // Test error when table is not found (line 236) - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let result = build_remove_constraint( - &DatabaseBackend::Sqlite, - "nonexistent_table", - &constraint, - &[], // Empty schema - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); - } - - #[rstest] - #[case::remove_foreign_key_without_name_postgres(DatabaseBackend::Postgres)] - #[case::remove_foreign_key_without_name_mysql(DatabaseBackend::MySql)] - #[case::remove_foreign_key_without_name_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_foreign_key_without_name(#[case] backend: DatabaseBackend) { - // Test ForeignKey removal without name (lines 260, 263, 329) - let constraint = TableConstraint::ForeignKey { - name: None, - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![TableDef { - name: "posts".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: 
ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![constraint.clone()], - }]; - - let result = - build_remove_constraint(&backend, "posts", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should generate default constraint name - if !matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("posts_user_id_fkey") || sql.contains("user_id")); - } - - with_settings!({ snapshot_suffix => format!("remove_foreign_key_without_name_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_foreign_key_with_index_postgres(DatabaseBackend::Postgres)] - #[case::remove_foreign_key_with_index_mysql(DatabaseBackend::MySql)] - #[case::remove_foreign_key_with_index_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_foreign_key_with_index(#[case] backend: DatabaseBackend) { - // Test ForeignKey removal with indexes - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![TableDef { - name: "posts".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![ - constraint.clone(), - TableConstraint::Index { - name: Some("idx_user_id".into()), - columns: vec!["user_id".into()], - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "posts", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_user_id")); - } - - with_settings!({ snapshot_suffix => format!("remove_foreign_key_with_index_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_foreign_key_with_unique_constraint_postgres(DatabaseBackend::Postgres)] - #[case::remove_foreign_key_with_unique_constraint_mysql(DatabaseBackend::MySql)] - #[case::remove_foreign_key_with_unique_constraint_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_foreign_key_with_unique_constraint(#[case] backend: DatabaseBackend) { - // Test ForeignKey removal with unique constraint - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - on_update: None, - }; - let current_schema = vec![TableDef { - name: "posts".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: 
None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "user_id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![ - constraint.clone(), - TableConstraint::Unique { - name: Some("uq_user_id".into()), - columns: vec!["user_id".into()], - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "posts", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - // Unique constraint should be preserved in the temp table - assert!(sql.contains("CREATE TABLE")); - } - - with_settings!({ snapshot_suffix => format!("remove_foreign_key_with_unique_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[test] - fn test_remove_constraint_check_sqlite_table_not_found() { - // Test error when table is not found (line 346) - let constraint = TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - }; - let result = build_remove_constraint( - &DatabaseBackend::Sqlite, - "nonexistent_table", - &constraint, - &[], // Empty schema - ); - assert!(result.is_err()); - let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); - } - - #[rstest] - #[case::remove_check_with_index_postgres(DatabaseBackend::Postgres)] - #[case::remove_check_with_index_mysql(DatabaseBackend::MySql)] - #[case::remove_check_with_index_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_check_with_index(#[case] backend: DatabaseBackend) { - // Test Check removal with indexes - let constraint = TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![ - constraint.clone(), - TableConstraint::Index { - name: Some("idx_age".into()), - columns: vec!["age".into()], - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - assert!(sql.contains("CREATE INDEX")); - assert!(sql.contains("idx_age")); - } - - with_settings!({ snapshot_suffix => format!("remove_check_with_index_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_check_with_unique_constraint_postgres(DatabaseBackend::Postgres)] - #[case::remove_check_with_unique_constraint_mysql(DatabaseBackend::MySql)] - #[case::remove_check_with_unique_constraint_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_check_with_unique_constraint(#[case] backend: DatabaseBackend) { - // Test Check removal with unique constraint - let constraint = TableConstraint::Check { - name: "chk_age".into(), - expr: "age > 0".into(), - }; - let 
current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "age".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![ - constraint.clone(), - TableConstraint::Unique { - name: Some("uq_age".into()), - columns: vec!["age".into()], - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - if matches!(backend, DatabaseBackend::Sqlite) { - // Unique constraint should be preserved in the temp table - assert!(sql.contains("CREATE TABLE")); - } - - with_settings!({ snapshot_suffix => format!("remove_check_with_unique_constraint_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_unique_with_other_constraints_postgres(DatabaseBackend::Postgres)] - #[case::remove_unique_with_other_constraints_mysql(DatabaseBackend::MySql)] - #[case::remove_unique_with_other_constraints_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_unique_with_other_constraints(#[case] backend: DatabaseBackend) { - // Test Unique removal with other constraint types (line 137) - let constraint = TableConstraint::Unique { - name: Some("uq_email".into()), - columns: vec!["email".into()], - }; - let current_schema = vec![TableDef { - name: "users".into(), - columns: vec![ - ColumnDef { - name: "id".into(), - r#type: ColumnType::Simple(SimpleColumnType::Integer), - nullable: false, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ColumnDef { - name: "email".into(), - r#type: ColumnType::Simple(SimpleColumnType::Text), - nullable: true, - default: None, - comment: None, - primary_key: None, - unique: None, - index: None, - foreign_key: None, - }, - ], - constraints: vec![ - TableConstraint::PrimaryKey { - columns: vec!["id".into()], - auto_increment: false, - }, - constraint.clone(), - TableConstraint::Check { - name: "chk_email".into(), - expr: "email IS NOT NULL".into(), - }, - ], - }]; - - let result = - build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); - let sql = result - .iter() - .map(|q| q.build(backend)) - .collect::>() - .join("\n"); - - // Should still work with other constraint types present - assert!(sql.contains("DROP") || sql.contains("CREATE TABLE")); - - with_settings!({ snapshot_suffix => format!("remove_unique_with_other_constraints_{:?}", backend) }, { - assert_snapshot!(sql); - }); - } - - #[rstest] - #[case::remove_foreign_key_with_other_constraints_postgres(DatabaseBackend::Postgres)] - #[case::remove_foreign_key_with_other_constraints_mysql(DatabaseBackend::MySql)] - #[case::remove_foreign_key_with_other_constraints_sqlite(DatabaseBackend::Sqlite)] - fn test_remove_constraint_foreign_key_with_other_constraints(#[case] backend: DatabaseBackend) { - // Test ForeignKey removal with other constraint types (line 263) - let constraint = TableConstraint::ForeignKey { - name: Some("fk_user".into()), - columns: vec!["user_id".into()], - ref_table: "users".into(), - ref_columns: vec!["id".into()], - on_delete: None, - 
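For the non-SQLite foreign-key removal exercised by these tests, the statement built with ForeignKey::drop() renders differently per backend. A standalone sea_query sketch with hypothetical names:

// Standalone sea_query sketch of the non-SQLite foreign-key drop; table and
// constraint names are hypothetical.
use sea_query::{Alias, ForeignKey, MysqlQueryBuilder, PostgresQueryBuilder};

fn main() {
    let fk_drop = ForeignKey::drop()
        .name("fk_posts__fk_user")
        .table(Alias::new("posts"))
        .to_owned();

    // PostgreSQL: roughly ALTER TABLE "posts" DROP CONSTRAINT "fk_posts__fk_user"
    println!("{}", fk_drop.to_string(PostgresQueryBuilder));
    // MySQL: roughly ALTER TABLE `posts` DROP FOREIGN KEY `fk_posts__fk_user`
    println!("{}", fk_drop.to_string(MysqlQueryBuilder));
    // SQLite cannot drop a foreign key in place, hence the table rebuild path above.
}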
on_update: None,
-        };
-        let current_schema = vec![TableDef {
-            name: "posts".into(),
-            columns: vec![
-                ColumnDef {
-                    name: "id".into(),
-                    r#type: ColumnType::Simple(SimpleColumnType::Integer),
-                    nullable: false,
-                    default: None,
-                    comment: None,
-                    primary_key: None,
-                    unique: None,
-                    index: None,
-                    foreign_key: None,
-                },
-                ColumnDef {
-                    name: "user_id".into(),
-                    r#type: ColumnType::Simple(SimpleColumnType::Integer),
-                    nullable: true,
-                    default: None,
-                    comment: None,
-                    primary_key: None,
-                    unique: None,
-                    index: None,
-                    foreign_key: None,
-                },
-            ],
-            constraints: vec![
-                TableConstraint::PrimaryKey {
-                    columns: vec!["id".into()],
-                    auto_increment: false,
-                },
-                constraint.clone(),
-                TableConstraint::Unique {
-                    name: Some("uq_user_id".into()),
-                    columns: vec!["user_id".into()],
-                },
-                TableConstraint::Check {
-                    name: "chk_user_id".into(),
-                    expr: "user_id > 0".into(),
-                },
-            ],
-        }];
-
-        let result =
-            build_remove_constraint(&backend, "posts", &constraint, &current_schema).unwrap();
-        let sql = result
-            .iter()
-            .map(|q| q.build(backend))
-            .collect::<Vec<_>>()
-            .join("\n");
-
-        // Should still work with other constraint types present
-        assert!(sql.contains("DROP") || sql.contains("CREATE TABLE"));
-
-        with_settings!({ snapshot_suffix => format!("remove_foreign_key_with_other_constraints_{:?}", backend) }, {
-            assert_snapshot!(sql);
-        });
-    }
-
-    #[rstest]
-    #[case::remove_check_with_other_constraints_postgres(DatabaseBackend::Postgres)]
-    #[case::remove_check_with_other_constraints_mysql(DatabaseBackend::MySql)]
-    #[case::remove_check_with_other_constraints_sqlite(DatabaseBackend::Sqlite)]
-    fn test_remove_constraint_check_with_other_constraints(#[case] backend: DatabaseBackend) {
-        // Test Check removal with other constraint types (line 357)
-        let constraint = TableConstraint::Check {
-            name: "chk_age".into(),
-            expr: "age > 0".into(),
-        };
-        let current_schema = vec![TableDef {
-            name: "users".into(),
-            columns: vec![
-                ColumnDef {
-                    name: "id".into(),
-                    r#type: ColumnType::Simple(SimpleColumnType::Integer),
-                    nullable: false,
-                    default: None,
-                    comment: None,
-                    primary_key: None,
-                    unique: None,
-                    index: None,
-                    foreign_key: None,
-                },
-                ColumnDef {
-                    name: "age".into(),
-                    r#type: ColumnType::Simple(SimpleColumnType::Integer),
-                    nullable: true,
-                    default: None,
-                    comment: None,
-                    primary_key: None,
-                    unique: None,
-                    index: None,
-                    foreign_key: None,
-                },
-            ],
-            constraints: vec![
-                TableConstraint::PrimaryKey {
-                    columns: vec!["id".into()],
-                    auto_increment: false,
-                },
-                TableConstraint::Unique {
-                    name: Some("uq_age".into()),
-                    columns: vec!["age".into()],
-                },
-                constraint.clone(),
-            ],
-        }];
-
-        let result =
-            build_remove_constraint(&backend, "users", &constraint, &current_schema).unwrap();
-        let sql = result
-            .iter()
-            .map(|q| q.build(backend))
-            .collect::<Vec<_>>()
-            .join("\n");
-
-        // Should still work with other constraint types present
-        assert!(sql.contains("DROP") || sql.contains("CREATE TABLE"));
-
-        with_settings!({ snapshot_suffix => format!("remove_check_with_other_constraints_{:?}", backend) }, {
-            assert_snapshot!(sql);
-        });
-    }
-
-    #[rstest]
-    #[case::remove_index_with_custom_inline_name_postgres(DatabaseBackend::Postgres)]
-    #[case::remove_index_with_custom_inline_name_mysql(DatabaseBackend::MySql)]
-    #[case::remove_index_with_custom_inline_name_sqlite(DatabaseBackend::Sqlite)]
-    fn test_remove_constraint_index_with_custom_inline_name(#[case] backend: DatabaseBackend) {
-        // Test Index removal with a custom name from inline index field
-        // This tests the scenario where index: "custom_idx_name" is used
-        let constraint = TableConstraint::Index {
-            name: Some("custom_idx_email".into()),
-            columns: vec!["email".into()],
-        };
-
-        let schema = vec![TableDef {
-            name: "users".to_string(),
-            columns: vec![ColumnDef {
-                name: "email".to_string(),
-                r#type: ColumnType::Simple(SimpleColumnType::Text),
-                nullable: true,
-                default: None,
-                comment: None,
-                primary_key: None,
-                unique: None,
-                index: Some(vespertide_core::StrOrBoolOrArray::Str(
-                    "custom_idx_email".into(),
-                )),
-                foreign_key: None,
-            }],
-            constraints: vec![],
-        }];
-
-        let result = build_remove_constraint(&backend, "users", &constraint, &schema);
-        assert!(result.is_ok());
-        let sql = result
-            .unwrap()
-            .iter()
-            .map(|q| q.build(backend))
-            .collect::<Vec<_>>()
-            .join("\n");
-
-        // Should use the custom index name
-        assert!(sql.contains("custom_idx_email"));
-
-        with_settings!({ snapshot_suffix => format!("remove_index_custom_name_{:?}", backend) }, {
-            assert_snapshot!(sql);
-        });
-    }
-}
+use sea_query::{Alias, ForeignKey, Query, Table};
+
+use vespertide_core::{TableConstraint, TableDef};
+
+use super::create_table::build_create_table_for_backend;
+use super::rename_table::build_rename_table;
+use super::types::{BuiltQuery, DatabaseBackend};
+use crate::error::QueryError;
+use crate::sql::RawSql;
+
+pub fn build_remove_constraint(
+    backend: &DatabaseBackend,
+    table: &str,
+    constraint: &TableConstraint,
+    current_schema: &[TableDef],
+) -> Result<Vec<BuiltQuery>, QueryError> {
+    match constraint {
+        TableConstraint::PrimaryKey { .. } => {
+            if *backend == DatabaseBackend::Sqlite {
+                // SQLite does not support dropping primary key constraints, use temp table approach
+                let table_def = current_schema
+                    .iter()
+                    .find(|t| t.name == table)
+                    .ok_or_else(|| QueryError::Other(format!(
+                        "Table '{}' not found in current schema. SQLite requires current schema information to remove constraints.",
+                        table
+                    )))?;
+
+                // Remove the primary key constraint
+                let mut new_constraints = table_def.constraints.clone();
+                new_constraints.retain(|c| !matches!(c, TableConstraint::PrimaryKey { .. }));
+
+                // Generate temporary table name
+                let temp_table = format!("{}_temp", table);
+
+                // 1. Create temporary table without primary key constraint
+                let create_temp_table = build_create_table_for_backend(
+                    backend,
+                    &temp_table,
+                    &table_def.columns,
+                    &new_constraints,
+                );
+                let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table));
+
+                // 2. Copy data (all columns)
+                let column_aliases: Vec<Alias> = table_def
+                    .columns
+                    .iter()
+                    .map(|c| Alias::new(&c.name))
+                    .collect();
+                let mut select_query = Query::select();
+                for col_alias in &column_aliases {
+                    select_query = select_query.column(col_alias.clone()).to_owned();
+                }
+                select_query = select_query.from(Alias::new(table)).to_owned();
+
+                let insert_stmt = Query::insert()
+                    .into_table(Alias::new(&temp_table))
+                    .columns(column_aliases.clone())
+                    .select_from(select_query)
+                    .unwrap()
+                    .to_owned();
+                let insert_query = BuiltQuery::Insert(Box::new(insert_stmt));
+
+                // 3. Drop original table
+                let drop_table = Table::drop().table(Alias::new(table)).to_owned();
+                let drop_query = BuiltQuery::DropTable(Box::new(drop_table));
+
+                // 4. Rename temporary table to original name
+                let rename_query = build_rename_table(&temp_table, table);
+
+                // 5. Recreate indexes from Index constraints
+                let mut index_queries = Vec::new();
+                for constraint in &table_def.constraints {
+                    if let TableConstraint::Index {
+                        name: idx_name,
+                        columns: idx_cols,
+                    } = constraint
+                    {
+                        let index_name = vespertide_naming::build_index_name(
+                            table,
+                            idx_cols,
+                            idx_name.as_deref(),
+                        );
+                        let mut idx_stmt = sea_query::Index::create();
+                        idx_stmt = idx_stmt.name(&index_name).to_owned();
+                        for col_name in idx_cols {
+                            idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned();
+                        }
+                        idx_stmt = idx_stmt.table(Alias::new(table)).to_owned();
+                        index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt)));
+                    }
+                }
+
+                let mut queries = vec![create_query, insert_query, drop_query, rename_query];
+                queries.extend(index_queries);
+                Ok(queries)
+            } else {
+                // Other backends: use raw SQL
+                let pg_sql = format!(
+                    "ALTER TABLE \"{}\" DROP CONSTRAINT \"{}_pkey\"",
+                    table, table
+                );
+                let mysql_sql = format!("ALTER TABLE `{}` DROP PRIMARY KEY", table);
+                Ok(vec![BuiltQuery::Raw(RawSql::per_backend(
+                    pg_sql.clone(),
+                    mysql_sql,
+                    pg_sql,
+                ))])
+            }
+        }
+        TableConstraint::Unique { name, columns } => {
+            // SQLite does not support ALTER TABLE ... DROP CONSTRAINT UNIQUE
+            if *backend == DatabaseBackend::Sqlite {
+                // Use temporary table approach for SQLite
+                let table_def = current_schema
+                    .iter()
+                    .find(|t| t.name == table)
+                    .ok_or_else(|| QueryError::Other(format!(
+                        "Table '{}' not found in current schema. SQLite requires current schema information to remove constraints.",
+                        table
+                    )))?;
+
+                // Create new constraints without the removed unique constraint
+                let mut new_constraints = table_def.constraints.clone();
+                new_constraints.retain(|c| {
+                    match (c, constraint) {
+                        (
+                            TableConstraint::Unique {
+                                name: c_name,
+                                columns: c_cols,
+                            },
+                            TableConstraint::Unique {
+                                name: r_name,
+                                columns: r_cols,
+                            },
+                        ) => {
+                            // Remove if names match, or if no name and columns match
+                            if let (Some(cn), Some(rn)) = (c_name, r_name) {
+                                cn != rn
+                            } else {
+                                c_cols != r_cols
+                            }
+                        }
+                        _ => true,
+                    }
+                });
+
+                // Generate temporary table name
+                let temp_table = format!("{}_temp", table);
+
+                // 1. Create temporary table without the removed constraint
+                let create_temp_table = build_create_table_for_backend(
+                    backend,
+                    &temp_table,
+                    &table_def.columns,
+                    &new_constraints,
+                );
+                let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table));
+
+                // 2. Copy data (all columns)
+                let column_aliases: Vec<Alias> = table_def
+                    .columns
+                    .iter()
+                    .map(|c| Alias::new(&c.name))
+                    .collect();
+                let mut select_query = Query::select();
+                for col_alias in &column_aliases {
+                    select_query = select_query.column(col_alias.clone()).to_owned();
+                }
+                select_query = select_query.from(Alias::new(table)).to_owned();
+
+                let insert_stmt = Query::insert()
+                    .into_table(Alias::new(&temp_table))
+                    .columns(column_aliases.clone())
+                    .select_from(select_query)
+                    .unwrap()
+                    .to_owned();
+                let insert_query = BuiltQuery::Insert(Box::new(insert_stmt));
+
+                // 3. Drop original table
+                let drop_table = Table::drop().table(Alias::new(table)).to_owned();
+                let drop_query = BuiltQuery::DropTable(Box::new(drop_table));
+
+                // 4. Rename temporary table to original name
+                let rename_query = build_rename_table(&temp_table, table);
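+
+                // Taken together, steps 1-5 amount to a table rebuild roughly like the
+                // following (illustrative only, shown for a hypothetical `users` table;
+                // the actual statements are whatever sea_query renders for SQLite):
+                //
+                //   CREATE TABLE "users_temp" (...);   -- same columns, minus the dropped constraint
+                //   INSERT INTO "users_temp" ("id", ...) SELECT "id", ... FROM "users";
+                //   DROP TABLE "users";
+                //   ALTER TABLE "users_temp" RENAME TO "users";
+                //   CREATE INDEX ... ON "users" (...); -- one per Index constraint, see step 5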
+
+                // 5. Recreate indexes from Index constraints
+                let mut index_queries = Vec::new();
+                for c in &table_def.constraints {
+                    if let TableConstraint::Index {
+                        name: idx_name,
+                        columns: idx_cols,
+                    } = c
+                    {
+                        let index_name = vespertide_naming::build_index_name(
+                            table,
+                            idx_cols,
+                            idx_name.as_deref(),
+                        );
+                        let mut idx_stmt = sea_query::Index::create();
+                        idx_stmt = idx_stmt.name(&index_name).to_owned();
+                        for col_name in idx_cols {
+                            idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned();
+                        }
+                        idx_stmt = idx_stmt.table(Alias::new(table)).to_owned();
+                        index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt)));
+                    }
+                }
+
+                let mut queries = vec![create_query, insert_query, drop_query, rename_query];
+                queries.extend(index_queries);
+                Ok(queries)
+            } else {
+                // PostgreSQL drops a unique constraint with DROP CONSTRAINT, while MySQL
+                // uses DROP INDEX. sea_query 0.32 offers no drop_constraint() on
+                // Table::alter(), so we emit backend-specific raw SQL instead.
+                let constraint_name = vespertide_naming::build_unique_constraint_name(
+                    table,
+                    columns,
+                    name.as_deref(),
+                );
+                let pg_sql = format!(
+                    "ALTER TABLE \"{}\" DROP CONSTRAINT \"{}\"",
+                    table, constraint_name
+                );
+                let mysql_sql = format!("ALTER TABLE `{}` DROP INDEX `{}`", table, constraint_name);
+                Ok(vec![BuiltQuery::Raw(RawSql::per_backend(
+                    pg_sql.clone(),
+                    mysql_sql,
+                    pg_sql,
+                ))])
+            }
+        }
+        TableConstraint::ForeignKey { name, columns, .. } => {
+            // SQLite does not support ALTER TABLE ... DROP CONSTRAINT FOREIGN KEY
+            if *backend == DatabaseBackend::Sqlite {
+                // Use temporary table approach for SQLite
+                let table_def = current_schema
+                    .iter()
+                    .find(|t| t.name == table)
+                    .ok_or_else(|| QueryError::Other(format!(
+                        "Table '{}' not found in current schema. SQLite requires current schema information to remove constraints.",
+                        table
+                    )))?;
+
+                // Create new constraints without the removed foreign key constraint
+                let mut new_constraints = table_def.constraints.clone();
+                new_constraints.retain(|c| {
+                    match (c, constraint) {
+                        (
+                            TableConstraint::ForeignKey {
+                                name: c_name,
+                                columns: c_cols,
+                                ..
+                            },
+                            TableConstraint::ForeignKey {
+                                name: r_name,
+                                columns: r_cols,
+                                ..
+                            },
+                        ) => {
+                            // Remove if names match, or if no name and columns match
+                            if let (Some(cn), Some(rn)) = (c_name, r_name) {
+                                cn != rn
+                            } else {
+                                c_cols != r_cols
+                            }
+                        }
+                        _ => true,
+                    }
+                });
+
+                // Generate temporary table name
+                let temp_table = format!("{}_temp", table);
+
+                // 1. Create temporary table without the removed constraint
+                let create_temp_table = build_create_table_for_backend(
+                    backend,
+                    &temp_table,
+                    &table_def.columns,
+                    &new_constraints,
+                );
+                let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table));
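+
+                // Note: the copy in step 2 below names every column explicitly on both the
+                // INSERT and the SELECT side (no SELECT *), so the statement stays valid as
+                // long as the temp table is created with the same column list.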
+
+                // 2. Copy data (all columns)
+                let column_aliases: Vec<Alias> = table_def
+                    .columns
+                    .iter()
+                    .map(|c| Alias::new(&c.name))
+                    .collect();
+                let mut select_query = Query::select();
+                for col_alias in &column_aliases {
+                    select_query = select_query.column(col_alias.clone()).to_owned();
+                }
+                select_query = select_query.from(Alias::new(table)).to_owned();
+
+                let insert_stmt = Query::insert()
+                    .into_table(Alias::new(&temp_table))
+                    .columns(column_aliases.clone())
+                    .select_from(select_query)
+                    .unwrap()
+                    .to_owned();
+                let insert_query = BuiltQuery::Insert(Box::new(insert_stmt));
+
+                // 3. Drop original table
+                let drop_table = Table::drop().table(Alias::new(table)).to_owned();
+                let drop_query = BuiltQuery::DropTable(Box::new(drop_table));
+
+                // 4. Rename temporary table to original name
+                let rename_query = build_rename_table(&temp_table, table);
+
+                // 5. Recreate indexes from Index constraints
+                let mut index_queries = Vec::new();
+                for c in &table_def.constraints {
+                    if let TableConstraint::Index {
+                        name: idx_name,
+                        columns: idx_cols,
+                    } = c
+                    {
+                        let index_name = vespertide_naming::build_index_name(
+                            table,
+                            idx_cols,
+                            idx_name.as_deref(),
+                        );
+                        let mut idx_stmt = sea_query::Index::create();
+                        idx_stmt = idx_stmt.name(&index_name).to_owned();
+                        for col_name in idx_cols {
+                            idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned();
+                        }
+                        idx_stmt = idx_stmt.table(Alias::new(table)).to_owned();
+                        index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt)));
+                    }
+                }
+
+                let mut queries = vec![create_query, insert_query, drop_query, rename_query];
+                queries.extend(index_queries);
+                Ok(queries)
+            } else {
+                // Build foreign key drop using ForeignKey::drop()
+                let constraint_name =
+                    vespertide_naming::build_foreign_key_name(table, columns, name.as_deref());
+                let fk_drop = ForeignKey::drop()
+                    .name(&constraint_name)
+                    .table(Alias::new(table))
+                    .to_owned();
+                Ok(vec![BuiltQuery::DropForeignKey(Box::new(fk_drop))])
+            }
+        }
+        TableConstraint::Index { name, columns } => {
+            // Index constraints are simple DROP INDEX statements for all backends.
+            // build_index_name applies the naming convention whether or not the
+            // index was given an explicit name.
+            let index_name = vespertide_naming::build_index_name(table, columns, name.as_deref());
+            let idx_drop = sea_query::Index::drop()
+                .table(Alias::new(table))
+                .name(&index_name)
+                .to_owned();
+            Ok(vec![BuiltQuery::DropIndex(Box::new(idx_drop))])
+        }
+        TableConstraint::Check { name, .. } => {
+            // SQLite does not support ALTER TABLE ... DROP CONSTRAINT CHECK
+            if *backend == DatabaseBackend::Sqlite {
+                // Use temporary table approach for SQLite
+                let table_def = current_schema
+                    .iter()
+                    .find(|t| t.name == table)
+                    .ok_or_else(|| QueryError::Other(format!(
+                        "Table '{}' not found in current schema. SQLite requires current schema information to remove constraints.",
+                        table
+                    )))?;
+
+                // Create new constraints without the removed check constraint
+                let mut new_constraints = table_def.constraints.clone();
+                new_constraints.retain(|c| match (c, constraint) {
+                    (
+                        TableConstraint::Check { name: c_name, .. },
+                        TableConstraint::Check { name: r_name, .. },
+                    ) => c_name != r_name,
+                    _ => true,
+                });
+
+                // Generate temporary table name
+                let temp_table = format!("{}_temp", table);
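+
+                // Unlike the Unique/ForeignKey branches above, Check constraints are matched
+                // purely by name in the retain() above: `name` is a required field on Check,
+                // so there is no column-list fallback.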
+
+                // 1. Create temporary table without the removed constraint
+                let create_temp_table = build_create_table_for_backend(
+                    backend,
+                    &temp_table,
+                    &table_def.columns,
+                    &new_constraints,
+                );
+                let create_query = BuiltQuery::CreateTable(Box::new(create_temp_table));
+
+                // 2. Copy data (all columns)
+                let column_aliases: Vec<Alias> = table_def
+                    .columns
+                    .iter()
+                    .map(|c| Alias::new(&c.name))
+                    .collect();
+                let mut select_query = Query::select();
+                for col_alias in &column_aliases {
+                    select_query = select_query.column(col_alias.clone()).to_owned();
+                }
+                select_query = select_query.from(Alias::new(table)).to_owned();
+
+                let insert_stmt = Query::insert()
+                    .into_table(Alias::new(&temp_table))
+                    .columns(column_aliases.clone())
+                    .select_from(select_query)
+                    .unwrap()
+                    .to_owned();
+                let insert_query = BuiltQuery::Insert(Box::new(insert_stmt));
+
+                // 3. Drop original table
+                let drop_table = Table::drop().table(Alias::new(table)).to_owned();
+                let drop_query = BuiltQuery::DropTable(Box::new(drop_table));
+
+                // 4. Rename temporary table to original name
+                let rename_query = build_rename_table(&temp_table, table);
+
+                // 5. Recreate indexes from Index constraints
+                let mut index_queries = Vec::new();
+                for c in &table_def.constraints {
+                    if let TableConstraint::Index {
+                        name: idx_name,
+                        columns: idx_cols,
+                    } = c
+                    {
+                        let index_name = vespertide_naming::build_index_name(
+                            table,
+                            idx_cols,
+                            idx_name.as_deref(),
+                        );
+                        let mut idx_stmt = sea_query::Index::create();
+                        idx_stmt = idx_stmt.name(&index_name).to_owned();
+                        for col_name in idx_cols {
+                            idx_stmt = idx_stmt.col(Alias::new(col_name)).to_owned();
+                        }
+                        idx_stmt = idx_stmt.table(Alias::new(table)).to_owned();
+                        index_queries.push(BuiltQuery::CreateIndex(Box::new(idx_stmt)));
+                    }
+                }
+
+                let mut queries = vec![create_query, insert_query, drop_query, rename_query];
+                queries.extend(index_queries);
+                Ok(queries)
+            } else {
+                let pg_sql = format!("ALTER TABLE \"{}\" DROP CONSTRAINT \"{}\"", table, name);
+                let mysql_sql = format!("ALTER TABLE `{}` DROP CHECK `{}`", table, name);
+                Ok(vec![BuiltQuery::Raw(RawSql::per_backend(
+                    pg_sql.clone(),
+                    mysql_sql,
+                    pg_sql,
+                ))])
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::sql::types::DatabaseBackend;
+    use insta::{assert_snapshot, with_settings};
+    use rstest::rstest;
+    use vespertide_core::{ColumnDef, ColumnType, SimpleColumnType, TableConstraint, TableDef};
+
+    #[rstest]
+    #[case::remove_constraint_primary_key_postgres(
+        "remove_constraint_primary_key_postgres",
+        DatabaseBackend::Postgres,
+        &["DROP CONSTRAINT \"users_pkey\""]
+    )]
+    #[case::remove_constraint_primary_key_mysql(
+        "remove_constraint_primary_key_mysql",
+        DatabaseBackend::MySql,
+        &["DROP PRIMARY KEY"]
+    )]
+    #[case::remove_constraint_primary_key_sqlite(
+        "remove_constraint_primary_key_sqlite",
+        DatabaseBackend::Sqlite,
+        &["CREATE TABLE \"users_temp\""]
+    )]
+    #[case::remove_constraint_unique_named_postgres(
+        "remove_constraint_unique_named_postgres",
+        DatabaseBackend::Postgres,
+        &["DROP CONSTRAINT \"uq_users__uq_email\""]
+    )]
+    #[case::remove_constraint_unique_named_mysql(
+        "remove_constraint_unique_named_mysql",
+        DatabaseBackend::MySql,
+        &["DROP INDEX `uq_users__uq_email`"]
+    )]
+    #[case::remove_constraint_unique_named_sqlite(
+        "remove_constraint_unique_named_sqlite",
+        DatabaseBackend::Sqlite,
+        &["CREATE TABLE \"users_temp\""]
+    )]
+    #[case::remove_constraint_foreign_key_named_postgres(
+        "remove_constraint_foreign_key_named_postgres",
+        DatabaseBackend::Postgres,
+        &["DROP CONSTRAINT 
\"fk_users__fk_user\""] + )] + #[case::remove_constraint_foreign_key_named_mysql( + "remove_constraint_foreign_key_named_mysql", + DatabaseBackend::MySql, + &["DROP FOREIGN KEY `fk_users__fk_user`"] + )] + #[case::remove_constraint_foreign_key_named_sqlite( + "remove_constraint_foreign_key_named_sqlite", + DatabaseBackend::Sqlite, + &["CREATE TABLE \"users_temp\""] + )] + #[case::remove_constraint_check_named_postgres( + "remove_constraint_check_named_postgres", + DatabaseBackend::Postgres, + &["DROP CONSTRAINT \"chk_age\""] + )] + #[case::remove_constraint_check_named_mysql( + "remove_constraint_check_named_mysql", + DatabaseBackend::MySql, + &["DROP CHECK `chk_age`"] + )] + #[case::remove_constraint_check_named_sqlite( + "remove_constraint_check_named_sqlite", + DatabaseBackend::Sqlite, + &["CREATE TABLE \"users_temp\""] + )] + fn test_remove_constraint( + #[case] title: &str, + #[case] backend: DatabaseBackend, + #[case] expected: &[&str], + ) { + let constraint = if title.contains("primary_key") { + TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + } + } else if title.contains("unique") { + TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + } + } else if title.contains("foreign_key") { + TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + } + } else { + TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + } + }; + + // For SQLite, we need to provide current schema with the constraint to be removed + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: if title.contains("check") { + vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ] + } else if title.contains("foreign_key") { + vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ] + } else { + // primary key / unique cases + vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }] + }, + constraints: vec![constraint.clone()], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result[0].build(backend); + for exp in expected { + assert!( + sql.contains(exp), + "Expected SQL to contain '{}', got: {}", + exp, + sql + ); + } + + with_settings!({ snapshot_suffix => format!("remove_constraint_{}", title) }, { + assert_snapshot!(result.iter().map(|q| q.build(backend)).collect::>().join("\n")); + }); + } + + #[test] + fn 
test_remove_constraint_primary_key_sqlite_table_not_found() { + // Test error when table is not found (line 25) + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let result = build_remove_constraint( + &DatabaseBackend::Sqlite, + "nonexistent_table", + &constraint, + &[], // Empty schema + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); + } + + #[rstest] + #[case::remove_primary_key_with_index_postgres(DatabaseBackend::Postgres)] + #[case::remove_primary_key_with_index_mysql(DatabaseBackend::MySql)] + #[case::remove_primary_key_with_index_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_primary_key_with_index(#[case] backend: DatabaseBackend) { + // Test PrimaryKey removal with indexes + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![ + constraint.clone(), + TableConstraint::Index { + name: Some("idx_id".into()), + columns: vec!["id".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("ix_users__idx_id")); + } + + with_settings!({ snapshot_suffix => format!("remove_primary_key_with_index_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_primary_key_with_unique_constraint_postgres(DatabaseBackend::Postgres)] + #[case::remove_primary_key_with_unique_constraint_mysql(DatabaseBackend::MySql)] + #[case::remove_primary_key_with_unique_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_primary_key_with_unique_constraint(#[case] backend: DatabaseBackend) { + // Test PrimaryKey removal with unique constraint + let constraint = TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }], + constraints: vec![ + constraint.clone(), + TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + // Unique constraint should be in the temp table definition + assert!(sql.contains("CREATE TABLE")); + } + + with_settings!({ snapshot_suffix => format!("remove_primary_key_with_unique_constraint_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[test] + fn test_remove_constraint_unique_sqlite_table_not_found() { + // Test error when table is not found (line 112) + let 
constraint = TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }; + let result = build_remove_constraint( + &DatabaseBackend::Sqlite, + "nonexistent_table", + &constraint, + &[], // Empty schema + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); + } + + #[rstest] + #[case::remove_unique_without_name_postgres(DatabaseBackend::Postgres)] + #[case::remove_unique_without_name_mysql(DatabaseBackend::MySql)] + #[case::remove_unique_without_name_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_unique_without_name(#[case] backend: DatabaseBackend) { + // Test Unique removal without name (lines 134, 137, 210) + let constraint = TableConstraint::Unique { + name: None, + columns: vec!["email".into()], + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![constraint.clone()], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should generate default constraint name + if !matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("users_email_key") || sql.contains("email")); + } + + with_settings!({ snapshot_suffix => format!("remove_unique_without_name_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_unique_with_index_postgres(DatabaseBackend::Postgres)] + #[case::remove_unique_with_index_mysql(DatabaseBackend::MySql)] + #[case::remove_unique_with_index_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_unique_with_index(#[case] backend: DatabaseBackend) { + // Test Unique removal with indexes + let constraint = TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + constraint.clone(), + TableConstraint::Index { + name: Some("idx_id".into()), + columns: vec!["id".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("ix_users__idx_id")); + } + + with_settings!({ snapshot_suffix => format!("remove_unique_with_index_{:?}", backend) }, { + 
assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_unique_with_other_unique_constraint_postgres(DatabaseBackend::Postgres)] + #[case::remove_unique_with_other_unique_constraint_mysql(DatabaseBackend::MySql)] + #[case::remove_unique_with_other_unique_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_unique_with_other_unique_constraint( + #[case] backend: DatabaseBackend, + ) { + // Test Unique removal with another unique constraint + let constraint = TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + constraint.clone(), + TableConstraint::Unique { + name: Some("uq_name".into()), + columns: vec!["name".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + // The remaining unique constraint should be preserved + assert!(sql.contains("CREATE TABLE")); + } + + with_settings!({ snapshot_suffix => format!("remove_unique_with_other_unique_constraint_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[test] + fn test_remove_constraint_foreign_key_sqlite_table_not_found() { + // Test error when table is not found (line 236) + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let result = build_remove_constraint( + &DatabaseBackend::Sqlite, + "nonexistent_table", + &constraint, + &[], // Empty schema + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); + } + + #[rstest] + #[case::remove_foreign_key_without_name_postgres(DatabaseBackend::Postgres)] + #[case::remove_foreign_key_without_name_mysql(DatabaseBackend::MySql)] + #[case::remove_foreign_key_without_name_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_foreign_key_without_name(#[case] backend: DatabaseBackend) { + // Test ForeignKey removal without name (lines 260, 263, 329) + let constraint = TableConstraint::ForeignKey { + name: None, + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef { + name: "posts".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + 
foreign_key: None, + }, + ], + constraints: vec![constraint.clone()], + }]; + + let result = + build_remove_constraint(&backend, "posts", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should generate default constraint name + if !matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("posts_user_id_fkey") || sql.contains("user_id")); + } + + with_settings!({ snapshot_suffix => format!("remove_foreign_key_without_name_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_foreign_key_with_index_postgres(DatabaseBackend::Postgres)] + #[case::remove_foreign_key_with_index_mysql(DatabaseBackend::MySql)] + #[case::remove_foreign_key_with_index_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_foreign_key_with_index(#[case] backend: DatabaseBackend) { + // Test ForeignKey removal with indexes + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef { + name: "posts".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + constraint.clone(), + TableConstraint::Index { + name: Some("idx_user_id".into()), + columns: vec!["user_id".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "posts", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_user_id")); + } + + with_settings!({ snapshot_suffix => format!("remove_foreign_key_with_index_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_foreign_key_with_unique_constraint_postgres(DatabaseBackend::Postgres)] + #[case::remove_foreign_key_with_unique_constraint_mysql(DatabaseBackend::MySql)] + #[case::remove_foreign_key_with_unique_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_foreign_key_with_unique_constraint(#[case] backend: DatabaseBackend) { + // Test ForeignKey removal with unique constraint + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef { + name: "posts".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + 
constraint.clone(), + TableConstraint::Unique { + name: Some("uq_user_id".into()), + columns: vec!["user_id".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "posts", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + // Unique constraint should be preserved in the temp table + assert!(sql.contains("CREATE TABLE")); + } + + with_settings!({ snapshot_suffix => format!("remove_foreign_key_with_unique_constraint_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[test] + fn test_remove_constraint_check_sqlite_table_not_found() { + // Test error when table is not found (line 346) + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let result = build_remove_constraint( + &DatabaseBackend::Sqlite, + "nonexistent_table", + &constraint, + &[], // Empty schema + ); + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Table 'nonexistent_table' not found in current schema")); + } + + #[rstest] + #[case::remove_check_with_index_postgres(DatabaseBackend::Postgres)] + #[case::remove_check_with_index_mysql(DatabaseBackend::MySql)] + #[case::remove_check_with_index_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_check_with_index(#[case] backend: DatabaseBackend) { + // Test Check removal with indexes + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + constraint.clone(), + TableConstraint::Index { + name: Some("idx_age".into()), + columns: vec!["age".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + assert!(sql.contains("CREATE INDEX")); + assert!(sql.contains("idx_age")); + } + + with_settings!({ snapshot_suffix => format!("remove_check_with_index_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_check_with_unique_constraint_postgres(DatabaseBackend::Postgres)] + #[case::remove_check_with_unique_constraint_mysql(DatabaseBackend::MySql)] + #[case::remove_check_with_unique_constraint_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_check_with_unique_constraint(#[case] backend: DatabaseBackend) { + // Test Check removal with unique constraint + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + 
ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + constraint.clone(), + TableConstraint::Unique { + name: Some("uq_age".into()), + columns: vec!["age".into()], + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + if matches!(backend, DatabaseBackend::Sqlite) { + // Unique constraint should be preserved in the temp table + assert!(sql.contains("CREATE TABLE")); + } + + with_settings!({ snapshot_suffix => format!("remove_check_with_unique_constraint_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_unique_with_other_constraints_postgres(DatabaseBackend::Postgres)] + #[case::remove_unique_with_other_constraints_mysql(DatabaseBackend::MySql)] + #[case::remove_unique_with_other_constraints_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_unique_with_other_constraints(#[case] backend: DatabaseBackend) { + // Test Unique removal with other constraint types (line 137) + let constraint = TableConstraint::Unique { + name: Some("uq_email".into()), + columns: vec!["email".into()], + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "email".into(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }, + constraint.clone(), + TableConstraint::Check { + name: "chk_email".into(), + expr: "email IS NOT NULL".into(), + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should still work with other constraint types present + assert!(sql.contains("DROP") || sql.contains("CREATE TABLE")); + + with_settings!({ snapshot_suffix => format!("remove_unique_with_other_constraints_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_foreign_key_with_other_constraints_postgres(DatabaseBackend::Postgres)] + #[case::remove_foreign_key_with_other_constraints_mysql(DatabaseBackend::MySql)] + #[case::remove_foreign_key_with_other_constraints_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_foreign_key_with_other_constraints(#[case] backend: DatabaseBackend) { + // Test ForeignKey removal with other constraint types (line 263) + let constraint = TableConstraint::ForeignKey { + name: Some("fk_user".into()), + columns: vec!["user_id".into()], + ref_table: "users".into(), + ref_columns: vec!["id".into()], + on_delete: None, + on_update: None, + }; + let current_schema = vec![TableDef { + name: "posts".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + 
primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "user_id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }, + constraint.clone(), + TableConstraint::Unique { + name: Some("uq_user_id".into()), + columns: vec!["user_id".into()], + }, + TableConstraint::Check { + name: "chk_user_id".into(), + expr: "user_id > 0".into(), + }, + ], + }]; + + let result = + build_remove_constraint(&backend, "posts", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should still work with other constraint types present + assert!(sql.contains("DROP") || sql.contains("CREATE TABLE")); + + with_settings!({ snapshot_suffix => format!("remove_foreign_key_with_other_constraints_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_check_with_other_constraints_postgres(DatabaseBackend::Postgres)] + #[case::remove_check_with_other_constraints_mysql(DatabaseBackend::MySql)] + #[case::remove_check_with_other_constraints_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_check_with_other_constraints(#[case] backend: DatabaseBackend) { + // Test Check removal with other constraint types (line 357) + let constraint = TableConstraint::Check { + name: "chk_age".into(), + expr: "age > 0".into(), + }; + let current_schema = vec![TableDef { + name: "users".into(), + description: None, + columns: vec![ + ColumnDef { + name: "id".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: false, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ColumnDef { + name: "age".into(), + r#type: ColumnType::Simple(SimpleColumnType::Integer), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: None, + foreign_key: None, + }, + ], + constraints: vec![ + TableConstraint::PrimaryKey { + columns: vec!["id".into()], + auto_increment: false, + }, + TableConstraint::Unique { + name: Some("uq_age".into()), + columns: vec!["age".into()], + }, + constraint.clone(), + ], + }]; + + let result = + build_remove_constraint(&backend, "users", &constraint, ¤t_schema).unwrap(); + let sql = result + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should still work with other constraint types present + assert!(sql.contains("DROP") || sql.contains("CREATE TABLE")); + + with_settings!({ snapshot_suffix => format!("remove_check_with_other_constraints_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } + + #[rstest] + #[case::remove_index_with_custom_inline_name_postgres(DatabaseBackend::Postgres)] + #[case::remove_index_with_custom_inline_name_mysql(DatabaseBackend::MySql)] + #[case::remove_index_with_custom_inline_name_sqlite(DatabaseBackend::Sqlite)] + fn test_remove_constraint_index_with_custom_inline_name(#[case] backend: DatabaseBackend) { + // Test Index removal with a custom name from inline index field + // This tests the scenario where index: "custom_idx_name" is used + let constraint = TableConstraint::Index { + name: Some("custom_idx_email".into()), + columns: vec!["email".into()], + }; + + let schema = vec![TableDef { + name: "users".to_string(), + description: None, + 
columns: vec![ColumnDef { + name: "email".to_string(), + r#type: ColumnType::Simple(SimpleColumnType::Text), + nullable: true, + default: None, + comment: None, + primary_key: None, + unique: None, + index: Some(vespertide_core::StrOrBoolOrArray::Str( + "custom_idx_email".into(), + )), + foreign_key: None, + }], + constraints: vec![], + }]; + + let result = build_remove_constraint(&backend, "users", &constraint, &schema); + assert!(result.is_ok()); + let sql = result + .unwrap() + .iter() + .map(|q| q.build(backend)) + .collect::>() + .join("\n"); + + // Should use the custom index name + assert!(sql.contains("custom_idx_email")); + + with_settings!({ snapshot_suffix => format!("remove_index_custom_name_{:?}", backend) }, { + assert_snapshot!(sql); + }); + } +} diff --git a/crates/vespertide-query/tests/enum_migration_test.rs b/crates/vespertide-query/tests/enum_migration_test.rs index 407ca7d..d83ef8c 100644 --- a/crates/vespertide-query/tests/enum_migration_test.rs +++ b/crates/vespertide-query/tests/enum_migration_test.rs @@ -29,6 +29,7 @@ fn test_enum_value_change_generates_correct_sql() { // Baseline schema after migration 0002 (with 2-value enum) let baseline_schema = vec![TableDef { name: "user".into(), + description: None, columns: vec![ ColumnDef { name: "id".into(), diff --git a/schemas/model.schema.json b/schemas/model.schema.json index 6dafb7a..bf4a0a6 100644 --- a/schemas/model.schema.json +++ b/schemas/model.schema.json @@ -15,6 +15,12 @@ "$ref": "#/$defs/TableConstraint" } }, + "description": { + "type": [ + "string", + "null" + ] + }, "name": { "type": "string" }