diff --git a/Cargo.toml b/Cargo.toml index f1bd272e4..8deaee8d4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,7 @@ resolver = "2" members = [ "crates/toasty", + "crates/toasty-cli", "crates/toasty-codegen", "crates/toasty-core", "crates/toasty-macros", @@ -23,6 +24,7 @@ members = [ # Examples "examples/composite-key", "examples/hello-toasty", + "examples/todo-with-cli", "examples/user-has-one-profile", # Tests @@ -35,6 +37,7 @@ members = [ [workspace.dependencies] # Toasty crates toasty = { path = "crates/toasty" } +toasty-cli = { path = "crates/toasty-cli" } toasty-codegen = { path = "crates/toasty-codegen" } toasty-core = { path = "crates/toasty-core" } toasty-macros = { path = "crates/toasty-macros" } @@ -62,7 +65,9 @@ bit-set = "0.8.0" by_address = "1.2.1" cfg-if = "1.0.0" clap = { version = "4.5.20", features = ["derive"] } +console = "0.16" deadpool = { version = "0.12.3", features = ["rt_tokio_1"] } +dialoguer = "0.12" heck = "0.5.0" indexmap = "2.6.0" index_vec = "0.1.4" @@ -85,6 +90,8 @@ rusqlite = { version = "0.32", features = ["bundled"] } serde = { version = "1.0.214", features = ["derive"] } serde_json = "1.0.132" syn = { version = "2.0.86", features = ["full", "extra-traits", "visit-mut"] } +toml = "0.9.11" +toml_edit = "0.24.0" tempfile = "3.8" tokio = { version = "1.18", features = ["full"] } tokio-postgres = "0.7.13" diff --git a/crates/toasty-cli/Cargo.toml b/crates/toasty-cli/Cargo.toml new file mode 100644 index 000000000..cba771cb7 --- /dev/null +++ b/crates/toasty-cli/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "toasty-cli" +version = "0.1.0" +edition = "2024" +publish = false + +[dependencies] +toasty.workspace = true +toasty-core = { workspace = true, features = ["serde"] } +toasty-sql.workspace = true + +anyhow = "1.0.100" +clap.workspace = true +console.workspace = true +dialoguer.workspace = true +rand.workspace = true +serde.workspace = true +toml.workspace = true +toml_edit = { workspace = true, features = ["serde"] } diff --git a/crates/toasty-cli/src/config.rs b/crates/toasty-cli/src/config.rs new file mode 100644 index 000000000..d80549753 --- /dev/null +++ b/crates/toasty-cli/src/config.rs @@ -0,0 +1,33 @@ +use crate::migration::MigrationConfig; +use anyhow::Result; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::Path; + +/// Configuration for Toasty CLI operations +#[derive(Debug, Default, Clone, Serialize, Deserialize)] +pub struct Config { + /// Migration-related configuration + pub migration: MigrationConfig, +} + +impl Config { + /// Create a new Config with default values + pub fn new() -> Self { + Self::default() + } + + /// Load configuration from Toasty.toml in the project root + pub fn load() -> Result { + let path = Path::new("Toasty.toml"); + let contents = fs::read_to_string(path)?; + let config: Config = toml::from_str(&contents)?; + Ok(config) + } + + /// Set the migration configuration + pub fn migration(mut self, migration: MigrationConfig) -> Self { + self.migration = migration; + self + } +} diff --git a/crates/toasty-cli/src/lib.rs b/crates/toasty-cli/src/lib.rs new file mode 100644 index 000000000..b3f7ec2cb --- /dev/null +++ b/crates/toasty-cli/src/lib.rs @@ -0,0 +1,73 @@ +mod config; +mod migration; +mod theme; + +pub use config::*; +pub use migration::*; + +use anyhow::Result; +use clap::Parser; +use toasty::Db; + +/// Toasty CLI library for building custom command-line tools +pub struct ToastyCli { + db: Db, + config: Config, +} + +impl ToastyCli { + /// Create a new ToastyCli instance with the given 
database connection + pub fn new(db: Db) -> Self { + Self { + db, + config: Config::default(), + } + } + + /// Create a new ToastyCli instance with a custom configuration + pub fn with_config(db: Db, config: Config) -> Self { + Self { db, config } + } + + /// Get a reference to the configuration + pub fn config(&self) -> &Config { + &self.config + } + + /// Parse and execute CLI commands from command-line arguments + pub async fn parse_and_run(&self) -> Result<()> { + let cli = Cli::parse(); + self.run(cli).await + } + + /// Parse and execute CLI commands from an iterator of arguments + pub async fn parse_from(&self, args: I) -> Result<()> + where + I: IntoIterator, + T: Into + Clone, + { + let cli = Cli::parse_from(args); + self.run(cli).await + } + + async fn run(&self, cli: Cli) -> Result<()> { + match cli.command { + Command::Migration(cmd) => cmd.run(&self.db, &self.config).await, + } + } +} + +#[derive(Parser, Debug)] +#[command(name = "toasty")] +#[command(about = "Toasty CLI - Database migration and management tool")] +#[command(version)] +struct Cli { + #[command(subcommand)] + command: Command, +} + +#[derive(Parser, Debug)] +enum Command { + /// Database migration commands + Migration(migration::MigrationCommand), +} diff --git a/crates/toasty-cli/src/migration.rs b/crates/toasty-cli/src/migration.rs new file mode 100644 index 000000000..c3e8c6a15 --- /dev/null +++ b/crates/toasty-cli/src/migration.rs @@ -0,0 +1,58 @@ +mod apply; +mod config; +mod drop; +mod generate; +mod history_file; +mod snapshot; +mod snapshot_file; + +pub use apply::*; +pub use config::*; +pub use drop::*; +pub use generate::*; +pub use history_file::*; +pub use snapshot::*; +pub use snapshot_file::*; + +use crate::Config; +use anyhow::Result; +use clap::Parser; +use toasty::Db; + +#[derive(Parser, Debug)] +pub struct MigrationCommand { + #[command(subcommand)] + subcommand: MigrationSubcommand, +} + +#[derive(Parser, Debug)] +enum MigrationSubcommand { + /// Apply pending migrations to the database + Apply(ApplyCommand), + + /// Generate a new migration based on schema changes + Generate(GenerateCommand), + + /// Print the current schema snapshot file + Snapshot(SnapshotCommand), + + /// Drop a migration from the history + Drop(DropCommand), +} + +impl MigrationCommand { + pub(crate) async fn run(self, db: &Db, config: &Config) -> Result<()> { + self.subcommand.run(db, config).await + } +} + +impl MigrationSubcommand { + async fn run(self, db: &Db, config: &Config) -> Result<()> { + match self { + Self::Apply(cmd) => cmd.run(db, config).await, + Self::Generate(cmd) => cmd.run(db, config), + Self::Snapshot(cmd) => cmd.run(db, config), + Self::Drop(cmd) => cmd.run(db, config), + } + } +} diff --git a/crates/toasty-cli/src/migration/apply.rs b/crates/toasty-cli/src/migration/apply.rs new file mode 100644 index 000000000..5c6a38f41 --- /dev/null +++ b/crates/toasty-cli/src/migration/apply.rs @@ -0,0 +1,111 @@ +use super::HistoryFile; +use crate::Config; +use anyhow::Result; +use clap::Parser; +use console::style; +use std::collections::HashSet; +use std::fs; +use toasty::Db; +use toasty::schema::db::Migration; + +#[derive(Parser, Debug)] +pub struct ApplyCommand {} + +impl ApplyCommand { + pub(crate) async fn run(self, db: &Db, config: &Config) -> Result<()> { + println!(); + println!(" {}", style("Apply Migrations").cyan().bold().underlined()); + println!(); + + let history_path = config.migration.get_history_file_path(); + + // Load migration history + let history = 
HistoryFile::load_or_default(&history_path)?; + + if history.migrations().is_empty() { + println!( + " {}", + style("No migrations found in history file.") + .magenta() + .dim() + ); + println!(); + return Ok(()); + } + + // Get a connection to check which migrations have been applied + let mut conn = db.driver().connect().await?; + + // Get list of already applied migrations + let applied_migrations = conn.applied_migrations().await?; + let applied_ids: HashSet = applied_migrations.iter().map(|m| m.id()).collect(); + + // Find migrations that haven't been applied yet + let pending_migrations: Vec<_> = history + .migrations() + .iter() + .filter(|m| !applied_ids.contains(&m.id)) + .collect(); + + if pending_migrations.is_empty() { + println!( + " {}", + style("All migrations are already applied. Database is up to date.") + .green() + .dim() + ); + println!(); + return Ok(()); + } + + let pending_count = pending_migrations.len(); + println!( + " {} Found {} pending migration(s) to apply", + style("→").cyan(), + pending_count + ); + println!(); + + // Apply each pending migration + for migration_entry in &pending_migrations { + let migration_path = config + .migration + .get_migrations_dir() + .join(&migration_entry.name); + + println!( + " {} Applying migration: {}", + style("→").cyan(), + style(&migration_entry.name).bold() + ); + + // Load the migration SQL file + let sql = fs::read_to_string(&migration_path)?; + let migration = Migration::new_sql(sql); + + // Apply the migration + conn.apply_migration(migration_entry.id, migration_entry.name.clone(), &migration) + .await?; + + println!( + " {} {}", + style("✓").green().bold(), + style(format!("Applied: {}", migration_entry.name)).dim() + ); + } + + println!(); + println!( + " {}", + style(format!( + "Successfully applied {} migration(s)", + pending_count + )) + .green() + .bold() + ); + println!(); + + Ok(()) + } +} diff --git a/crates/toasty-cli/src/migration/config.rs b/crates/toasty-cli/src/migration/config.rs new file mode 100644 index 000000000..c33026996 --- /dev/null +++ b/crates/toasty-cli/src/migration/config.rs @@ -0,0 +1,78 @@ +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +/// Configuration for migration operations +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MigrationConfig { + /// Path to the migrations folder + pub path: PathBuf, + + /// Style of migration file prefixes + pub prefix_style: MigrationPrefixStyle, + + /// Whether the history file should store and verify checksums of the migration files so that + /// they may not be changed. + pub checksums: bool, + + /// Whether to add statement breakpoint comments to generated SQL migration files. + /// These comments mark boundaries where SQL statements should be split for execution. + /// This is needed because different databases have different batching capabilities: + /// some (like PostgreSQL) can execute multiple statements in one batch, while others + /// require each statement to be executed separately. 
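+    /// For example, a generated file that creates a table and then an index would contain
+    /// a breakpoint comment between the two statements.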
+ pub statement_breakpoints: bool, +} + +/// Style for migration file name prefixes +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum MigrationPrefixStyle { + /// Sequential numbering (e.g., 0001_, 0002_, 0003_) + Sequential, + + /// Timestamp-based (e.g., 20240112_153045_) + Timestamp, +} + +impl Default for MigrationConfig { + fn default() -> Self { + Self { + path: PathBuf::from("toasty"), + prefix_style: MigrationPrefixStyle::Sequential, + checksums: false, + statement_breakpoints: true, + } + } +} + +impl MigrationConfig { + /// Create a new MigrationConfig with default values + pub fn new() -> Self { + Self::default() + } + + /// Set the migrations path + pub fn path(mut self, path: impl Into) -> Self { + self.path = path.into(); + self + } + + /// Set the migration prefix style + pub fn prefix_style(mut self, style: MigrationPrefixStyle) -> Self { + self.prefix_style = style; + self + } + + /// Returns the directory of the migration files derived from `path`. + pub fn get_migrations_dir(&self) -> PathBuf { + self.path.join("migrations") + } + + /// Returns the directory of the snapshot files derived from `path`. + pub fn get_snapshots_dir(&self) -> PathBuf { + self.path.join("snapshots") + } + + /// Get the path to the history file + pub fn get_history_file_path(&self) -> PathBuf { + self.path.join("history.toml") + } +} diff --git a/crates/toasty-cli/src/migration/drop.rs b/crates/toasty-cli/src/migration/drop.rs new file mode 100644 index 000000000..f47d1188b --- /dev/null +++ b/crates/toasty-cli/src/migration/drop.rs @@ -0,0 +1,129 @@ +use super::HistoryFile; +use crate::{Config, theme::dialoguer_theme}; +use anyhow::Result; +use clap::Parser; +use console::style; +use dialoguer::Select; +use std::fs; +use toasty::Db; + +#[derive(Parser, Debug)] +pub struct DropCommand { + /// Name of the migration to drop (if not provided, will prompt) + #[arg(short, long)] + name: Option, + + /// Drop the latest migration + #[arg(short, long)] + latest: bool, +} + +impl DropCommand { + pub(crate) fn run(self, _db: &Db, config: &Config) -> Result<()> { + let history_path = config.migration.get_history_file_path(); + let mut history = HistoryFile::load_or_default(&history_path)?; + + if history.migrations().is_empty() { + eprintln!("{}", style("No migrations found in history").red().bold()); + anyhow::bail!("No migrations found in history"); + } + + // Determine which migration to drop + let migration_index = if self.latest { + // Drop the latest migration + history.migrations().len() - 1 + } else if let Some(name) = &self.name { + // Find migration by name + history + .migrations() + .iter() + .position(|m| m.name == *name) + .ok_or_else(|| anyhow::anyhow!("Migration '{}' not found", name))? + } else { + // Interactive picker with fancy theme + println!(); + println!(" {}", style("Drop Migration").cyan().bold().underlined()); + println!(); + + let migration_display: Vec = history + .migrations() + .iter() + .map(|m| format!(" {}", m.name)) + .collect(); + + Select::with_theme(&dialoguer_theme()) + .with_prompt(" Select migration to drop") + .items(&migration_display) + .default(migration_display.len() - 1) + .interact()? 
+        };
+
+        println!();
+
+        let migration = &history.migrations()[migration_index];
+        let migration_name = migration.name.clone();
+        let snapshot_name = migration.snapshot_name.clone();
+
+        // Delete migration file
+        let migration_path = config.migration.get_migrations_dir().join(&migration_name);
+        if migration_path.exists() {
+            fs::remove_file(&migration_path)?;
+            println!(
+                " {} {}",
+                style("✓").green().bold(),
+                style(format!("Deleted migration: {}", migration_name)).dim()
+            );
+        } else {
+            println!(
+                " {} {}",
+                style("⚠").yellow().bold(),
+                style(format!("Migration file not found: {}", migration_name))
+                    .yellow()
+                    .dim()
+            );
+        }
+
+        // Delete snapshot file
+        let snapshot_path = config.migration.get_snapshots_dir().join(&snapshot_name);
+        if snapshot_path.exists() {
+            fs::remove_file(&snapshot_path)?;
+            println!(
+                " {} {}",
+                style("✓").green().bold(),
+                style(format!("Deleted snapshot: {}", snapshot_name)).dim()
+            );
+        } else {
+            println!(
+                " {} {}",
+                style("⚠").yellow().bold(),
+                style(format!("Snapshot file not found: {}", snapshot_name))
+                    .yellow()
+                    .dim()
+            );
+        }
+
+        // Remove from history
+        history.remove_migration(migration_index);
+        history.save(&history_path)?;
+
+        println!(
+            " {} {}",
+            style("✓").green().bold(),
+            style("Updated migration history").dim()
+        );
+        println!();
+        println!(
+            " {} {}",
+            style("").magenta(),
+            style(format!(
+                "Migration '{}' successfully dropped",
+                migration_name
+            ))
+            .green()
+            .bold()
+        );
+        println!();
+
+        Ok(())
+    }
+}
diff --git a/crates/toasty-cli/src/migration/generate.rs b/crates/toasty-cli/src/migration/generate.rs
new file mode 100644
index 000000000..71c9ab7ce
--- /dev/null
+++ b/crates/toasty-cli/src/migration/generate.rs
@@ -0,0 +1,322 @@
+use super::{HistoryFile, HistoryFileMigration, SnapshotFile};
+use crate::{Config, theme::dialoguer_theme};
+use anyhow::Result;
+use clap::Parser;
+use console::style;
+use dialoguer::Select;
+use rand::Rng;
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use toasty::{
+    Db,
+    schema::db::{
+        ColumnId, ColumnsDiffItem, IndexId, IndicesDiffItem, Migration, RenameHints, Schema,
+        SchemaDiff, TableId, TablesDiffItem,
+    },
+};
+
+#[derive(Parser, Debug)]
+pub struct GenerateCommand {
+    /// Name for the migration
+    #[arg(short, long)]
+    name: Option<String>,
+}
+
+/// Collects rename hints by interactively asking the user about potential renames
+fn collect_rename_hints(previous_schema: &Schema, schema: &Schema) -> Result<RenameHints> {
+    let mut hints = RenameHints::default();
+    let mut ignored_tables = HashSet::<TableId>::new();
+    let mut ignored_columns = HashMap::<TableId, HashSet<ColumnId>>::new();
+    let mut ignored_indices = HashMap::<TableId, HashSet<IndexId>>::new();
+
+    'main: loop {
+        let diff = SchemaDiff::from(previous_schema, schema, &hints);
+
+        // Check for table renames
+        let dropped_tables: Vec<_> = diff
+            .tables()
+            .iter()
+            .filter_map(|item| match item {
+                TablesDiffItem::DropTable(table) if !ignored_tables.contains(&table.id) => {
+                    Some(*table)
+                }
+                _ => None,
+            })
+            .collect();
+
+        let added_tables: Vec<_> = diff
+            .tables()
+            .iter()
+            .filter_map(|item| match item {
+                TablesDiffItem::CreateTable(table) => Some(*table),
+                _ => None,
+            })
+            .collect();
+
+        // If there are both dropped and added tables, ask about potential renames
+        if !dropped_tables.is_empty() && !added_tables.is_empty() {
+            for dropped_table in &dropped_tables {
+                let mut options = vec![format!(" Drop \"{}\" ✖", dropped_table.name)];
+                for added_table in &added_tables {
+                    options.push(format!(
+                        " Rename \"{}\" → \"{}\"",
+                        dropped_table.name, added_table.name
+                    ));
+ } + + let selection = Select::with_theme(&dialoguer_theme()) + .with_prompt(format!(" Table \"{}\" is missing", dropped_table.name)) + .items(&options) + .default(0) + .interact()?; + + if selection == 0 { + // User confirmed it was dropped + ignored_tables.insert(dropped_table.id); + } else { + // User indicated a rename (selection - 1 maps to added_tables index) + let to_table = added_tables[selection - 1]; + drop(diff); + hints.add_table_hint(dropped_table.id, to_table.id); + continue 'main; // Regenerate diff with new hint + } + } + } + + // Check for column and index renames within altered tables + for item in diff.tables().iter() { + if let TablesDiffItem::AlterTable { + previous, + next: _, + columns, + indices, + } = item + { + // Handle column renames + let dropped_columns: Vec<_> = columns + .iter() + .filter_map(|item| match item { + ColumnsDiffItem::DropColumn(column) + if !ignored_columns + .get(&previous.id) + .is_some_and(|set| set.contains(&column.id)) => + { + Some(*column) + } + _ => None, + }) + .collect(); + + let added_columns: Vec<_> = columns + .iter() + .filter_map(|item| match item { + ColumnsDiffItem::AddColumn(column) => Some(*column), + _ => None, + }) + .collect(); + + if !dropped_columns.is_empty() && !added_columns.is_empty() { + for dropped_column in &dropped_columns { + let mut options = vec![format!(" Drop \"{}\" ✖", dropped_column.name)]; + for added_column in &added_columns { + options.push(format!( + " Rename \"{}\" → \"{}\"", + dropped_column.name, added_column.name + )); + } + + let selection = Select::with_theme(&dialoguer_theme()) + .with_prompt(format!( + " Column \"{}\".\"{}\" is missing", + previous.name, dropped_column.name + )) + .items(&options) + .default(0) + .interact()?; + + if selection == 0 { + // User confirmed it was dropped + ignored_columns + .entry(previous.id) + .or_default() + .insert(dropped_column.id); + } else { + // User indicated a rename + let next_column = added_columns[selection - 1]; + drop(diff); + hints.add_column_hint(dropped_column.id, next_column.id); + continue 'main; // Regenerate diff with new hint + } + } + } + + // Handle index renames + let dropped_indices: Vec<_> = indices + .iter() + .filter_map(|item| match item { + IndicesDiffItem::DropIndex(index) + if !ignored_indices + .get(&previous.id) + .is_some_and(|set| set.contains(&index.id)) => + { + Some(*index) + } + _ => None, + }) + .collect(); + + let added_indices: Vec<_> = indices + .iter() + .filter_map(|item| match item { + IndicesDiffItem::CreateIndex(index) => Some(*index), + _ => None, + }) + .collect(); + + if !dropped_indices.is_empty() && !added_indices.is_empty() { + for dropped_index in &dropped_indices { + let mut options = vec![format!(" Drop \"{}\" ✖", dropped_index.name)]; + for added_index in &added_indices { + options.push(format!( + " Rename \"{}\" → \"{}\"", + dropped_index.name, added_index.name + )); + } + + let selection = Select::with_theme(&dialoguer_theme()) + .with_prompt(format!( + " Index \"{}\".\"{}\" is missing", + previous.name, dropped_index.name + )) + .items(&options) + .default(0) + .interact()?; + + if selection == 0 { + // User confirmed it was dropped + ignored_indices + .entry(previous.id) + .or_default() + .insert(dropped_index.id); + } else { + // User indicated a rename + let to_index = added_indices[selection - 1]; + drop(diff); + hints.add_index_hint(dropped_index.id, to_index.id); + continue 'main; // Regenerate diff with new hint + } + } + } + } + } + + // No more potential renames to ask about + break; + } + 
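+    // Every remaining dropped or added item has been explicitly confirmed by the user
+    // at this point, so the collected hints describe all detected renames.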
+    Ok(hints)
+}
+
+impl GenerateCommand {
+    pub(crate) fn run(self, db: &Db, config: &Config) -> Result<()> {
+        println!();
+        println!(
+            " {}",
+            style("Generate Migration").cyan().bold().underlined()
+        );
+        println!();
+
+        let history_path = config.migration.get_history_file_path();
+
+        fs::create_dir_all(config.migration.get_migrations_dir())?;
+        fs::create_dir_all(config.migration.get_snapshots_dir())?;
+        fs::create_dir_all(history_path.parent().unwrap())?;
+
+        let mut history = HistoryFile::load_or_default(&history_path)?;
+
+        let previous_snapshot = history
+            .migrations()
+            .last()
+            .map(|f| {
+                SnapshotFile::load(config.migration.get_snapshots_dir().join(&f.snapshot_name))
+            })
+            .transpose()?;
+        let previous_schema = previous_snapshot
+            .map(|snapshot| snapshot.schema)
+            .unwrap_or_else(Schema::default);
+
+        let schema = toasty::schema::db::Schema::clone(&db.schema().db);
+
+        let rename_hints = collect_rename_hints(&previous_schema, &schema)?;
+        let diff = SchemaDiff::from(&previous_schema, &schema, &rename_hints);
+
+        if diff.is_empty() {
+            println!(
+                " {}",
+                style("The current schema matches the previous snapshot. No migration needed.")
+                    .magenta()
+                    .dim()
+            );
+            println!();
+            return Ok(());
+        }
+
+        let snapshot = SnapshotFile::new(schema.clone());
+        let migration_number = history.next_migration_number();
+        let snapshot_name = format!("{:04}_snapshot.toml", migration_number);
+        let snapshot_path = config.migration.get_snapshots_dir().join(&snapshot_name);
+
+        let migration_name = format!(
+            "{:04}_{}.sql",
+            migration_number,
+            self.name.as_deref().unwrap_or("migration")
+        );
+        let migration_path = config.migration.get_migrations_dir().join(&migration_name);
+
+        let migration = db.driver().generate_migration(&diff);
+
+        history.add_migration(HistoryFileMigration {
+            // Some databases only support signed 64-bit integers.
+            id: rand::thread_rng().gen_range(0..i64::MAX) as u64,
+            name: migration_name.clone(),
+            snapshot_name: snapshot_name.clone(),
+            checksum: None,
+        });
+
+        let Migration::Sql(sql) = migration;
+        std::fs::write(migration_path, sql)?;
+        println!(
+            " {} {}",
+            style("✓").green().bold(),
+            style(format!("Created migration file: {}", migration_name)).dim()
+        );
+
+        snapshot.save(&snapshot_path)?;
+        println!(
+            " {} {}",
+            style("✓").green().bold(),
+            style(format!("Created snapshot: {}", snapshot_name)).dim()
+        );
+
+        history.save(&history_path)?;
+        println!(
+            " {} {}",
+            style("✓").green().bold(),
+            style("Updated migration history").dim()
+        );
+
+        println!();
+        println!(
+            " {}",
+            style(format!(
+                "Migration '{}' generated successfully",
+                migration_name
+            ))
+            .green()
+            .bold()
+        );
+        println!();
+
+        Ok(())
+    }
+}
diff --git a/crates/toasty-cli/src/migration/history_file.rs b/crates/toasty-cli/src/migration/history_file.rs
new file mode 100644
index 000000000..7ad226516
--- /dev/null
+++ b/crates/toasty-cli/src/migration/history_file.rs
@@ -0,0 +1,121 @@
+use anyhow::{Result, bail};
+use serde::{Deserialize, Serialize};
+use std::fmt;
+use std::path::Path;
+use std::str::FromStr;
+
+const HISTORY_FILE_VERSION: u32 = 1;
+
+/// History file containing the record of all applied migrations
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct HistoryFile {
+    /// History file format version
+    version: u32,
+
+    /// Migration history
+    migrations: Vec<HistoryFileMigration>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct HistoryFileMigration {
+    /// Random unique identifier for this migration.
+    pub id: u64,
+
+    /// Migration name/identifier.
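+    /// This is also the migration's file name on disk, e.g. `0001_migration.sql`.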
+    pub name: String,
+
+    /// Name of the snapshot generated alongside this migration.
+    pub snapshot_name: String,
+
+    /// Optional checksum of the migration file to detect changes
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub checksum: Option,
+}
+
+impl HistoryFile {
+    /// Create a new empty history file
+    pub fn new() -> Self {
+        Self {
+            version: HISTORY_FILE_VERSION,
+            migrations: Vec::new(),
+        }
+    }
+
+    /// Load a history file from a TOML file
+    pub fn load(path: impl AsRef<Path>) -> Result<Self> {
+        let contents = std::fs::read_to_string(path.as_ref())?;
+        contents.parse()
+    }
+
+    /// Save the history file to a TOML file
+    pub fn save(&self, path: impl AsRef<Path>) -> Result<()> {
+        std::fs::write(path.as_ref(), self.to_string())?;
+        Ok(())
+    }
+
+    /// Loads the history file, or returns an empty one if it does not exist
+    pub fn load_or_default(path: impl AsRef<Path>) -> Result<Self> {
+        if std::fs::exists(&path)? {
+            return Self::load(path);
+        }
+        Ok(Self::default())
+    }
+
+    pub fn migrations(&self) -> &[HistoryFileMigration] {
+        &self.migrations
+    }
+
+    /// Get the next migration number by parsing the last migration's name
+    pub fn next_migration_number(&self) -> u32 {
+        self.migrations
+            .last()
+            .and_then(|m| {
+                // Extract the first 4 digits from the migration name (e.g., "0001_migration.sql" -> 1)
+                m.name.split('_').next()?.parse::<u32>().ok()
+            })
+            .map(|n| n + 1)
+            .unwrap_or(0)
+    }
+
+    /// Add a migration to the history
+    pub fn add_migration(&mut self, migration: HistoryFileMigration) {
+        self.migrations.push(migration);
+    }
+
+    /// Remove a migration from the history by index
+    pub fn remove_migration(&mut self, index: usize) {
+        self.migrations.remove(index);
+    }
+}
+
+impl Default for HistoryFile {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl FromStr for HistoryFile {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let file: HistoryFile = toml::from_str(s)?;
+
+        // Validate version
+        if file.version != HISTORY_FILE_VERSION {
+            bail!(
+                "Unsupported history file version: {}. Expected version {}",
+                file.version,
+                HISTORY_FILE_VERSION
+            );
+        }
+
+        Ok(file)
+    }
+}
+
+impl fmt::Display for HistoryFile {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let toml_str = toml::to_string_pretty(self).map_err(|_| fmt::Error)?;
+        write!(f, "{}", toml_str)
+    }
+}
diff --git a/crates/toasty-cli/src/migration/snapshot.rs b/crates/toasty-cli/src/migration/snapshot.rs
new file mode 100644
index 000000000..ab7c226cc
--- /dev/null
+++ b/crates/toasty-cli/src/migration/snapshot.rs
@@ -0,0 +1,51 @@
+use super::SnapshotFile;
+use crate::Config;
+use anyhow::Result;
+use clap::Parser;
+use console::style;
+use toasty::Db;
+
+#[derive(Parser, Debug)]
+pub struct SnapshotCommand {
+    // Future options can be added here
+}
+
+impl SnapshotCommand {
+    pub(crate) fn run(self, db: &Db, _config: &Config) -> Result<()> {
+        println!();
+        println!(
+            " {}",
+            style("Current Schema Snapshot").cyan().bold().underlined()
+        );
+        println!();
+
+        let snapshot_file = SnapshotFile::new(toasty::schema::db::Schema::clone(&db.schema().db));
+
+        // Print the snapshot with nice formatting
+        let snapshot_str = snapshot_file.to_string();
+        for line in snapshot_str.lines() {
+            if line.starts_with('[') {
+                println!(" {}", style(line).yellow().bold());
+            } else if line.contains('=') {
+                let parts: Vec<&str> = line.splitn(2, '=').collect();
+                if parts.len() == 2 {
+                    println!(
+                        " {}{} {}",
+                        style(parts[0]).cyan(),
+                        style("=").dim(),
+                        style(parts[1]).green()
+                    );
+                } else {
+                    println!(" {}", style(line).dim());
+                }
+            } else if line.trim().is_empty() {
+                println!();
+            } else {
+                println!(" {}", style(line).dim());
+            }
+        }
+
+        println!();
+        Ok(())
+    }
+}
diff --git a/crates/toasty-cli/src/migration/snapshot_file.rs b/crates/toasty-cli/src/migration/snapshot_file.rs
new file mode 100644
index 000000000..b5103121e
--- /dev/null
+++ b/crates/toasty-cli/src/migration/snapshot_file.rs
@@ -0,0 +1,105 @@
+use anyhow::{Result, bail};
+use serde::{Deserialize, Serialize};
+use std::fmt;
+use std::path::Path;
+use std::str::FromStr;
+use toasty_core::schema::db::Schema;
+use toml_edit::{DocumentMut, Item};
+
+const SNAPSHOT_FILE_VERSION: u32 = 1;
+
+/// Snapshot file containing the current database schema state
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct SnapshotFile {
+    /// Snapshot file format version
+    version: u32,
+
+    /// The database schema
+    pub schema: Schema,
+}
+
+impl SnapshotFile {
+    /// Create a new snapshot file with the given schema
+    pub fn new(schema: Schema) -> Self {
+        Self {
+            version: SNAPSHOT_FILE_VERSION,
+            schema,
+        }
+    }
+
+    /// Load a snapshot file from a TOML file
+    pub fn load(path: impl AsRef<Path>) -> Result<Self> {
+        let contents = std::fs::read_to_string(path.as_ref())?;
+        contents.parse()
+    }
+
+    /// Save the snapshot file to a TOML file
+    pub fn save(&self, path: impl AsRef<Path>) -> Result<()> {
+        std::fs::write(path.as_ref(), self.to_string())?;
+        Ok(())
+    }
+}
+
+impl FromStr for SnapshotFile {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let file: SnapshotFile = toml::from_str(s)?;
+
+        // Validate version
+        if file.version != SNAPSHOT_FILE_VERSION {
+            bail!(
+                "Unsupported snapshot file version: {}.
Expected version {}", + file.version, + SNAPSHOT_FILE_VERSION + ); + } + + Ok(file) + } +} + +impl fmt::Display for SnapshotFile { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let doc = self.to_toml_document().map_err(|_| fmt::Error)?; + write!(f, "{}", doc) + } +} + +impl SnapshotFile { + fn to_toml_document(&self) -> Result { + let mut doc = toml_edit::ser::to_document(self)?; + for (_key, item) in doc.as_table_mut().iter_mut() { + if item.is_inline_table() { + let mut placeholder = Item::None; + std::mem::swap(item, &mut placeholder); + let mut table = placeholder.into_table().unwrap(); + + for (_key, item) in table.iter_mut() { + if item.is_array() { + let mut placeholder = Item::None; + std::mem::swap(item, &mut placeholder); + let mut array = placeholder.into_array_of_tables().unwrap(); + + for table in array.iter_mut() { + for (_key, item) in table.iter_mut() { + if item.is_array() { + let mut placeholder = Item::None; + std::mem::swap(item, &mut placeholder); + let array = placeholder.into_array_of_tables().unwrap(); + *item = array.into(); + } + } + } + + *item = array.into(); + } + } + + *item = table.into(); + } + } + + Ok(doc) + } +} diff --git a/crates/toasty-cli/src/theme.rs b/crates/toasty-cli/src/theme.rs new file mode 100644 index 000000000..47ae8bfc3 --- /dev/null +++ b/crates/toasty-cli/src/theme.rs @@ -0,0 +1,20 @@ +use console::style; +use dialoguer::theme::ColorfulTheme; + +/// Returns the standard theme used for interactive prompts +pub fn dialoguer_theme() -> ColorfulTheme { + ColorfulTheme { + active_item_style: console::Style::new().cyan().bold(), + active_item_prefix: style("❯".to_string()).cyan().bold(), + inactive_item_prefix: style(" ".to_string()), + checked_item_prefix: style("✔".to_string()).green(), + unchecked_item_prefix: style("✖".to_string()).red(), + prompt_style: console::Style::new().bold(), + prompt_prefix: style("?".to_string()).yellow().bold(), + success_prefix: style("✔".to_string()).green().bold(), + error_prefix: style("✖".to_string()).red().bold(), + hint_style: console::Style::new().dim(), + values_style: console::Style::new().cyan(), + ..Default::default() + } +} diff --git a/crates/toasty-core/Cargo.toml b/crates/toasty-core/Cargo.toml index 51f1f17ef..fcec08413 100644 --- a/crates/toasty-core/Cargo.toml +++ b/crates/toasty-core/Cargo.toml @@ -12,6 +12,7 @@ indexmap.workspace = true jiff = { workspace = true, optional = true } rust_decimal = { workspace = true, optional = true } bigdecimal = { workspace = true, optional = true } +serde = { workspace = true, optional = true } std-util.workspace = true tokio-stream.workspace = true uuid.workspace = true @@ -22,6 +23,7 @@ assert-struct = ["dep:assert-struct"] rust_decimal = ["dep:rust_decimal"] bigdecimal = ["dep:bigdecimal"] jiff = ["dep:jiff"] +serde = ["dep:serde", "bit-set/serde"] [dev-dependencies] pretty_assertions.workspace = true diff --git a/crates/toasty-core/src/driver.rs b/crates/toasty-core/src/driver.rs index 7d562d692..0666e2f00 100644 --- a/crates/toasty-core/src/driver.rs +++ b/crates/toasty-core/src/driver.rs @@ -7,7 +7,10 @@ pub use response::{Response, Rows}; pub mod operation; pub use operation::Operation; -use crate::{async_trait, schema::db::Schema}; +use crate::{ + async_trait, + schema::db::{AppliedMigration, Migration, Schema, SchemaDiff}, +}; use std::{fmt::Debug, sync::Arc}; @@ -24,6 +27,9 @@ pub trait Driver: Debug + Send + Sync + 'static { fn max_connections(&self) -> Option { None } + + /// Generates a migration from a [`SchemaDiff`]. 
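+    ///
+    /// The returned [`Migration`] is currently SQL-based: it wraps the statements needed to
+    /// move the database from the previous schema to the new one.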
+    fn generate_migration(&self, schema_diff: &SchemaDiff<'_>) -> Migration;
 }
 
 #[async_trait]
@@ -38,4 +44,12 @@ pub trait Connection: Debug + Send + 'static {
     async fn reset_db(&mut self, _schema: &Schema) -> crate::Result<()> {
         unimplemented!()
     }
+
+    async fn applied_migrations(&mut self) -> crate::Result<Vec<AppliedMigration>>;
+    async fn apply_migration(
+        &mut self,
+        id: u64,
+        name: String,
+        migration: &Migration,
+    ) -> crate::Result<()>;
 }
diff --git a/crates/toasty-core/src/driver/capability.rs b/crates/toasty-core/src/driver/capability.rs
index ca2cb97e2..70545bea2 100644
--- a/crates/toasty-core/src/driver/capability.rs
+++ b/crates/toasty-core/src/driver/capability.rs
@@ -5,9 +5,12 @@ pub struct Capability {
     /// When true, the database uses a SQL-based query language.
     pub sql: bool,
 
-    /// Column storage types supported by the database
+    /// Column storage types supported by the database.
     pub storage_types: StorageTypes,
 
+    /// Schema mutation capabilities supported by the database.
+    pub schema_mutations: SchemaMutations,
+
     /// SQL: supports update statements in CTE queries.
     pub cte_with_update: bool,
 
@@ -92,6 +95,17 @@ pub struct StorageTypes {
     pub max_unsigned_integer: Option,
 }
 
+/// The database's capabilities to mutate the schema (tables, columns, indices).
+#[derive(Debug)]
+pub struct SchemaMutations {
+    /// Whether the database can change the type of an existing column.
+    pub alter_column_type: bool,
+
+    /// Whether the database can change name, type and constraints of a column all
+    /// within a single statement.
+    pub alter_column_properties_atomic: bool,
+}
+
 impl Capability {
     /// Validates the consistency of the capability configuration.
     ///
@@ -148,6 +162,7 @@ impl Capability {
     pub const SQLITE: Self = Self {
         sql: true,
         storage_types: StorageTypes::SQLITE,
+        schema_mutations: SchemaMutations::SQLITE,
         cte_with_update: false,
         select_for_update: false,
         returning_from_mutation: true,
@@ -172,6 +187,7 @@
     pub const POSTGRESQL: Self = Self {
         cte_with_update: true,
         storage_types: StorageTypes::POSTGRESQL,
+        schema_mutations: SchemaMutations::POSTGRESQL,
         select_for_update: true,
         auto_increment: true,
         bigdecimal_implemented: false,
@@ -193,6 +209,7 @@
     pub const MYSQL: Self = Self {
         cte_with_update: false,
         storage_types: StorageTypes::MYSQL,
+        schema_mutations: SchemaMutations::MYSQL,
         select_for_update: true,
         returning_from_mutation: false,
         auto_increment: true,
@@ -214,6 +231,7 @@
     pub const DYNAMODB: Self = Self {
         sql: false,
         storage_types: StorageTypes::DYNAMODB,
+        schema_mutations: SchemaMutations::DYNAMODB,
         cte_with_update: false,
         select_for_update: false,
         returning_from_mutation: false,
@@ -355,6 +373,29 @@ impl StorageTypes {
     };
 }
 
+impl SchemaMutations {
+    pub const SQLITE: Self = Self {
+        alter_column_type: false,
+        alter_column_properties_atomic: false,
+    };
+
+    pub const POSTGRESQL: Self = Self {
+        alter_column_type: true,
+        alter_column_properties_atomic: false,
+    };
+
+    pub const MYSQL: Self = Self {
+        alter_column_type: true,
+        alter_column_properties_atomic: true,
+    };
+
+    // DynamoDB migrations are currently not supported.
+ pub const DYNAMODB: Self = Self { + alter_column_type: false, + alter_column_properties_atomic: false, + }; +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/toasty-core/src/schema/app/field.rs b/crates/toasty-core/src/schema/app/field.rs index 47ec45f6a..d229a522e 100644 --- a/crates/toasty-core/src/schema/app/field.rs +++ b/crates/toasty-core/src/schema/app/field.rs @@ -30,6 +30,7 @@ pub struct Field { } #[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct FieldId { pub model: ModelId, pub index: usize, diff --git a/crates/toasty-core/src/schema/app/index.rs b/crates/toasty-core/src/schema/app/index.rs index fc9c16493..cf35d6c33 100644 --- a/crates/toasty-core/src/schema/app/index.rs +++ b/crates/toasty-core/src/schema/app/index.rs @@ -16,7 +16,7 @@ pub struct Index { pub primary_key: bool, } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct IndexId { pub model: ModelId, pub index: usize, diff --git a/crates/toasty-core/src/schema/app/model.rs b/crates/toasty-core/src/schema/app/model.rs index 3209e1ac9..6821ed758 100644 --- a/crates/toasty-core/src/schema/app/model.rs +++ b/crates/toasty-core/src/schema/app/model.rs @@ -24,6 +24,7 @@ pub struct Model { } #[derive(Copy, Clone, Eq, PartialEq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct ModelId(pub usize); impl Model { diff --git a/crates/toasty-core/src/schema/db.rs b/crates/toasty-core/src/schema/db.rs index c2a3ceb66..45a2b8574 100644 --- a/crates/toasty-core/src/schema/db.rs +++ b/crates/toasty-core/src/schema/db.rs @@ -1,17 +1,17 @@ mod column; -pub use column::{Column, ColumnId}; - +mod diff; mod index; -pub use index::{Index, IndexColumn, IndexId, IndexOp, IndexScope}; - +mod migration; mod pk; -pub use pk::PrimaryKey; - mod schema; -pub use schema::Schema; - mod table; -pub use table::{Table, TableId}; - mod ty; -pub use ty::Type; + +pub use column::*; +pub use diff::*; +pub use index::*; +pub use migration::*; +pub use pk::*; +pub use schema::*; +pub use table::*; +pub use ty::*; diff --git a/crates/toasty-core/src/schema/db/column.rs b/crates/toasty-core/src/schema/db/column.rs index ad0cf7e8e..e41d55c14 100644 --- a/crates/toasty-core/src/schema/db/column.rs +++ b/crates/toasty-core/src/schema/db/column.rs @@ -1,9 +1,14 @@ -use super::{table, TableId, Type}; +use super::{table, DiffContext, TableId, Type}; use crate::stmt; -use std::fmt; +use std::{ + collections::{HashMap, HashSet}, + fmt, + ops::Deref, +}; -#[derive(Debug, PartialEq)] +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Column { /// Uniquely identifies the column in the schema. 
pub id: ColumnId, @@ -30,13 +35,12 @@ pub struct Column { } #[derive(PartialEq, Eq, Clone, Copy, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct ColumnId { pub table: TableId, pub index: usize, } -impl Column {} - impl ColumnId { pub(crate) fn placeholder() -> Self { Self { @@ -57,3 +61,301 @@ impl fmt::Debug for ColumnId { write!(fmt, "ColumnId({}/{})", self.table.0, self.index) } } + +pub struct ColumnsDiff<'a> { + items: Vec>, +} + +impl<'a> ColumnsDiff<'a> { + pub fn from(cx: &DiffContext<'a>, previous: &'a [Column], next: &'a [Column]) -> Self { + fn has_diff(previous: &Column, next: &Column) -> bool { + previous.name != next.name + || previous.storage_ty != next.storage_ty + || previous.nullable != next.nullable + || previous.primary_key != next.primary_key + || previous.auto_increment != next.auto_increment + } + + let mut items = vec![]; + let mut add_ids: HashSet<_> = next.iter().map(|next| next.id).collect(); + + let next_map = + HashMap::<&str, &'a Column>::from_iter(next.iter().map(|to| (to.name.as_str(), to))); + + for previous in previous { + let next = if let Some(next_id) = cx.rename_hints().get_column(previous.id) { + cx.next().column(next_id) + } else if let Some(next) = next_map.get(previous.name.as_str()) { + next + } else { + items.push(ColumnsDiffItem::DropColumn(previous)); + continue; + }; + + add_ids.remove(&next.id); + + if has_diff(previous, next) { + items.push(ColumnsDiffItem::AlterColumn { previous, next }); + } + } + + for column_id in add_ids { + items.push(ColumnsDiffItem::AddColumn(cx.next().column(column_id))); + } + + Self { items } + } + + pub const fn is_empty(&self) -> bool { + self.items.is_empty() + } +} + +impl<'a> Deref for ColumnsDiff<'a> { + type Target = Vec>; + + fn deref(&self) -> &Self::Target { + &self.items + } +} + +pub enum ColumnsDiffItem<'a> { + AddColumn(&'a Column), + DropColumn(&'a Column), + AlterColumn { + previous: &'a Column, + next: &'a Column, + }, +} + +#[cfg(test)] +mod tests { + use crate::schema::db::{ + Column, ColumnId, ColumnsDiff, ColumnsDiffItem, DiffContext, PrimaryKey, RenameHints, + Schema, Table, TableId, Type, + }; + use crate::stmt; + + fn make_column( + table_id: usize, + index: usize, + name: &str, + storage_ty: Type, + nullable: bool, + ) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: stmt::Type::String, // Simplified for tests + storage_ty, + nullable, + primary_key: false, + auto_increment: false, + } + } + + fn make_schema_with_columns(table_id: usize, columns: Vec) -> Schema { + let mut schema = Schema::default(); + schema.tables.push(Table { + id: TableId(table_id), + name: "test_table".to_string(), + columns, + primary_key: PrimaryKey { + columns: vec![], + index: super::super::IndexId { + table: TableId(table_id), + index: 0, + }, + }, + indices: vec![], + }); + schema + } + + #[test] + fn test_no_diff_same_columns() { + let from_cols = vec![ + make_column(0, 0, "id", Type::Integer(8), false), + make_column(0, 1, "name", Type::Text, false), + ]; + let to_cols = vec![ + make_column(0, 0, "id", Type::Integer(8), false), + make_column(0, 1, "name", Type::Text, false), + ]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + assert!(diff.is_empty()); + } + 
+ #[test] + fn test_add_column() { + let from_cols = vec![make_column(0, 0, "id", Type::Integer(8), false)]; + let to_cols = vec![ + make_column(0, 0, "id", Type::Integer(8), false), + make_column(0, 1, "name", Type::Text, false), + ]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], ColumnsDiffItem::AddColumn(_))); + if let ColumnsDiffItem::AddColumn(col) = diff.items[0] { + assert_eq!(col.name, "name"); + } + } + + #[test] + fn test_drop_column() { + let from_cols = vec![ + make_column(0, 0, "id", Type::Integer(8), false), + make_column(0, 1, "name", Type::Text, false), + ]; + let to_cols = vec![make_column(0, 0, "id", Type::Integer(8), false)]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], ColumnsDiffItem::DropColumn(_))); + if let ColumnsDiffItem::DropColumn(col) = diff.items[0] { + assert_eq!(col.name, "name"); + } + } + + #[test] + fn test_alter_column_type() { + let from_cols = vec![make_column(0, 0, "id", Type::Integer(8), false)]; + let to_cols = vec![make_column(0, 0, "id", Type::Text, false)]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], ColumnsDiffItem::AlterColumn { .. })); + } + + #[test] + fn test_alter_column_nullable() { + let from_cols = vec![make_column(0, 0, "id", Type::Integer(8), false)]; + let to_cols = vec![make_column(0, 0, "id", Type::Integer(8), true)]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], ColumnsDiffItem::AlterColumn { .. })); + } + + #[test] + fn test_rename_column_with_hint() { + // Column renamed from "old_name" to "new_name" + let from_cols = vec![make_column(0, 0, "old_name", Type::Text, false)]; + let to_cols = vec![make_column(0, 0, "new_name", Type::Text, false)]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + + let mut hints = RenameHints::new(); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 0, + }, + ColumnId { + table: TableId(0), + index: 0, + }, + ); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], ColumnsDiffItem::AlterColumn { .. 
})); + if let ColumnsDiffItem::AlterColumn { previous, next } = diff.items[0] { + assert_eq!(previous.name, "old_name"); + assert_eq!(next.name, "new_name"); + } + } + + #[test] + fn test_rename_column_without_hint_is_drop_and_add() { + // Column renamed from "old_name" to "new_name", but no hint provided + // Should be treated as drop + add + let from_cols = vec![make_column(0, 0, "old_name", Type::Text, false)]; + let to_cols = vec![make_column(0, 0, "new_name", Type::Text, false)]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + assert_eq!(diff.items.len(), 2); + + let has_drop = diff + .items + .iter() + .any(|item| matches!(item, ColumnsDiffItem::DropColumn(_))); + let has_add = diff + .items + .iter() + .any(|item| matches!(item, ColumnsDiffItem::AddColumn(_))); + assert!(has_drop); + assert!(has_add); + } + + #[test] + fn test_multiple_operations() { + let from_cols = vec![ + make_column(0, 0, "id", Type::Integer(8), false), + make_column(0, 1, "old_name", Type::Text, false), + make_column(0, 2, "to_drop", Type::Text, false), + ]; + let to_cols = vec![ + make_column(0, 0, "id", Type::Text, false), // type changed + make_column(0, 1, "new_name", Type::Text, false), // renamed + make_column(0, 2, "added", Type::Integer(8), false), // new column + ]; + + let from_schema = make_schema_with_columns(0, from_cols.clone()); + let to_schema = make_schema_with_columns(0, to_cols.clone()); + + let mut hints = RenameHints::new(); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 1, + }, + ColumnId { + table: TableId(0), + index: 1, + }, + ); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = ColumnsDiff::from(&cx, &from_cols, &to_cols); + // Should have: 1 alter (id type changed), 1 alter (renamed), 1 drop (to_drop), 1 add (added) + assert_eq!(diff.items.len(), 4); + } +} diff --git a/crates/toasty-core/src/schema/db/diff.rs b/crates/toasty-core/src/schema/db/diff.rs new file mode 100644 index 000000000..ab852dd53 --- /dev/null +++ b/crates/toasty-core/src/schema/db/diff.rs @@ -0,0 +1,69 @@ +use std::collections::HashMap; + +use crate::schema::db::{ColumnId, IndexId, Schema, TableId}; + +#[derive(Default)] +pub struct RenameHints { + tables: HashMap, + columns: HashMap, + indices: HashMap, +} + +impl RenameHints { + pub fn new() -> Self { + Self::default() + } + + pub fn add_table_hint(&mut self, from: TableId, to: TableId) { + self.tables.insert(from, to); + } + + pub fn add_column_hint(&mut self, from: ColumnId, to: ColumnId) { + self.columns.insert(from, to); + } + + pub fn add_index_hint(&mut self, from: IndexId, to: IndexId) { + self.indices.insert(from, to); + } + + pub fn get_table(&self, from: TableId) -> Option { + self.tables.get(&from).copied() + } + + pub fn get_column(&self, from: ColumnId) -> Option { + self.columns.get(&from).copied() + } + + pub fn get_index(&self, from: IndexId) -> Option { + self.indices.get(&from).copied() + } +} + +pub struct DiffContext<'a> { + previous: &'a Schema, + next: &'a Schema, + + rename_hints: &'a RenameHints, +} + +impl<'a> DiffContext<'a> { + pub fn new(previous: &'a Schema, next: &'a Schema, rename_hints: &'a RenameHints) -> Self { + Self { + previous, + next, + rename_hints, + } + } + + pub fn rename_hints(&self) -> &'a RenameHints { + self.rename_hints + } 
+ + pub fn previous(&self) -> &'a Schema { + self.previous + } + + pub fn next(&self) -> &'a Schema { + self.next + } +} diff --git a/crates/toasty-core/src/schema/db/index.rs b/crates/toasty-core/src/schema/db/index.rs index 6c0d45f54..231abeab1 100644 --- a/crates/toasty-core/src/schema/db/index.rs +++ b/crates/toasty-core/src/schema/db/index.rs @@ -1,9 +1,14 @@ -use super::{Column, ColumnId, Schema, TableId}; +use super::{Column, ColumnId, DiffContext, Schema, TableId}; use crate::stmt; -use std::fmt; +use std::{ + collections::{HashMap, HashSet}, + fmt, + ops::Deref, +}; -#[derive(Debug)] +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Index { /// Uniquely identifies the index within the schema pub id: IndexId, @@ -24,13 +29,15 @@ pub struct Index { pub primary_key: bool, } -#[derive(Copy, Clone, Eq, PartialEq)] +#[derive(Copy, Clone, Eq, PartialEq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct IndexId { pub table: TableId, pub index: usize, } -#[derive(Debug)] +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct IndexColumn { /// The column being indexed pub column: ColumnId, @@ -42,13 +49,15 @@ pub struct IndexColumn { pub scope: IndexScope, } -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum IndexOp { Eq, Sort(stmt::Direction), } -#[derive(Debug, Copy, Clone, Eq, PartialEq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum IndexScope { /// The index column is used to partition rows across nodes of a distributed database. 
Partition, @@ -87,3 +96,577 @@ impl fmt::Debug for IndexId { write!(fmt, "IndexId({}/{})", self.table.0, self.index) } } + +pub struct IndicesDiff<'a> { + items: Vec>, +} + +impl<'a> IndicesDiff<'a> { + pub fn from(cx: &DiffContext<'a>, previous: &'a [Index], next: &'a [Index]) -> Self { + fn has_diff(cx: &DiffContext<'_>, previous: &Index, next: &Index) -> bool { + // Check basic properties + if previous.name != next.name + || previous.columns.len() != next.columns.len() + || previous.unique != next.unique + || previous.primary_key != next.primary_key + { + return true; + } + + // Check if index columns have changed + for (previous_col, next_col) in previous.columns.iter().zip(next.columns.iter()) { + // Check if op or scope changed + if previous_col.op != next_col.op || previous_col.scope != next_col.scope { + return true; + } + + // Check if the column changed (accounting for renames) + let columns_match = + if let Some(renamed_to) = cx.rename_hints().get_column(previous_col.column) { + // Column was renamed - check if it matches the target column + renamed_to == next_col.column + } else { + // No rename hint - check if columns match by name + let previous_column = cx.previous().column(previous_col.column); + let next_column = cx.next().column(next_col.column); + previous_column.name == next_column.name + }; + + if !columns_match { + return true; + } + } + + false + } + + let mut items = vec![]; + let mut create_ids: HashSet<_> = next.iter().map(|to| to.id).collect(); + + let next_map = + HashMap::<&str, &'a Index>::from_iter(next.iter().map(|to| (to.name.as_str(), to))); + + for previous in previous { + let next = if let Some(next_id) = cx.rename_hints().get_index(previous.id) { + cx.next().index(next_id) + } else if let Some(next) = next_map.get(previous.name.as_str()) { + next + } else { + items.push(IndicesDiffItem::DropIndex(previous)); + continue; + }; + + create_ids.remove(&next.id); + + if has_diff(cx, previous, next) { + items.push(IndicesDiffItem::AlterIndex { previous, next }); + } + } + + for index_id in create_ids { + items.push(IndicesDiffItem::CreateIndex(cx.next().index(index_id))); + } + + Self { items } + } + + pub const fn is_empty(&self) -> bool { + self.items.is_empty() + } +} + +impl<'a> Deref for IndicesDiff<'a> { + type Target = Vec>; + + fn deref(&self) -> &Self::Target { + &self.items + } +} + +pub enum IndicesDiffItem<'a> { + CreateIndex(&'a Index), + DropIndex(&'a Index), + AlterIndex { + previous: &'a Index, + next: &'a Index, + }, +} + +#[cfg(test)] +mod tests { + use crate::schema::db::{ + Column, ColumnId, DiffContext, Index, IndexColumn, IndexId, IndexOp, IndexScope, + IndicesDiff, IndicesDiffItem, PrimaryKey, RenameHints, Schema, Table, TableId, Type, + }; + use crate::stmt; + + fn make_column(table_id: usize, index: usize, name: &str) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: stmt::Type::String, + storage_ty: Type::Text, + nullable: false, + primary_key: false, + auto_increment: false, + } + } + + fn make_index( + table_id: usize, + index: usize, + name: &str, + columns: Vec<(usize, IndexOp, IndexScope)>, + unique: bool, + ) -> Index { + Index { + id: IndexId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + on: TableId(table_id), + columns: columns + .into_iter() + .map(|(col_idx, op, scope)| IndexColumn { + column: ColumnId { + table: TableId(table_id), + index: col_idx, + }, + op, + scope, + }) + .collect(), + unique, + primary_key: false, + } + } + + fn 
make_schema_with_indices( + table_id: usize, + columns: Vec, + indices: Vec, + ) -> Schema { + let mut schema = Schema::default(); + schema.tables.push(Table { + id: TableId(table_id), + name: "test_table".to_string(), + columns, + primary_key: PrimaryKey { + columns: vec![], + index: IndexId { + table: TableId(table_id), + index: 0, + }, + }, + indices, + }); + schema + } + + #[test] + fn test_no_diff_same_indices() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert!(diff.is_empty()); + } + + #[test] + fn test_create_index() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![]; + let to_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], IndicesDiffItem::CreateIndex(_))); + if let IndicesDiffItem::CreateIndex(idx) = diff.items[0] { + assert_eq!(idx.name, "idx_name"); + } + } + + #[test] + fn test_drop_index() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], IndicesDiffItem::DropIndex(_))); + if let IndicesDiffItem::DropIndex(idx) = diff.items[0] { + assert_eq!(idx.name, "idx_name"); + } + } + + #[test] + fn test_alter_index_unique() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + true, // changed to unique + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], IndicesDiffItem::AlterIndex { .. 
})); + } + + #[test] + fn test_alter_index_columns() { + let columns = vec![ + make_column(0, 0, "id"), + make_column(0, 1, "name"), + make_column(0, 2, "email"), + ]; + + let from_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![ + (1, IndexOp::Eq, IndexScope::Local), + (2, IndexOp::Eq, IndexScope::Local), + ], + false, + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], IndicesDiffItem::AlterIndex { .. })); + } + + #[test] + fn test_alter_index_op() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Sort(stmt::Direction::Asc), IndexScope::Local)], + false, + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], IndicesDiffItem::AlterIndex { .. })); + } + + #[test] + fn test_alter_index_scope() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Partition)], + false, + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], IndicesDiffItem::AlterIndex { .. })); + } + + #[test] + fn test_rename_index_with_hint() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![make_index( + 0, + 0, + "old_idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "new_idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + + let mut hints = RenameHints::new(); + hints.add_index_hint( + IndexId { + table: TableId(0), + index: 0, + }, + IndexId { + table: TableId(0), + index: 0, + }, + ); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], IndicesDiffItem::AlterIndex { .. 
})); + if let IndicesDiffItem::AlterIndex { previous, next } = diff.items[0] { + assert_eq!(previous.name, "old_idx_name"); + assert_eq!(next.name, "new_idx_name"); + } + } + + #[test] + fn test_rename_index_without_hint_is_drop_and_create() { + let columns = vec![make_column(0, 0, "id"), make_column(0, 1, "name")]; + + let from_indices = vec![make_index( + 0, + 0, + "old_idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "new_idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + assert_eq!(diff.items.len(), 2); + + let has_drop = diff + .items + .iter() + .any(|item| matches!(item, IndicesDiffItem::DropIndex(_))); + let has_create = diff + .items + .iter() + .any(|item| matches!(item, IndicesDiffItem::CreateIndex(_))); + assert!(has_drop); + assert!(has_create); + } + + #[test] + fn test_index_with_renamed_column() { + let from_columns = vec![make_column(0, 0, "id"), make_column(0, 1, "old_name")]; + let to_columns = vec![make_column(0, 0, "id"), make_column(0, 1, "new_name")]; + + let from_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + let to_indices = vec![make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + )]; + + let from_schema = make_schema_with_indices(0, from_columns, from_indices.clone()); + let to_schema = make_schema_with_indices(0, to_columns, to_indices.clone()); + + let mut hints = RenameHints::new(); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 1, + }, + ColumnId { + table: TableId(0), + index: 1, + }, + ); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + // Index should remain unchanged when column is renamed with hint + assert!(diff.is_empty()); + } + + #[test] + fn test_multiple_operations() { + let columns = vec![ + make_column(0, 0, "id"), + make_column(0, 1, "name"), + make_column(0, 2, "email"), + ]; + + let from_indices = vec![ + make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + false, + ), + make_index( + 0, + 1, + "old_idx", + vec![(2, IndexOp::Eq, IndexScope::Local)], + false, + ), + make_index( + 0, + 2, + "idx_to_drop", + vec![(0, IndexOp::Eq, IndexScope::Local)], + false, + ), + ]; + let to_indices = vec![ + make_index( + 0, + 0, + "idx_name", + vec![(1, IndexOp::Eq, IndexScope::Local)], + true, // changed to unique + ), + make_index( + 0, + 1, + "new_idx", + vec![(2, IndexOp::Eq, IndexScope::Local)], + false, + ), + make_index( + 0, + 2, + "idx_added", + vec![(1, IndexOp::Sort(stmt::Direction::Asc), IndexScope::Local)], + false, + ), + ]; + + let from_schema = make_schema_with_indices(0, columns.clone(), from_indices.clone()); + let to_schema = make_schema_with_indices(0, columns, to_indices.clone()); + + let mut hints = RenameHints::new(); + hints.add_index_hint( + IndexId { + table: TableId(0), + index: 1, + }, + IndexId { + table: TableId(0), + index: 1, + }, + ); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = IndicesDiff::from(&cx, &from_indices, &to_indices); + // Should 
have: 1 alter (idx_name unique changed), 1 alter (renamed), 1 drop (idx_to_drop), 1 create (idx_added) + assert_eq!(diff.items.len(), 4); + } +} diff --git a/crates/toasty-core/src/schema/db/migration.rs b/crates/toasty-core/src/schema/db/migration.rs new file mode 100644 index 000000000..cb15b0f19 --- /dev/null +++ b/crates/toasty-core/src/schema/db/migration.rs @@ -0,0 +1,44 @@ +/// Database migration generate from a [`super::SchemaDiff`] by a driver. +pub enum Migration { + Sql(String), +} + +impl Migration { + /// Create a new SQL migration from a single SQL string. + pub fn new_sql(sql: String) -> Self { + Migration::Sql(sql) + } + + /// Create a new SQL migration from multiple SQL statements. + /// Statements are joined with breakpoint markers. + pub fn new_sql_with_breakpoints>(statements: &[S]) -> Self { + let sql = statements + .iter() + .map(|s| s.as_ref()) + .collect::>() + .join("\n-- #[toasty::breakpoint]\n"); + Migration::Sql(sql) + } + + /// Get individual SQL statements by splitting on breakpoint markers. + pub fn statements(&self) -> Vec<&str> { + match self { + Migration::Sql(sql) => sql.split("\n-- #[toasty::breakpoint]\n").collect(), + } + } +} + +/// Metadata about a migration that has already been applied to a database. +pub struct AppliedMigration { + id: u64, +} + +impl AppliedMigration { + pub fn new(id: u64) -> Self { + Self { id } + } + + pub fn id(&self) -> u64 { + self.id + } +} diff --git a/crates/toasty-core/src/schema/db/pk.rs b/crates/toasty-core/src/schema/db/pk.rs index c760bab92..87e29805e 100644 --- a/crates/toasty-core/src/schema/db/pk.rs +++ b/crates/toasty-core/src/schema/db/pk.rs @@ -1,6 +1,7 @@ use super::{ColumnId, IndexId}; -#[derive(Debug, PartialEq)] +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct PrimaryKey { /// Fields composing the primary key pub columns: Vec, diff --git a/crates/toasty-core/src/schema/db/schema.rs b/crates/toasty-core/src/schema/db/schema.rs index 8acf10892..3bda4ecc9 100644 --- a/crates/toasty-core/src/schema/db/schema.rs +++ b/crates/toasty-core/src/schema/db/schema.rs @@ -1,6 +1,9 @@ -use super::{Column, ColumnId, Index, IndexId, Table, TableId}; +use super::{ + Column, ColumnId, DiffContext, Index, IndexId, RenameHints, Table, TableId, TablesDiff, +}; -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Schema { pub tables: Vec, } @@ -14,6 +17,14 @@ impl Schema { .expect("invalid column ID") } + pub fn column_mut(&mut self, id: impl Into) -> &mut Column { + let id = id.into(); + self.table_mut(id.table) + .columns + .get_mut(id.index) + .expect("invalid column ID") + } + // NOTE: this is unlikely to confuse users given the context. #[allow(clippy::should_implement_trait)] pub fn index(&self, id: IndexId) -> &Index { @@ -23,7 +34,53 @@ impl Schema { .expect("invalid index ID") } + // NOTE: this is unlikely to confuse users given the context. 
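Note: `Migration::new_sql_with_breakpoints` joins statements with a `-- #[toasty::breakpoint]` marker so that `statements()` can recover them one at a time. A minimal round-trip sketch (illustrative only, not part of the patch):

    use toasty_core::schema::db::Migration;

    fn breakpoint_round_trip() {
        let migration = Migration::new_sql_with_breakpoints(&[
            "CREATE TABLE users (id BIGINT PRIMARY KEY)",
            "CREATE INDEX idx_users_id ON users (id)",
        ]);

        // `statements()` splits on the breakpoint marker and yields the
        // original statements in order.
        assert_eq!(migration.statements().len(), 2);
    }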
+ #[allow(clippy::should_implement_trait)] + pub fn index_mut(&mut self, id: IndexId) -> &mut Index { + self.table_mut(id.table) + .indices + .get_mut(id.index) + .expect("invalid index ID") + } + pub fn table(&self, id: impl Into) -> &Table { self.tables.get(id.into().0).expect("invalid table ID") } + + pub fn table_mut(&mut self, id: impl Into) -> &mut Table { + self.tables.get_mut(id.into().0).expect("invalid table ID") + } +} + +pub struct SchemaDiff<'a> { + previous: &'a Schema, + next: &'a Schema, + tables: TablesDiff<'a>, +} + +impl<'a> SchemaDiff<'a> { + pub fn from(from: &'a Schema, to: &'a Schema, rename_hints: &'a RenameHints) -> Self { + let cx = &DiffContext::new(from, to, rename_hints); + Self { + previous: from, + next: to, + tables: TablesDiff::from(cx, &from.tables, &to.tables), + } + } + + pub fn tables(&self) -> &TablesDiff<'a> { + &self.tables + } + + pub fn is_empty(&self) -> bool { + self.tables.is_empty() + } + + pub fn previous(&self) -> &'a Schema { + self.previous + } + + pub fn next(&self) -> &'a Schema { + self.next + } } diff --git a/crates/toasty-core/src/schema/db/table.rs b/crates/toasty-core/src/schema/db/table.rs index 2be087ef2..8329e139c 100644 --- a/crates/toasty-core/src/schema/db/table.rs +++ b/crates/toasty-core/src/schema/db/table.rs @@ -1,10 +1,18 @@ use super::{Column, ColumnId, Index, IndexId, PrimaryKey}; -use crate::stmt; +use crate::{ + schema::db::{column::ColumnsDiff, diff::DiffContext, index::IndicesDiff}, + stmt, +}; -use std::fmt; +use std::{ + collections::{HashMap, HashSet}, + fmt, + ops::Deref, +}; /// A database table -#[derive(Debug)] +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Table { /// Uniquely identifies a table pub id: TableId, @@ -22,6 +30,7 @@ pub struct Table { /// Uniquely identifies a table #[derive(PartialEq, Eq, Clone, Copy, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct TableId(pub usize); impl Table { @@ -78,3 +87,248 @@ impl fmt::Debug for TableId { write!(fmt, "TableId({})", self.0) } } + +pub struct TablesDiff<'a> { + items: Vec>, +} + +impl<'a> TablesDiff<'a> { + pub fn from(cx: &DiffContext<'a>, previous: &'a [Table], next: &'a [Table]) -> Self { + let mut items = vec![]; + let mut create_ids: HashSet<_> = next.iter().map(|next| next.id).collect(); + + let next_map = HashMap::<&str, &'a Table>::from_iter( + next.iter().map(|next| (next.name.as_str(), next)), + ); + + for previous in previous { + let next = if let Some(next_id) = cx.rename_hints().get_table(previous.id) { + cx.next().table(next_id) + } else if let Some(to) = next_map.get(previous.name.as_str()) { + to + } else { + items.push(TablesDiffItem::DropTable(previous)); + continue; + }; + + create_ids.remove(&next.id); + + let columns = ColumnsDiff::from(cx, &previous.columns, &next.columns); + let indices = IndicesDiff::from(cx, &previous.indices, &next.indices); + if previous.name != next.name || !columns.is_empty() || !indices.is_empty() { + items.push(TablesDiffItem::AlterTable { + previous, + next, + columns, + indices, + }); + } + } + + for table_id in create_ids { + items.push(TablesDiffItem::CreateTable(cx.next().table(table_id))); + } + + Self { items } + } +} + +impl<'a> Deref for TablesDiff<'a> { + type Target = Vec>; + + fn deref(&self) -> &Self::Target { + &self.items + } +} + +pub enum TablesDiffItem<'a> { + CreateTable(&'a Table), + DropTable(&'a Table), + AlterTable { + previous: &'a Table, + next: &'a Table, + columns: 
ColumnsDiff<'a>, + indices: IndicesDiff<'a>, + }, +} + +#[cfg(test)] +mod tests { + use crate::schema::db::{ + Column, ColumnId, DiffContext, IndexId, PrimaryKey, RenameHints, Schema, Table, TableId, + TablesDiff, TablesDiffItem, Type, + }; + use crate::stmt; + + fn make_table(id: usize, name: &str, num_columns: usize) -> Table { + let mut columns = vec![]; + for i in 0..num_columns { + columns.push(Column { + id: ColumnId { + table: TableId(id), + index: i, + }, + name: format!("col{}", i), + ty: stmt::Type::String, + storage_ty: Type::Text, + nullable: false, + primary_key: false, + auto_increment: false, + }); + } + + Table { + id: TableId(id), + name: name.to_string(), + columns, + primary_key: PrimaryKey { + columns: vec![], + index: IndexId { + table: TableId(id), + index: 0, + }, + }, + indices: vec![], + } + } + + fn make_schema(tables: Vec
) -> Schema { + Schema { tables } + } + + #[test] + fn test_no_diff_same_tables() { + let from_tables = vec![make_table(0, "users", 2), make_table(1, "posts", 3)]; + let to_tables = vec![make_table(0, "users", 2), make_table(1, "posts", 3)]; + + let from_schema = make_schema(from_tables.clone()); + let to_schema = make_schema(to_tables.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = TablesDiff::from(&cx, &from_tables, &to_tables); + assert_eq!(diff.items.len(), 0); + } + + #[test] + fn test_create_table() { + let from_tables = vec![make_table(0, "users", 2)]; + let to_tables = vec![make_table(0, "users", 2), make_table(1, "posts", 3)]; + + let from_schema = make_schema(from_tables.clone()); + let to_schema = make_schema(to_tables.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = TablesDiff::from(&cx, &from_tables, &to_tables); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], TablesDiffItem::CreateTable(_))); + if let TablesDiffItem::CreateTable(table) = diff.items[0] { + assert_eq!(table.name, "posts"); + } + } + + #[test] + fn test_drop_table() { + let from_tables = vec![make_table(0, "users", 2), make_table(1, "posts", 3)]; + let to_tables = vec![make_table(0, "users", 2)]; + + let from_schema = make_schema(from_tables.clone()); + let to_schema = make_schema(to_tables.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = TablesDiff::from(&cx, &from_tables, &to_tables); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], TablesDiffItem::DropTable(_))); + if let TablesDiffItem::DropTable(table) = diff.items[0] { + assert_eq!(table.name, "posts"); + } + } + + #[test] + fn test_rename_table_with_hint() { + let from_tables = vec![make_table(0, "old_users", 2)]; + let to_tables = vec![make_table(0, "new_users", 2)]; + + let from_schema = make_schema(from_tables.clone()); + let to_schema = make_schema(to_tables.clone()); + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = TablesDiff::from(&cx, &from_tables, &to_tables); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], TablesDiffItem::AlterTable { .. })); + if let TablesDiffItem::AlterTable { previous, next, .. 
} = &diff.items[0] { + assert_eq!(previous.name, "old_users"); + assert_eq!(next.name, "new_users"); + } + } + + #[test] + fn test_rename_table_without_hint_is_drop_and_create() { + let from_tables = vec![make_table(0, "old_users", 2)]; + let to_tables = vec![make_table(0, "new_users", 2)]; + + let from_schema = make_schema(from_tables.clone()); + let to_schema = make_schema(to_tables.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = TablesDiff::from(&cx, &from_tables, &to_tables); + assert_eq!(diff.items.len(), 2); + + let has_drop = diff + .items + .iter() + .any(|item| matches!(item, TablesDiffItem::DropTable(_))); + let has_create = diff + .items + .iter() + .any(|item| matches!(item, TablesDiffItem::CreateTable(_))); + assert!(has_drop); + assert!(has_create); + } + + #[test] + fn test_alter_table_column_change() { + let from_tables = vec![make_table(0, "users", 2)]; + let to_tables = vec![make_table(0, "users", 3)]; // added a column + + let from_schema = make_schema(from_tables.clone()); + let to_schema = make_schema(to_tables.clone()); + let hints = RenameHints::new(); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = TablesDiff::from(&cx, &from_tables, &to_tables); + assert_eq!(diff.items.len(), 1); + assert!(matches!(diff.items[0], TablesDiffItem::AlterTable { .. })); + } + + #[test] + fn test_multiple_operations() { + let from_tables = vec![ + make_table(0, "users", 2), + make_table(1, "posts", 3), + make_table(2, "old_table", 1), + ]; + let to_tables = vec![ + make_table(0, "users", 3), // added column + make_table(1, "new_posts", 3), // renamed + make_table(2, "comments", 2), // new table (reused ID 2) + ]; + + let from_schema = make_schema(from_tables.clone()); + let to_schema = make_schema(to_tables.clone()); + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(1), TableId(1)); + let cx = DiffContext::new(&from_schema, &to_schema, &hints); + + let diff = TablesDiff::from(&cx, &from_tables, &to_tables); + // Should have: 1 alter (users added column), 1 alter (posts renamed), 1 drop (old_table), 1 create (comments) + assert_eq!(diff.items.len(), 4); + } +} diff --git a/crates/toasty-core/src/schema/db/ty.rs b/crates/toasty-core/src/schema/db/ty.rs index e26dd3351..7089092ae 100644 --- a/crates/toasty-core/src/schema/db/ty.rs +++ b/crates/toasty-core/src/schema/db/ty.rs @@ -59,6 +59,7 @@ use crate::{driver, stmt, Result}; /// - [`Type::from_app`] - Mapping logic from statement types to database types /// - [`Column`](crate::schema::db::Column) - Schema representation with both type systems #[derive(Debug, Clone, PartialEq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Type { /// A boolean value Boolean, diff --git a/crates/toasty-core/src/stmt/direction.rs b/crates/toasty-core/src/stmt/direction.rs index e3c8acd82..d1d7b4ce6 100644 --- a/crates/toasty-core/src/stmt/direction.rs +++ b/crates/toasty-core/src/stmt/direction.rs @@ -1,4 +1,5 @@ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Direction { Asc, Desc, diff --git a/crates/toasty-core/src/stmt/path_field_set.rs b/crates/toasty-core/src/stmt/path_field_set.rs index a905d1caf..6b4e10b59 100644 --- a/crates/toasty-core/src/stmt/path_field_set.rs +++ b/crates/toasty-core/src/stmt/path_field_set.rs @@ -1,6 +1,7 @@ use bit_set::BitSet; #[derive(Debug, Clone, Default, PartialEq, Eq, 
Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct PathFieldSet { container: BitSet, } diff --git a/crates/toasty-core/src/stmt/ty.rs b/crates/toasty-core/src/stmt/ty.rs index 82c43c925..0445f07f5 100644 --- a/crates/toasty-core/src/stmt/ty.rs +++ b/crates/toasty-core/src/stmt/ty.rs @@ -66,6 +66,7 @@ use crate::{ /// - [`stmt::Value`] - Values typed by this system /// - [`stmt::Expr`] - Expressions typed by this system #[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Type { /// Boolean value Bool, diff --git a/crates/toasty-core/src/stmt/ty_enum.rs b/crates/toasty-core/src/stmt/ty_enum.rs index 9c7b53420..a5ad60472 100644 --- a/crates/toasty-core/src/stmt/ty_enum.rs +++ b/crates/toasty-core/src/stmt/ty_enum.rs @@ -1,11 +1,13 @@ use super::Type; #[derive(Debug, Default, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct TypeEnum { pub variants: Vec, } #[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct EnumVariant { /// Enum discriminant pub discriminant: usize, diff --git a/crates/toasty-driver-dynamodb/src/lib.rs b/crates/toasty-driver-dynamodb/src/lib.rs index 596143b9d..17e2432b1 100644 --- a/crates/toasty-driver-dynamodb/src/lib.rs +++ b/crates/toasty-driver-dynamodb/src/lib.rs @@ -8,7 +8,7 @@ pub(crate) use value::Value; use toasty_core::{ async_trait, driver::{operation::Operation, Capability, Driver, Response}, - schema::db::{Column, ColumnId, Schema, Table}, + schema::db::{Column, ColumnId, Migration, Schema, SchemaDiff, Table}, stmt::{self, ExprContext}, Result, }; @@ -42,6 +42,10 @@ impl Driver for DynamoDb { async fn connect(&self) -> toasty_core::Result> { Ok(Box::new(Connection::connect(&self.url).await?)) } + + fn generate_migration(&self, _schema_diff: &SchemaDiff<'_>) -> Migration { + unimplemented!("DynamoDB migrations are not yet supported. 
DynamoDB schema changes require manual table updates through the AWS console or SDK.") + } } #[derive(Debug)] @@ -122,6 +126,21 @@ impl toasty_core::driver::Connection for Connection { Ok(()) } + + async fn applied_migrations( + &mut self, + ) -> Result> { + todo!("DynamoDB migrations are not yet implemented") + } + + async fn apply_migration( + &mut self, + _id: u64, + _name: String, + _migration: &toasty_core::schema::db::Migration, + ) -> Result<()> { + todo!("DynamoDB migrations are not yet implemented") + } } impl Connection { diff --git a/crates/toasty-driver-integration-suite/src/logging_driver.rs b/crates/toasty-driver-integration-suite/src/logging_driver.rs index 79d3bb001..59a6c65c7 100644 --- a/crates/toasty-driver-integration-suite/src/logging_driver.rs +++ b/crates/toasty-driver-integration-suite/src/logging_driver.rs @@ -3,7 +3,7 @@ use toasty::driver::Driver; use toasty_core::{ async_trait, driver::{Capability, Connection, Operation, Response, Rows}, - schema::db::Schema, + schema::db::{AppliedMigration, Migration, Schema, SchemaDiff}, Result, }; @@ -38,6 +38,10 @@ impl Driver for LoggingDriver { ops_log: self.ops_log_handle(), })) } + + fn generate_migration(&self, schema_diff: &SchemaDiff<'_>) -> Migration { + self.inner.generate_migration(schema_diff) + } } #[derive(Debug)] @@ -90,6 +94,19 @@ impl Connection for LoggingConnection { async fn reset_db(&mut self, schema: &Schema) -> Result<()> { self.inner.reset_db(schema).await } + + async fn applied_migrations(&mut self) -> Result> { + self.inner.applied_migrations().await + } + + async fn apply_migration( + &mut self, + id: u64, + name: String, + migration: &Migration, + ) -> Result<()> { + self.inner.apply_migration(id, name, migration).await + } } /// Duplicate a Response, using ValueStream::dup() for value streams diff --git a/crates/toasty-driver-mysql/src/lib.rs b/crates/toasty-driver-mysql/src/lib.rs index 95cb2bfc9..d5479bd6e 100644 --- a/crates/toasty-driver-mysql/src/lib.rs +++ b/crates/toasty-driver-mysql/src/lib.rs @@ -11,11 +11,11 @@ use std::sync::Arc; use toasty_core::{ async_trait, driver::{operation::Transaction, Capability, Driver, Operation, Response}, - schema::db::{Schema, Table}, + schema::db::{Migration, Schema, SchemaDiff, Table}, stmt::{self, ValueRecord}, Result, }; -use toasty_sql as sql; +use toasty_sql::{self as sql, TypedValue}; use url::Url; #[derive(Debug)] @@ -74,6 +74,26 @@ impl Driver for MySQL { .map_err(toasty_core::Error::driver_operation_failed)?; Ok(Box::new(Connection::new(conn))) } + + fn generate_migration(&self, schema_diff: &SchemaDiff<'_>) -> Migration { + let statements = sql::MigrationStatement::from_diff(schema_diff, &Capability::MYSQL); + + let sql_strings: Vec = statements + .iter() + .map(|stmt| { + let mut params = Vec::::new(); + let sql = + sql::Serializer::mysql(stmt.schema()).serialize(stmt.statement(), &mut params); + assert!( + params.is_empty(), + "migration statements should not have parameters" + ); + sql + }) + .collect(); + + Migration::new_sql_with_breakpoints(&sql_strings) + } } #[derive(Debug)] @@ -306,4 +326,98 @@ impl toasty_core::driver::Connection for Connection { Ok(()) } + + async fn applied_migrations( + &mut self, + ) -> Result> { + // Ensure the migrations table exists + self.conn + .exec_drop( + "CREATE TABLE IF NOT EXISTS __toasty_migrations ( + id BIGINT UNSIGNED PRIMARY KEY, + name TEXT NOT NULL, + applied_at TIMESTAMP NOT NULL + )", + (), + ) + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Query all applied migrations 
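Note: `applied_migrations` and `apply_migration` together form the apply protocol: the first reads the `__toasty_migrations` bookkeeping table, the second runs a migration's statements in a transaction and then records it. A sketch of how a caller might combine them (hypothetical helper, assuming a boxed connection as returned by `Driver::connect`):

    use std::collections::HashSet;
    use toasty_core::driver::Connection;
    use toasty_core::schema::db::Migration;
    use toasty_core::Result;

    async fn apply_pending(
        conn: &mut dyn Connection,
        migrations: Vec<(u64, String, Migration)>,
    ) -> Result<()> {
        // Ids already recorded in __toasty_migrations.
        let applied: HashSet<u64> = conn
            .applied_migrations()
            .await?
            .iter()
            .map(|m| m.id())
            .collect();

        for (id, name, migration) in migrations {
            if !applied.contains(&id) {
                conn.apply_migration(id, name, &migration).await?;
            }
        }
        Ok(())
    }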
+ let rows: Vec = self + .conn + .exec("SELECT id FROM __toasty_migrations ORDER BY applied_at", ()) + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + + Ok(rows + .into_iter() + .map(toasty_core::schema::db::AppliedMigration::new) + .collect()) + } + + async fn apply_migration( + &mut self, + id: u64, + name: String, + migration: &toasty_core::schema::db::Migration, + ) -> Result<()> { + // Ensure the migrations table exists + self.conn + .exec_drop( + "CREATE TABLE IF NOT EXISTS __toasty_migrations ( + id BIGINT UNSIGNED PRIMARY KEY, + name TEXT NOT NULL, + applied_at TIMESTAMP NOT NULL + )", + (), + ) + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Start transaction + let mut transaction = self + .conn + .start_transaction(Default::default()) + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Execute each migration statement + for statement in migration.statements() { + if let Err(e) = transaction + .query_drop(statement) + .await + .map_err(toasty_core::Error::driver_operation_failed) + { + transaction + .rollback() + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + return Err(e); + } + } + + // Record the migration + if let Err(e) = transaction + .exec_drop( + "INSERT INTO __toasty_migrations (id, name, applied_at) VALUES (?, ?, NOW())", + (id, name), + ) + .await + .map_err(toasty_core::Error::driver_operation_failed) + { + transaction + .rollback() + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + return Err(e); + } + + // Commit transaction + transaction + .commit() + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + Ok(()) + } } diff --git a/crates/toasty-driver-postgresql/src/lib.rs b/crates/toasty-driver-postgresql/src/lib.rs index f171b822c..cf0ff30dd 100644 --- a/crates/toasty-driver-postgresql/src/lib.rs +++ b/crates/toasty-driver-postgresql/src/lib.rs @@ -9,12 +9,12 @@ use std::sync::Arc; use toasty_core::{ async_trait, driver::{Capability, Driver, Operation, Response}, - schema::db::{Schema, Table}, + schema::db::{Migration, Schema, SchemaDiff, Table}, stmt, stmt::ValueRecord, Result, }; -use toasty_sql as sql; +use toasty_sql::{self as sql, TypedValue}; use tokio_postgres::{Client, Config}; use url::Url; @@ -84,6 +84,26 @@ impl Driver for PostgreSQL { Connection::connect(self.config.clone(), tokio_postgres::NoTls).await?, )) } + + fn generate_migration(&self, schema_diff: &SchemaDiff<'_>) -> Migration { + let statements = sql::MigrationStatement::from_diff(schema_diff, &Capability::POSTGRESQL); + + let sql_strings: Vec = statements + .iter() + .map(|stmt| { + let mut params = Vec::::new(); + let sql = sql::Serializer::postgresql(stmt.schema()) + .serialize(stmt.statement(), &mut params); + assert!( + params.is_empty(), + "migration statements should not have parameters" + ); + sql + }) + .collect(); + + Migration::new_sql(sql_strings.join("\n")) + } } #[derive(Debug)] @@ -297,4 +317,104 @@ impl toasty_core::driver::Connection for Connection { Ok(()) } + + async fn applied_migrations( + &mut self, + ) -> Result> { + // Ensure the migrations table exists + self.client + .execute( + "CREATE TABLE IF NOT EXISTS __toasty_migrations ( + id BIGINT PRIMARY KEY, + name TEXT NOT NULL, + applied_at TIMESTAMP NOT NULL + )", + &[], + ) + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Query all applied migrations + let rows = self + .client + .query( + "SELECT id FROM __toasty_migrations ORDER BY applied_at", + &[], + ) + .await + 
.map_err(toasty_core::Error::driver_operation_failed)?; + + Ok(rows + .iter() + .map(|row| { + let id: i64 = row.get(0); + toasty_core::schema::db::AppliedMigration::new(id as u64) + }) + .collect()) + } + + async fn apply_migration( + &mut self, + id: u64, + name: String, + migration: &toasty_core::schema::db::Migration, + ) -> Result<()> { + // Ensure the migrations table exists + self.client + .execute( + "CREATE TABLE IF NOT EXISTS __toasty_migrations ( + id BIGINT PRIMARY KEY, + name TEXT NOT NULL, + applied_at TIMESTAMP NOT NULL + )", + &[], + ) + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Start transaction + let transaction = self + .client + .transaction() + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Execute each migration statement + for statement in migration.statements() { + if let Err(e) = transaction + .execute(statement, &[]) + .await + .map_err(toasty_core::Error::driver_operation_failed) + { + transaction + .rollback() + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + return Err(e); + } + } + + // Record the migration + if let Err(e) = transaction + .execute( + "INSERT INTO __toasty_migrations (id, name, applied_at) VALUES ($1, $2, NOW())", + &[&(id as i64), &name], + ) + .await + .map_err(toasty_core::Error::driver_operation_failed) + { + transaction + .rollback() + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + return Err(e); + } + + // Commit transaction + transaction + .commit() + .await + .map_err(toasty_core::Error::driver_operation_failed)?; + Ok(()) + } } diff --git a/crates/toasty-driver-sqlite/src/lib.rs b/crates/toasty-driver-sqlite/src/lib.rs index 3cc5435d9..f04cd1438 100644 --- a/crates/toasty-driver-sqlite/src/lib.rs +++ b/crates/toasty-driver-sqlite/src/lib.rs @@ -12,10 +12,10 @@ use toasty_core::{ operation::{Operation, Transaction}, Capability, Driver, Response, }, - schema::db::{Schema, Table}, + schema::db::{Migration, Schema, SchemaDiff, Table}, stmt, Result, }; -use toasty_sql as sql; +use toasty_sql::{self as sql, TypedValue}; use url::Url; #[derive(Debug)] @@ -68,6 +68,26 @@ impl Driver for Sqlite { fn max_connections(&self) -> Option { matches!(self, Self::InMemory).then_some(1) } + + fn generate_migration(&self, schema_diff: &SchemaDiff<'_>) -> Migration { + let statements = sql::MigrationStatement::from_diff(schema_diff, &Capability::SQLITE); + + let sql_strings: Vec = statements + .iter() + .map(|stmt| { + let mut params = Vec::::new(); + let sql = + sql::Serializer::sqlite(stmt.schema()).serialize(stmt.statement(), &mut params); + assert!( + params.is_empty(), + "migration statements should not have parameters" + ); + sql + }) + .collect(); + + Migration::new_sql_with_breakpoints(&sql_strings) + } } #[derive(Debug)] @@ -207,6 +227,91 @@ impl toasty_core::driver::Connection for Connection { Ok(()) } + + async fn applied_migrations( + &mut self, + ) -> Result> { + // Ensure the migrations table exists + self.connection + .execute( + "CREATE TABLE IF NOT EXISTS __toasty_migrations ( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + applied_at TEXT NOT NULL + )", + [], + ) + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Query all applied migrations + let mut stmt = self + .connection + .prepare("SELECT id FROM __toasty_migrations ORDER BY applied_at") + .map_err(toasty_core::Error::driver_operation_failed)?; + + let rows = stmt + .query_map([], |row| { + let id: u64 = row.get(0)?; + Ok(toasty_core::schema::db::AppliedMigration::new(id)) 
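Note: each SQL driver implements `generate_migration` the same way: lower the `SchemaDiff` to `MigrationStatement`s and serialize them with the driver's own flavor. The calling side would look roughly like this (assumed usage; `plan_migration` is a hypothetical helper, not part of this PR):

    use toasty_core::driver::Driver;
    use toasty_core::schema::db::{RenameHints, Schema, SchemaDiff};

    fn plan_migration<D: Driver>(driver: &D, previous: &Schema, next: &Schema) {
        let hints = RenameHints::new();
        let diff = SchemaDiff::from(previous, next, &hints);

        if !diff.is_empty() {
            // The driver serializes the diff using its own SQL flavor.
            let migration = driver.generate_migration(&diff);
            for statement in migration.statements() {
                println!("{statement}");
            }
        }
    }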
+ }) + .map_err(toasty_core::Error::driver_operation_failed)?; + + rows.collect::>>() + .map_err(toasty_core::Error::driver_operation_failed) + } + + async fn apply_migration( + &mut self, + id: u64, + name: String, + migration: &toasty_core::schema::db::Migration, + ) -> Result<()> { + // Ensure the migrations table exists + self.connection + .execute( + "CREATE TABLE IF NOT EXISTS __toasty_migrations ( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + applied_at TEXT NOT NULL + )", + [], + ) + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Start transaction + self.connection + .execute("BEGIN", []) + .map_err(toasty_core::Error::driver_operation_failed)?; + + // Execute each migration statement + for statement in migration.statements() { + if let Err(e) = self + .connection + .execute(statement, []) + .map_err(toasty_core::Error::driver_operation_failed) + { + self.connection + .execute("ROLLBACK", []) + .map_err(toasty_core::Error::driver_operation_failed)?; + return Err(e); + } + } + + // Record the migration + if let Err(e) = self.connection.execute( + "INSERT INTO __toasty_migrations (id, name, applied_at) VALUES (?1, ?2, datetime('now'))", + rusqlite::params![id, name], + ).map_err(toasty_core::Error::driver_operation_failed) { + self.connection.execute("ROLLBACK", []).map_err(toasty_core::Error::driver_operation_failed)?; + return Err(e); + } + + // Commit transaction + self.connection + .execute("COMMIT", []) + .map_err(toasty_core::Error::driver_operation_failed)?; + Ok(()) + } } impl Connection { diff --git a/crates/toasty-sql/src/lib.rs b/crates/toasty-sql/src/lib.rs index 241e6c6e3..443f5fe0d 100644 --- a/crates/toasty-sql/src/lib.rs +++ b/crates/toasty-sql/src/lib.rs @@ -1,3 +1,6 @@ +pub mod migration; +pub use migration::*; + pub mod serializer; pub use serializer::{Params, Serializer, TypedValue}; diff --git a/crates/toasty-sql/src/migration.rs b/crates/toasty-sql/src/migration.rs new file mode 100644 index 000000000..18a4d24c8 --- /dev/null +++ b/crates/toasty-sql/src/migration.rs @@ -0,0 +1,259 @@ +use std::borrow::Cow; + +use toasty_core::{ + driver::Capability, + schema::db::{ + ColumnsDiff, ColumnsDiffItem, IndicesDiffItem, Schema, SchemaDiff, Table, TablesDiffItem, + }, +}; + +use crate::stmt::{AlterColumnChanges, AlterTable, AlterTableAction, DropTable, Name, Statement}; + +pub struct MigrationStatement<'a> { + statement: Statement, + schema: Cow<'a, Schema>, +} + +impl<'a> MigrationStatement<'a> { + fn new(statement: Statement, schema: Cow<'a, Schema>) -> Self { + MigrationStatement { statement, schema } + } + + pub fn from_diff(schema_diff: &'a SchemaDiff<'a>, capability: &Capability) -> Vec { + let mut result = Vec::new(); + for table in schema_diff.tables().iter() { + match table { + TablesDiffItem::CreateTable(table) => { + result.push(Self::new( + Statement::create_table(table, capability), + Cow::Borrowed(schema_diff.next()), + )); + for index in &table.indices { + result.push(Self::new( + Statement::create_index(index), + Cow::Borrowed(schema_diff.next()), + )); + } + } + TablesDiffItem::DropTable(table) => result.push(Self::new( + Statement::drop_table(table), + Cow::Borrowed(schema_diff.previous()), + )), + TablesDiffItem::AlterTable { + previous, + next, + columns, + indices, + .. 
+ } => { + let mut schema = Cow::Borrowed(schema_diff.previous()); + if previous.name != next.name { + result.push(Self::new( + Statement::alter_table_rename_to(previous, &next.name), + schema.clone(), + )); + schema.to_mut().table_mut(previous.id).name = next.name.clone(); + } + + // Check if any column alteration requires table recreation + // (e.g. SQLite can't alter column type/nullability/auto_increment) + let needs_recreation = !capability.schema_mutations.alter_column_type + && columns.iter().any(|item| { + matches!( + item, + ColumnsDiffItem::AlterColumn { + previous: prev_col, + next: next_col + } if AlterColumnChanges::from_diff(prev_col, next_col).has_type_change() + ) + }); + + if needs_recreation { + Self::emit_table_recreation( + &mut result, + &schema, + previous, + next, + columns, + capability, + ); + } else { + Self::emit_column_changes(&mut result, &schema, columns, capability); + } + + // Indices diff + for item in indices.iter() { + match item { + IndicesDiffItem::CreateIndex(index) => { + result.push(Self::new( + Statement::create_index(index), + Cow::Borrowed(schema_diff.next()), + )); + } + IndicesDiffItem::DropIndex(index) => { + result.push(Self::new( + Statement::drop_index(index), + Cow::Borrowed(schema_diff.previous()), + )); + } + IndicesDiffItem::AlterIndex { previous, next } => { + result.push(Self::new( + Statement::drop_index(previous), + Cow::Borrowed(schema_diff.previous()), + )); + result.push(Self::new( + Statement::create_index(next), + Cow::Borrowed(schema_diff.next()), + )); + } + } + } + } + } + } + result + } + + fn emit_table_recreation( + result: &mut Vec, + schema: &Cow<'a, Schema>, + previous: &Table, + next: &Table, + columns: &ColumnsDiff<'_>, + capability: &Capability, + ) { + let current_name = schema.table(previous.id).name.clone(); + let temp_name = format!("_toasty_new_{}", current_name); + + // 1. PRAGMA foreign_keys = OFF + result.push(Self::new( + Statement::pragma_disable_foreign_keys(), + schema.clone(), + )); + + // 2. CREATE TABLE temp with new schema + let temp_schema = { + let mut s = schema.as_ref().clone(); + let t = s.table_mut(next.id); + t.name = temp_name.clone(); + t.columns = next.columns.clone(); + t.primary_key = next.primary_key.clone(); + s + }; + result.push(Self::new( + Statement::create_table(next, capability), + Cow::Owned(temp_schema), + )); + + // 3. INSERT INTO temp SELECT ... FROM current + let column_mappings: Vec<(Name, Name)> = next + .columns + .iter() + .filter(|col| { + // Skip added columns (no source data) + !columns + .iter() + .any(|item| matches!(item, ColumnsDiffItem::AddColumn(c) if c.id == col.id)) + }) + .map(|col| { + let target_name = Name::from(&col.name[..]); + // Check if this column was renamed + let source_name = columns + .iter() + .find_map(|item| match item { + ColumnsDiffItem::AlterColumn { + previous: prev_col, + next: next_col, + } if next_col.id == col.id && prev_col.name != next_col.name => { + Some(Name::from(&prev_col.name[..])) + } + _ => None, + }) + .unwrap_or_else(|| Name::from(&col.name[..])); + (target_name, source_name) + }) + .collect(); + + result.push(Self::new( + Statement::copy_table( + Name::from(current_name.as_str()), + Name::from(temp_name.as_str()), + column_mappings, + ), + schema.clone(), + )); + + // 4. DROP TABLE current + result.push(Self::new( + DropTable { + name: Name::from(current_name.as_str()), + if_exists: false, + } + .into(), + schema.clone(), + )); + + // 5. 
ALTER TABLE temp RENAME TO current + result.push(Self::new( + AlterTable { + name: Name::from(temp_name.as_str()), + action: AlterTableAction::RenameTo(Name::from(current_name.as_str())), + } + .into(), + schema.clone(), + )); + + // 6. PRAGMA foreign_keys = ON + result.push(Self::new( + Statement::pragma_enable_foreign_keys(), + schema.clone(), + )); + } + + fn emit_column_changes( + result: &mut Vec, + schema: &Cow<'a, Schema>, + columns: &ColumnsDiff<'_>, + capability: &Capability, + ) { + for item in columns.iter() { + match item { + ColumnsDiffItem::AddColumn(column) => { + result.push(Self::new( + Statement::add_column(column, capability), + schema.clone(), + )); + } + ColumnsDiffItem::DropColumn(column) => { + result.push(Self::new(Statement::drop_column(column), schema.clone())); + } + ColumnsDiffItem::AlterColumn { + previous, + next: col_next, + } => { + let changes = AlterColumnChanges::from_diff(previous, col_next); + let changes = if capability.schema_mutations.alter_column_properties_atomic { + vec![changes] + } else { + changes.split() + }; + + for changes in changes { + result.push(Self::new( + Statement::alter_column(previous, changes, capability), + schema.clone(), + )); + } + } + } + } + } + + pub fn statement(&self) -> &Statement { + &self.statement + } + + pub fn schema(&self) -> &Schema { + &self.schema + } +} diff --git a/crates/toasty-sql/src/serializer/statement.rs b/crates/toasty-sql/src/serializer/statement.rs index a6d78ba0e..11b8d6052 100644 --- a/crates/toasty-sql/src/serializer/statement.rs +++ b/crates/toasty-sql/src/serializer/statement.rs @@ -2,7 +2,10 @@ use std::mem; use super::{ColumnAlias, Comma, Delimited, Ident, Params, ToSql}; -use crate::{serializer::ExprContext, stmt}; +use crate::{ + serializer::{ExprContext, Flavor}, + stmt::{self, AlterColumnChanges, ColumnDef}, +}; use toasty_core::{schema::db, stmt::SourceTableId}; struct ColumnsWithConstraints<'a>(&'a stmt::CreateTable); @@ -41,13 +44,16 @@ impl ToSql for ColumnsWithConstraints<'_> { true }; - let columns = Comma(&self.0.columns); + for (index, column) in self.0.columns.iter().enumerate() { + fmt!(cx, f, "\n " column); + if index < self.0.columns.len() - 1 { + fmt!(cx, f, ","); + } + } match &self.0.primary_key { - Some(pk) if trailing_pk => { - fmt!(cx, f, columns ", PRIMARY KEY " pk); - } - _ => fmt!(cx, f, columns), + Some(pk) if trailing_pk => fmt!(cx, f, ",\n PRIMARY KEY " pk "\n"), + _ => fmt!(cx, f, "\n"), } } } @@ -70,6 +76,110 @@ impl ToSql for &stmt::CreateIndex { } } +impl ToSql for &stmt::AddColumn { + fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { + let table = f.serializer.table(self.table); + let table_name = Ident(&table.name); + + // Create new expression scope to serialize the statement + let cx = cx.scope(table); + + fmt!( + &cx, f, "ALTER TABLE " table_name " ADD COLUMN " self.column + ); + } +} + +impl ToSql for &stmt::AlterColumn { + fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { + let table = f.serializer.table(self.id.table); + let table_name = Ident(&table.name); + + // Create new expression scope to serialize the statement + let cx = cx.scope(table); + + let column_name = Ident(&self.column_def.name); + + match f.serializer.flavor { + Flavor::Postgresql => match &self.changes { + AlterColumnChanges { + new_name: Some(name), + new_ty: None, + new_not_null: None, + new_auto_increment: None, + } => fmt!(&cx, f, "ALTER TABLE " table_name " RENAME COLUMN " column_name " TO " Ident(name.as_str())), + AlterColumnChanges { 
+ new_name: None, + new_ty: Some(ty), + new_not_null: None, + new_auto_increment: None, + } => fmt!(&cx, f, "ALTER TABLE " table_name " ALTER COLUMN " column_name " TYPE " ty), + AlterColumnChanges { + new_name: None, + new_ty: None, + new_not_null: Some(true), + new_auto_increment: None, + } => fmt!(&cx, f, "ALTER TABLE " table_name " ALTER COLUMN " column_name " SET NOT NULL"), + AlterColumnChanges { + new_name: None, + new_ty: None, + new_not_null: Some(false), + new_auto_increment: None, + } => fmt!(&cx, f, "ALTER TABLE " table_name " ALTER COLUMN " column_name " DROP NOT NULL"), + AlterColumnChanges { + new_name: None, + new_ty: None, + new_not_null: None, + new_auto_increment: Some(true), + } => fmt!(&cx, f, "ALTER TABLE " table_name " ALTER COLUMN " column_name " ADD GENERATED BY DEFAULT AS IDENTITY"), + AlterColumnChanges { + new_name: None, + new_ty: None, + new_not_null: None, + new_auto_increment: Some(false), + } => fmt!(&cx, f, "ALTER TABLE " table_name " ALTER COLUMN " column_name " DROP IDENTITY"), + _ => panic!("PostgreSQL does not support modifying multiple column properties in one ALTER TABLE statement") + }, + Flavor::Mysql => { + let new_column_def = ColumnDef { + name: self.changes.new_name.as_ref().unwrap_or(&self.column_def.name).clone(), + ty: self.changes.new_ty.as_ref().unwrap_or(&self.column_def.ty).clone(), + not_null: self.changes.new_not_null.unwrap_or(self.column_def.not_null), + auto_increment: self.changes.new_auto_increment.unwrap_or(self.column_def.auto_increment), + }; + fmt!(&cx, f, "ALTER TABLE " table_name " CHANGE COLUMN " column_name " " new_column_def) + }, + Flavor::Sqlite => match &self.changes { + AlterColumnChanges { + new_name: Some(name), + new_ty: None, + new_not_null: None, + new_auto_increment: None, + } => fmt!(&cx, f, "ALTER TABLE " table_name " RENAME COLUMN " column_name " TO " Ident(name.as_str())), + _ => panic!("SQLite only supports renaming columns in ALTER TABLE statement") + }, + } + } +} + +impl ToSql for &stmt::AlterTable { + fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { + match &self.action { + stmt::AlterTableAction::RenameTo(new_name) => { + fmt!(cx, f, "ALTER TABLE " self.name " RENAME TO " new_name); + } + } + } +} + +impl ToSql for &stmt::CopyTable { + fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { + let target_cols = Comma(self.columns.iter().map(|(target, _)| target)); + let source_cols = Comma(self.columns.iter().map(|(_, source)| source)); + fmt!(cx, f, "INSERT INTO " self.target " (" target_cols ") SELECT " source_cols " FROM " self.source); + } +} + impl ToSql for &stmt::CreateTable { fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { let table = f.serializer.table(self.table); @@ -117,6 +227,38 @@ impl ToSql for &stmt::Direction { } } +impl ToSql for &stmt::DropColumn { + fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { + let table = f.serializer.table(self.table); + let table_name = Ident(&table.name); + let if_exists = if self.if_exists { "IF EXISTS " } else { "" }; + + // Create new expression scope to serialize the statement + let cx = cx.scope(table); + + fmt!(&cx, f, "ALTER TABLE " table_name " DROP COLUMN " if_exists self.name); + } +} + +impl ToSql for &stmt::DropIndex { + fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { + let if_exists = if self.if_exists { "IF EXISTS " } else { "" }; + fmt!(cx, f, "DROP INDEX " if_exists self.name); + } +} + +impl ToSql for &stmt::Pragma { + fn 
to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { + if !f.serializer.is_sqlite() { + panic!("\"PRAGMA\" statements only supported in SQLite"); + } + match &self.value { + Some(value) => fmt!(cx, f, "PRAGMA " self.name.as_str() " = " value.as_str()), + None => fmt!(cx, f, "PRAGMA " self.name.as_str()), + } + } +} + impl ToSql for &stmt::DropTable { fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { let if_exists = if self.if_exists { "IF EXISTS " } else { "" }; @@ -315,9 +457,16 @@ impl ToSql for &toasty_core::stmt::Statement { impl ToSql for &stmt::Statement { fn to_sql(self, cx: &ExprContext<'_>, f: &mut super::Formatter<'_, P>) { match self { + stmt::Statement::AddColumn(stmt) => stmt.to_sql(cx, f), + stmt::Statement::AlterColumn(stmt) => stmt.to_sql(cx, f), + stmt::Statement::AlterTable(stmt) => stmt.to_sql(cx, f), + stmt::Statement::CopyTable(stmt) => stmt.to_sql(cx, f), stmt::Statement::CreateIndex(stmt) => stmt.to_sql(cx, f), stmt::Statement::CreateTable(stmt) => stmt.to_sql(cx, f), + stmt::Statement::DropColumn(stmt) => stmt.to_sql(cx, f), + stmt::Statement::DropIndex(stmt) => stmt.to_sql(cx, f), stmt::Statement::DropTable(stmt) => stmt.to_sql(cx, f), + stmt::Statement::Pragma(stmt) => stmt.to_sql(cx, f), stmt::Statement::Delete(stmt) => stmt.to_sql(cx, f), stmt::Statement::Insert(stmt) => stmt.to_sql(cx, f), stmt::Statement::Query(stmt) => stmt.to_sql(cx, f), diff --git a/crates/toasty-sql/src/serializer/ty.rs b/crates/toasty-sql/src/serializer/ty.rs index ae1c9c48a..9622d72e1 100644 --- a/crates/toasty-sql/src/serializer/ty.rs +++ b/crates/toasty-sql/src/serializer/ty.rs @@ -10,7 +10,7 @@ impl ToSql for &db::Type { db::Type::Boolean => fmt!(cx, f, "BOOLEAN"), db::Type::Integer(1..=2) => fmt!(cx, f, "SMALLINT"), db::Type::Integer(3..=4) => fmt!(cx, f, "INTEGER"), - db::Type::Integer(5..=8) => fmt!(cx, f, "bigint"), + db::Type::Integer(5..=8) => fmt!(cx, f, "BIGINT"), db::Type::Integer(_) => todo!(), db::Type::UnsignedInteger(size) => { match f.serializer.flavor { diff --git a/crates/toasty-sql/src/stmt.rs b/crates/toasty-sql/src/stmt.rs index 4df3eabc3..85e93b28a 100644 --- a/crates/toasty-sql/src/stmt.rs +++ b/crates/toasty-sql/src/stmt.rs @@ -1,25 +1,59 @@ +mod add_column; +pub use add_column::AddColumn; + +mod alter_column; +pub use alter_column::{AlterColumn, AlterColumnChanges}; + +mod alter_table; +pub use alter_table::{AlterTable, AlterTableAction}; + mod column_def; pub use column_def::ColumnDef; +mod copy_table; +pub use copy_table::CopyTable; + mod create_index; pub use create_index::CreateIndex; mod create_table; pub use create_table::CreateTable; +mod drop_column; +pub use drop_column::DropColumn; + +mod drop_index; +pub use drop_index::DropIndex; + mod drop_table; pub use drop_table::DropTable; +mod ident; +pub use ident::Ident; + mod name; pub use name::Name; +mod pragma; +pub use pragma::Pragma; + +mod table_name; +pub use table_name::TableName; + pub use toasty_core::stmt::*; #[derive(Debug, Clone)] pub enum Statement { + AddColumn(AddColumn), + AlterColumn(AlterColumn), + AlterTable(AlterTable), + CopyTable(CopyTable), CreateIndex(CreateIndex), CreateTable(CreateTable), + DropColumn(DropColumn), DropTable(DropTable), + DropIndex(DropIndex), + Pragma(Pragma), Delete(Delete), Insert(Insert), Query(Query), diff --git a/crates/toasty-sql/src/stmt/add_column.rs b/crates/toasty-sql/src/stmt/add_column.rs new file mode 100644 index 000000000..e42a5f499 --- /dev/null +++ b/crates/toasty-sql/src/stmt/add_column.rs @@ -0,0 +1,33 @@ 
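Note: each `MigrationStatement` carries the schema snapshot it must be serialized against (after a table rename, later statements have to resolve the new name), which is why `from_diff` clones and patches the schema as it goes. Serializing a plan mirrors the driver code above; a rough sketch (illustrative helper, assumes `Vec<TypedValue>` as the params sink as in the drivers):

    use toasty_core::{driver::Capability, schema::db::SchemaDiff};
    use toasty_sql::{MigrationStatement, Serializer, TypedValue};

    fn to_sql(diff: &SchemaDiff<'_>) -> Vec<String> {
        MigrationStatement::from_diff(diff, &Capability::SQLITE)
            .iter()
            .map(|ms| {
                // Migration DDL is expected to carry no bind parameters.
                let mut params = Vec::<TypedValue>::new();
                Serializer::sqlite(ms.schema()).serialize(ms.statement(), &mut params)
            })
            .collect()
    }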
+use super::{ColumnDef, Statement}; + +use toasty_core::{ + driver::Capability, + schema::db::{Column, TableId}, +}; + +/// A statement to add a column to a table. +#[derive(Debug, Clone)] +pub struct AddColumn { + /// ID of the table to add the column to. + pub table: TableId, + + /// Column definition. + pub column: ColumnDef, +} + +impl Statement { + /// Adds a column to a table. + pub fn add_column(column: &Column, capability: &Capability) -> Self { + AddColumn { + table: column.id.table, + column: ColumnDef::from_schema(column, &capability.storage_types), + } + .into() + } +} + +impl From for Statement { + fn from(value: AddColumn) -> Self { + Self::AddColumn(value) + } +} diff --git a/crates/toasty-sql/src/stmt/alter_column.rs b/crates/toasty-sql/src/stmt/alter_column.rs new file mode 100644 index 000000000..8ce93102a --- /dev/null +++ b/crates/toasty-sql/src/stmt/alter_column.rs @@ -0,0 +1,117 @@ +use crate::stmt::ColumnDef; + +use super::Statement; + +use toasty_core::{ + driver::Capability, + schema::db::{Column, ColumnId, Type}, +}; + +/// A statement to alter a column in a table. +#[derive(Debug, Clone)] +pub struct AlterColumn { + /// ID of the column being altered. + pub id: ColumnId, + + /// Current column definition. + pub column_def: ColumnDef, + + /// Changes to be made to the column. + pub changes: AlterColumnChanges, +} + +/// A statement to alter a column in a table. +#[derive(Debug, Clone)] +pub struct AlterColumnChanges { + /// New name for the column (if renaming). + pub new_name: Option, + + /// New type information. + pub new_ty: Option, + + /// New nullability constraint. + pub new_not_null: Option, + + /// New auto increment behavior. + pub new_auto_increment: Option, +} + +impl AlterColumnChanges { + pub fn from_diff(previous: &Column, next: &Column) -> Self { + Self { + new_name: (previous.name != next.name).then(|| next.name.clone()), + new_ty: (previous.storage_ty != next.storage_ty).then(|| next.storage_ty.clone()), + new_not_null: (previous.nullable != next.nullable).then_some(!next.nullable), + new_auto_increment: (previous.auto_increment != next.auto_increment) + .then_some(next.auto_increment), + } + } + + /// Splits up this set of changes into a [`Vec`] of individual changes. + pub fn split(self) -> Vec { + let Self { + new_name, + new_ty, + new_not_null, + new_auto_increment, + } = self; + let default = AlterColumnChanges { + new_name: None, + new_ty: None, + new_not_null: None, + new_auto_increment: None, + }; + let mut result = vec![]; + if new_name.is_some() { + result.push(Self { + new_name, + ..default.clone() + }); + } + if new_ty.is_some() { + result.push(Self { + new_ty, + ..default.clone() + }); + } + if new_not_null.is_some() { + result.push(Self { + new_not_null, + ..default.clone() + }); + } + if new_auto_increment.is_some() { + result.push(Self { + new_auto_increment, + ..default.clone() + }); + } + result + } + + pub fn has_type_change(&self) -> bool { + self.new_ty.is_some() || self.new_not_null.is_some() || self.new_auto_increment.is_some() + } +} + +impl Statement { + /// Alters a column. 
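Note: `AlterColumnChanges::split` exists for backends where `schema_mutations.alter_column_properties_atomic` is false (see `emit_column_changes` above): a combined change set becomes one single-property change per ALTER statement. Illustrative sketch (assumes the `stmt` module is public):

    use toasty_core::schema::db::Type;
    use toasty_sql::stmt::AlterColumnChanges;

    fn split_example() {
        let changes = AlterColumnChanges {
            new_name: Some("email".to_string()),
            new_ty: Some(Type::Text),
            new_not_null: Some(true),
            new_auto_increment: None,
        };

        // One change per property: rename, type change, NOT NULL.
        assert_eq!(changes.split().len(), 3);
    }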
+ pub fn alter_column( + column: &Column, + changes: AlterColumnChanges, + capability: &Capability, + ) -> Self { + AlterColumn { + id: column.id, + column_def: ColumnDef::from_schema(column, &capability.storage_types), + changes, + } + .into() + } +} + +impl From for Statement { + fn from(value: AlterColumn) -> Self { + Self::AlterColumn(value) + } +} diff --git a/crates/toasty-sql/src/stmt/alter_table.rs b/crates/toasty-sql/src/stmt/alter_table.rs new file mode 100644 index 000000000..582f72d93 --- /dev/null +++ b/crates/toasty-sql/src/stmt/alter_table.rs @@ -0,0 +1,37 @@ +use super::{Name, Statement}; + +use toasty_core::schema::db::Table; + +/// A statement to alter a SQL table. +#[derive(Debug, Clone)] +pub struct AlterTable { + /// Current name of the table. + pub name: Name, + + /// The alteration to apply. + pub action: AlterTableAction, +} + +/// The action to perform in an ALTER TABLE statement. +#[derive(Debug, Clone)] +pub enum AlterTableAction { + /// Rename the table to a new name. + RenameTo(Name), +} + +impl Statement { + /// Renames a table. + pub fn alter_table_rename_to(table: &Table, new_name: &str) -> Self { + AlterTable { + name: Name::from(&table.name[..]), + action: AlterTableAction::RenameTo(Name::from(new_name)), + } + .into() + } +} + +impl From for Statement { + fn from(value: AlterTable) -> Self { + Self::AlterTable(value) + } +} diff --git a/crates/toasty-sql/src/stmt/copy_table.rs b/crates/toasty-sql/src/stmt/copy_table.rs new file mode 100644 index 000000000..dfca01cae --- /dev/null +++ b/crates/toasty-sql/src/stmt/copy_table.rs @@ -0,0 +1,38 @@ +// TODO: Remove this file. This should be implementable with [`stmt::Insert`], however for +// migrations we need to reference table names which are not part of the schema, and that +// is currently not implemented. + +use super::{Name, Statement}; + +/// A statement to copy rows from one table to another. +/// +/// Generates: `INSERT INTO "target" ("t_col1", "t_col2") SELECT "s_col1", "s_col2" FROM "source"` +#[derive(Debug, Clone)] +pub struct CopyTable { + /// Source table name. + pub source: Name, + + /// Target table name. + pub target: Name, + + /// Column mappings: (target_column_name, source_column_name). + pub columns: Vec<(Name, Name)>, +} + +impl Statement { + /// Creates a statement that copies rows from one table to another. + pub fn copy_table(source: Name, target: Name, columns: Vec<(Name, Name)>) -> Self { + CopyTable { + source, + target, + columns, + } + .into() + } +} + +impl From for Statement { + fn from(value: CopyTable) -> Self { + Self::CopyTable(value) + } +} diff --git a/crates/toasty-sql/src/stmt/drop_column.rs b/crates/toasty-sql/src/stmt/drop_column.rs new file mode 100644 index 000000000..7f4732d09 --- /dev/null +++ b/crates/toasty-sql/src/stmt/drop_column.rs @@ -0,0 +1,48 @@ +use super::{Name, Statement}; + +use toasty_core::schema::db::{Column, TableId}; + +/// A statement to drop a column from a table. +#[derive(Debug, Clone)] +pub struct DropColumn { + /// ID of the table to drop the column from. + pub table: TableId, + + /// Name of the column to drop. + pub name: Name, + + /// Whether or not to add an `IF EXISTS` clause. + pub if_exists: bool, +} + +impl Statement { + /// Drops a column from a table. + /// + /// This function _does not_ add an `IF EXISTS` clause. + pub fn drop_column(column: &Column) -> Self { + DropColumn { + table: column.id.table, + name: Name::from(&column.name[..]), + if_exists: false, + } + .into() + } + + /// Drops a column from a table if it exists. 
+    ///
+    /// This function _does_ add an `IF EXISTS` clause.
+    pub fn drop_column_if_exists(column: &Column) -> Self {
+        DropColumn {
+            table: column.id.table,
+            name: Name::from(&column.name[..]),
+            if_exists: true,
+        }
+        .into()
+    }
+}
+
+impl From<DropColumn> for Statement {
+    fn from(value: DropColumn) -> Self {
+        Self::DropColumn(value)
+    }
+}
diff --git a/crates/toasty-sql/src/stmt/drop_index.rs b/crates/toasty-sql/src/stmt/drop_index.rs
new file mode 100644
index 000000000..32626beb1
--- /dev/null
+++ b/crates/toasty-sql/src/stmt/drop_index.rs
@@ -0,0 +1,43 @@
+use super::{Name, Statement};
+
+use toasty_core::schema::db::Index;
+
+/// A statement to drop a SQL index.
+#[derive(Debug, Clone)]
+pub struct DropIndex {
+    /// Name of the index.
+    pub name: Name,
+
+    /// Whether or not to add an `IF EXISTS` clause.
+    pub if_exists: bool,
+}
+
+impl Statement {
+    /// Drops an index.
+    ///
+    /// This function _does not_ add an `IF EXISTS` clause.
+    pub fn drop_index(index: &Index) -> Self {
+        DropIndex {
+            name: Name::from(&index.name[..]),
+            if_exists: false,
+        }
+        .into()
+    }
+
+    /// Drops an index if it exists.
+    ///
+    /// This function _does_ add an `IF EXISTS` clause.
+    pub fn drop_index_if_exists(index: &Index) -> Self {
+        DropIndex {
+            name: Name::from(&index.name[..]),
+            if_exists: true,
+        }
+        .into()
+    }
+}
+
+impl From<DropIndex> for Statement {
+    fn from(value: DropIndex) -> Self {
+        Self::DropIndex(value)
+    }
+}
diff --git a/crates/toasty-sql/src/stmt/pragma.rs b/crates/toasty-sql/src/stmt/pragma.rs
new file mode 100644
index 000000000..98dc40997
--- /dev/null
+++ b/crates/toasty-sql/src/stmt/pragma.rs
@@ -0,0 +1,46 @@
+use super::Statement;
+
+/// A SQLite PRAGMA statement.
+#[derive(Debug, Clone)]
+pub struct Pragma {
+    /// The pragma name (e.g. "foreign_keys").
+    pub name: String,
+
+    /// The value to set, if any. When `None`, this is a query pragma.
+    pub value: Option<String>,
+}
+
+impl Statement {
+    /// Sets `PRAGMA foreign_keys = ON`.
+    pub fn pragma_enable_foreign_keys() -> Self {
+        Pragma {
+            name: "foreign_keys".to_string(),
+            value: Some("ON".to_string()),
+        }
+        .into()
+    }
+
+    /// Sets `PRAGMA foreign_keys = OFF`.
+    pub fn pragma_disable_foreign_keys() -> Self {
+        Pragma {
+            name: "foreign_keys".to_string(),
+            value: Some("OFF".to_string()),
+        }
+        .into()
+    }
+
+    /// Creates a PRAGMA statement with the given name and value.
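+    ///
+    /// For example, `Statement::pragma("foreign_keys", "ON")` builds the same statement
+    /// as [`Statement::pragma_enable_foreign_keys`].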
+ pub fn pragma(name: impl Into, value: impl Into) -> Self { + Pragma { + name: name.into(), + value: Some(value.into()), + } + .into() + } +} + +impl From for Statement { + fn from(value: Pragma) -> Self { + Self::Pragma(value) + } +} diff --git a/crates/toasty-sql/src/stmt/table_name.rs b/crates/toasty-sql/src/stmt/table_name.rs new file mode 100644 index 000000000..a1cbaf36a --- /dev/null +++ b/crates/toasty-sql/src/stmt/table_name.rs @@ -0,0 +1,26 @@ +use super::ident::Ident; +use toasty_core::schema::db::TableId; + +#[derive(Debug, Clone)] +pub enum TableName { + TableId(TableId), + Ident(Ident), +} + +impl From for TableName { + fn from(value: TableId) -> Self { + TableName::TableId(value) + } +} + +impl From for TableName { + fn from(value: Ident) -> Self { + TableName::Ident(value) + } +} + +impl<'a> From<&'a str> for TableName { + fn from(value: &'a str) -> Self { + TableName::Ident(Ident::from(value)) + } +} diff --git a/crates/toasty-sql/tests/migration_add_column.rs b/crates/toasty-sql/tests/migration_add_column.rs new file mode 100644 index 000000000..81c22f427 --- /dev/null +++ b/crates/toasty-sql/tests/migration_add_column.rs @@ -0,0 +1,204 @@ +use toasty_core::{ + driver::Capability, + schema::db::{ + Column, ColumnId, IndexId, PrimaryKey, RenameHints, Schema, SchemaDiff, Table, TableId, + Type, + }, + stmt as core_stmt, +}; +use toasty_sql::{ + migration::MigrationStatement, + serializer::{Params, Placeholder}, + Serializer, +}; + +struct NoParams; + +impl Params for NoParams { + fn push(&mut self, _: &core_stmt::Value, _: Option<&core_stmt::Type>) -> Placeholder { + Placeholder(0) + } +} + +fn make_column(table_id: usize, index: usize, name: &str, storage_ty: Type) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: core_stmt::Type::String, + storage_ty, + nullable: false, + primary_key: index == 0, + auto_increment: false, + } +} + +fn make_table(id: usize, name: &str, columns: Vec) -> Table { + let pk_columns: Vec = columns + .iter() + .filter(|c| c.primary_key) + .map(|c| c.id) + .collect(); + + Table { + id: TableId(id), + name: name.to_string(), + columns, + primary_key: PrimaryKey { + columns: pk_columns, + index: IndexId { + table: TableId(id), + index: 0, + }, + }, + indices: vec![], + } +} + +fn serialize_migration(stmts: &[MigrationStatement<'_>], flavor: &str) -> Vec { + stmts + .iter() + .map(|ms| { + let serializer = match flavor { + "sqlite" => Serializer::sqlite(ms.schema()), + "postgresql" => Serializer::postgresql(ms.schema()), + "mysql" => Serializer::mysql(ms.schema()), + _ => panic!("unknown flavor: {flavor}"), + }; + serializer.serialize(ms.statement(), &mut NoParams) + }) + .collect() +} + +fn add_column_sql(new_col: Column, capability: &Capability, flavor: &str) -> String { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8))], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), new_col], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, capability); + let sql = serialize_migration(&stmts, flavor); + assert_eq!(sql.len(), 1); + sql.into_iter().next().unwrap() +} + +#[test] +fn add_column_not_null_sqlite() { + let col = make_column(0, 1, "name", Type::Text); + let sql = add_column_sql(col, &Capability::SQLITE, "sqlite"); + assert_eq!( + sql, + "ALTER TABLE 
\"users\" ADD COLUMN \"name\" TEXT NOT NULL;" + ); +} + +#[test] +fn add_column_not_null_postgresql() { + let col = make_column(0, 1, "name", Type::Text); + let sql = add_column_sql(col, &Capability::POSTGRESQL, "postgresql"); + assert_eq!( + sql, + "ALTER TABLE \"users\" ADD COLUMN \"name\" TEXT NOT NULL;" + ); +} + +#[test] +fn add_column_nullable_sqlite() { + let mut col = make_column(0, 1, "email", Type::Text); + col.nullable = true; + let sql = add_column_sql(col, &Capability::SQLITE, "sqlite"); + assert!(sql.contains("\"email\" TEXT"), "got: {sql}"); + assert!(!sql.contains("NOT NULL"), "got: {sql}"); +} + +#[test] +fn add_column_nullable_postgresql() { + let mut col = make_column(0, 1, "email", Type::Text); + col.nullable = true; + let sql = add_column_sql(col, &Capability::POSTGRESQL, "postgresql"); + assert!(sql.contains("\"email\" TEXT"), "got: {sql}"); + assert!(!sql.contains("NOT NULL"), "got: {sql}"); +} + +#[test] +fn add_column_auto_increment_sqlite() { + let mut col = make_column(0, 1, "seq", Type::Integer(8)); + col.auto_increment = true; + let sql = add_column_sql(col, &Capability::SQLITE, "sqlite"); + assert!(sql.contains("AUTOINCREMENT"), "got: {sql}"); +} + +#[test] +fn add_column_auto_increment_postgresql() { + let mut col = make_column(0, 1, "seq", Type::Integer(8)); + col.auto_increment = true; + let sql = add_column_sql(col, &Capability::POSTGRESQL, "postgresql"); + assert!( + sql.contains("GENERATED BY DEFAULT AS IDENTITY"), + "got: {sql}" + ); +} + +#[test] +fn add_column_auto_increment_mysql() { + let mut col = make_column(0, 1, "seq", Type::Integer(8)); + col.auto_increment = true; + let sql = add_column_sql(col, &Capability::MYSQL, "mysql"); + assert!(sql.contains("AUTO_INCREMENT"), "got: {sql}"); +} + +#[test] +fn add_column_nullable_auto_increment_sqlite() { + let mut col = make_column(0, 1, "seq", Type::Integer(8)); + col.nullable = true; + col.auto_increment = true; + let sql = add_column_sql(col, &Capability::SQLITE, "sqlite"); + assert!(!sql.contains("NOT NULL"), "got: {sql}"); + assert!(sql.contains("AUTOINCREMENT"), "got: {sql}"); +} + +#[test] +fn add_multiple_columns() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8))], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + make_column(0, 2, "email", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert!(sql.iter().any(|s| s.contains("\"name\""))); + assert!(sql.iter().any(|s| s.contains("\"email\""))); + assert!(sql.iter().all(|s| s.contains("ADD COLUMN"))); +} diff --git a/crates/toasty-sql/tests/migration_alter_column.rs b/crates/toasty-sql/tests/migration_alter_column.rs new file mode 100644 index 000000000..8052c613f --- /dev/null +++ b/crates/toasty-sql/tests/migration_alter_column.rs @@ -0,0 +1,705 @@ +use toasty_core::{ + driver::Capability, + schema::db::{ + Column, ColumnId, IndexId, PrimaryKey, RenameHints, Schema, SchemaDiff, Table, TableId, + Type, + }, + stmt as core_stmt, +}; +use toasty_sql::{ + migration::MigrationStatement, + serializer::{Params, Placeholder}, + Serializer, +}; + +struct NoParams; + +impl Params for NoParams { + fn push(&mut self, _: &core_stmt::Value, _: 
Option<&core_stmt::Type>) -> Placeholder { + Placeholder(0) + } +} + +fn make_column(table_id: usize, index: usize, name: &str, storage_ty: Type) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: core_stmt::Type::String, + storage_ty, + nullable: false, + primary_key: index == 0, + auto_increment: false, + } +} + +fn make_table(id: usize, name: &str, columns: Vec) -> Table { + let pk_columns: Vec = columns + .iter() + .filter(|c| c.primary_key) + .map(|c| c.id) + .collect(); + + Table { + id: TableId(id), + name: name.to_string(), + columns, + primary_key: PrimaryKey { + columns: pk_columns, + index: IndexId { + table: TableId(id), + index: 0, + }, + }, + indices: vec![], + } +} + +fn serialize_migration(stmts: &[MigrationStatement<'_>], flavor: &str) -> Vec { + stmts + .iter() + .map(|ms| { + let serializer = match flavor { + "sqlite" => Serializer::sqlite(ms.schema()), + "postgresql" => Serializer::postgresql(ms.schema()), + "mysql" => Serializer::mysql(ms.schema()), + _ => panic!("unknown flavor: {flavor}"), + }; + serializer.serialize(ms.statement(), &mut NoParams) + }) + .collect() +} + +// --------------------------------------------------------------------------- +// PostgreSQL: each property change is a separate statement +// --------------------------------------------------------------------------- + +#[test] +fn alter_column_rename_postgresql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + + let mut renamed = make_column(0, 1, "full_name", Type::Text); + renamed.primary_key = false; + + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), renamed], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 1, + }, + ColumnId { + table: TableId(0), + index: 1, + }, + ); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!( + sql, + vec!["ALTER TABLE \"users\" RENAME COLUMN \"name\" TO \"full_name\";",] + ); +} + +#[test] +fn alter_column_rename_with_table_rename_postgresql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + + let mut renamed = make_column(0, 1, "full_name", Type::Text); + renamed.primary_key = false; + + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![make_column(0, 0, "id", Type::Integer(8)), renamed], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 1, + }, + ColumnId { + table: TableId(0), + index: 1, + }, + ); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!( + sql, + vec![ + "ALTER TABLE \"users\" RENAME TO \"accounts\";", + "ALTER TABLE \"accounts\" RENAME COLUMN \"name\" TO \"full_name\";", + ] + ); +} + +#[test] +fn alter_column_set_not_null_postgresql() { + let mut email = make_column(0, 1, "email", Type::Text); + email.nullable = true; + + let from = Schema { + tables: vec![make_table( + 
0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), email], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!( + sql, + vec!["ALTER TABLE \"users\" ALTER COLUMN \"email\" SET NOT NULL;",] + ); +} + +#[test] +fn alter_column_drop_not_null_postgresql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ], + )], + }; + + let mut email = make_column(0, 1, "email", Type::Text); + email.nullable = true; + + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), email], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!( + sql, + vec!["ALTER TABLE \"users\" ALTER COLUMN \"email\" DROP NOT NULL;",] + ); +} + +#[test] +fn alter_column_change_type_postgresql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Integer(4)), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!( + sql, + vec!["ALTER TABLE \"users\" ALTER COLUMN \"value\" TYPE TEXT;",] + ); +} + +#[test] +fn alter_column_multiple_changes_postgresql() { + // Change type AND nullability → two separate statements + let mut value = make_column(0, 1, "value", Type::Integer(4)); + value.nullable = true; + + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), value], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!( + sql, + vec![ + "ALTER TABLE \"users\" ALTER COLUMN \"value\" TYPE TEXT;", + "ALTER TABLE \"users\" ALTER COLUMN \"value\" SET NOT NULL;", + ] + ); +} + +#[test] +fn alter_column_multiple_changes_with_table_rename_postgresql() { + let mut value = make_column(0, 1, "value", Type::Integer(4)); + value.nullable = true; + + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), value], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Text), + ], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + + 
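+    // The rename hint marks table 0 in `from` and table 0 in `to` as the same table, so
+    // the diff is expressed as a table rename followed by column alterations rather than
+    // a drop-and-create of the whole table.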
let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!( + sql, + vec![ + "ALTER TABLE \"users\" RENAME TO \"accounts\";", + "ALTER TABLE \"accounts\" ALTER COLUMN \"value\" TYPE TEXT;", + "ALTER TABLE \"accounts\" ALTER COLUMN \"value\" SET NOT NULL;", + ] + ); +} + +// --------------------------------------------------------------------------- +// MySQL: all property changes in a single CHANGE COLUMN statement +// --------------------------------------------------------------------------- + +#[test] +fn alter_column_rename_mysql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + + let mut renamed = make_column(0, 1, "full_name", Type::Text); + renamed.primary_key = false; + + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), renamed], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 1, + }, + ColumnId { + table: TableId(0), + index: 1, + }, + ); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::MYSQL); + let sql = serialize_migration(&stmts, "mysql"); + + assert_eq!( + sql, + vec!["ALTER TABLE `users` CHANGE COLUMN `name` `full_name` TEXT NOT NULL;",] + ); +} + +#[test] +fn alter_column_multiple_changes_mysql() { + let mut value = make_column(0, 1, "value", Type::Integer(4)); + value.nullable = true; + + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), value], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::MYSQL); + let sql = serialize_migration(&stmts, "mysql"); + + assert_eq!( + sql, + vec!["ALTER TABLE `users` CHANGE COLUMN `value` `value` TEXT NOT NULL;",] + ); +} + +#[test] +fn alter_column_multiple_changes_with_table_rename_mysql() { + let mut value = make_column(0, 1, "value", Type::Integer(4)); + value.nullable = true; + + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), value], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Text), + ], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::MYSQL); + let sql = serialize_migration(&stmts, "mysql"); + + assert_eq!( + sql, + vec![ + "ALTER TABLE `users` RENAME TO `accounts`;", + "ALTER TABLE `accounts` CHANGE COLUMN `value` `value` TEXT NOT NULL;", + ] + ); +} + +// --------------------------------------------------------------------------- +// SQLite: rename-only works with ALTER TABLE RENAME COLUMN +// --------------------------------------------------------------------------- + +#[test] +fn alter_column_rename_only_sqlite() { + let from = Schema { + tables: vec![make_table( + 0, + 
"users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + + let mut renamed = make_column(0, 1, "full_name", Type::Text); + renamed.primary_key = false; + + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), renamed], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 1, + }, + ColumnId { + table: TableId(0), + index: 1, + }, + ); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!( + sql, + vec!["ALTER TABLE \"users\" RENAME COLUMN \"name\" TO \"full_name\";",] + ); +} + +// --------------------------------------------------------------------------- +// SQLite: non-rename changes require table recreation +// --------------------------------------------------------------------------- + +#[test] +fn alter_column_change_nullability_sqlite() { + // email: nullable → not null requires table recreation + let mut email = make_column(0, 1, "email", Type::Text); + email.nullable = true; + + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), email], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql, vec![ + "PRAGMA foreign_keys = OFF;", + "CREATE TABLE \"_toasty_new_users\" (\n \"id\" BIGINT NOT NULL,\n \"email\" TEXT NOT NULL,\n PRIMARY KEY (\"id\")\n);", + "INSERT INTO \"_toasty_new_users\" (\"id\", \"email\") SELECT \"id\", \"email\" FROM \"users\";", + "DROP TABLE \"users\";", + "ALTER TABLE \"_toasty_new_users\" RENAME TO \"users\";", + "PRAGMA foreign_keys = ON;", + ]); +} + +#[test] +fn alter_column_change_type_sqlite() { + // value: Integer(4) → Text requires table recreation + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Integer(4)), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "value", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql, vec![ + "PRAGMA foreign_keys = OFF;", + "CREATE TABLE \"_toasty_new_users\" (\n \"id\" BIGINT NOT NULL,\n \"value\" TEXT NOT NULL,\n PRIMARY KEY (\"id\")\n);", + "INSERT INTO \"_toasty_new_users\" (\"id\", \"value\") SELECT \"id\", \"value\" FROM \"users\";", + "DROP TABLE \"users\";", + "ALTER TABLE \"_toasty_new_users\" RENAME TO \"users\";", + "PRAGMA foreign_keys = ON;", + ]); +} + +#[test] +fn alter_column_change_nullability_with_table_rename_sqlite() { + // Table renamed users → accounts AND email: nullable → not null + let mut email = make_column(0, 1, "email", Type::Text); + email.nullable = true; + + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, 
"id", Type::Integer(8)), email], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + // The table rename happens first, then recreation uses the new name + assert_eq!(sql, vec![ + "ALTER TABLE \"users\" RENAME TO \"accounts\";", + "PRAGMA foreign_keys = OFF;", + "CREATE TABLE \"_toasty_new_accounts\" (\n \"id\" BIGINT NOT NULL,\n \"email\" TEXT NOT NULL,\n PRIMARY KEY (\"id\")\n);", + "INSERT INTO \"_toasty_new_accounts\" (\"id\", \"email\") SELECT \"id\", \"email\" FROM \"accounts\";", + "DROP TABLE \"accounts\";", + "ALTER TABLE \"_toasty_new_accounts\" RENAME TO \"accounts\";", + "PRAGMA foreign_keys = ON;", + ]); +} + +#[test] +fn alter_column_rename_and_change_type_sqlite() { + // Column renamed name → full_name AND type change Integer → Text + // Both changes together require table recreation + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Integer(4)), + ], + )], + }; + + let mut full_name = make_column(0, 1, "full_name", Type::Text); + full_name.primary_key = false; + + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), full_name], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_column_hint( + ColumnId { + table: TableId(0), + index: 1, + }, + ColumnId { + table: TableId(0), + index: 1, + }, + ); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + // Column rename + type change → table recreation with new column name + assert_eq!(sql, vec![ + "PRAGMA foreign_keys = OFF;", + "CREATE TABLE \"_toasty_new_users\" (\n \"id\" BIGINT NOT NULL,\n \"full_name\" TEXT NOT NULL,\n PRIMARY KEY (\"id\")\n);", + "INSERT INTO \"_toasty_new_users\" (\"id\", \"full_name\") SELECT \"id\", \"name\" FROM \"users\";", + "DROP TABLE \"users\";", + "ALTER TABLE \"_toasty_new_users\" RENAME TO \"users\";", + "PRAGMA foreign_keys = ON;", + ]); +} diff --git a/crates/toasty-sql/tests/migration_alter_table.rs b/crates/toasty-sql/tests/migration_alter_table.rs new file mode 100644 index 000000000..439d136a8 --- /dev/null +++ b/crates/toasty-sql/tests/migration_alter_table.rs @@ -0,0 +1,212 @@ +use toasty_core::{ + driver::Capability, + schema::db::{ + Column, ColumnId, IndexId, PrimaryKey, RenameHints, Schema, SchemaDiff, Table, TableId, + Type, + }, + stmt as core_stmt, +}; +use toasty_sql::{ + migration::MigrationStatement, + serializer::{Params, Placeholder}, + Serializer, +}; + +struct NoParams; + +impl Params for NoParams { + fn push(&mut self, _: &core_stmt::Value, _: Option<&core_stmt::Type>) -> Placeholder { + Placeholder(0) + } +} + +fn make_column(table_id: usize, index: usize, name: &str, storage_ty: Type) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: core_stmt::Type::String, + storage_ty, + nullable: false, + primary_key: index == 0, + auto_increment: false, + } +} + +fn make_table(id: usize, name: &str, columns: Vec) -> Table { + 
let pk_columns: Vec = columns + .iter() + .filter(|c| c.primary_key) + .map(|c| c.id) + .collect(); + + Table { + id: TableId(id), + name: name.to_string(), + columns, + primary_key: PrimaryKey { + columns: pk_columns, + index: IndexId { + table: TableId(id), + index: 0, + }, + }, + indices: vec![], + } +} + +fn serialize_migration(stmts: &[MigrationStatement<'_>], flavor: &str) -> Vec { + stmts + .iter() + .map(|ms| { + let serializer = match flavor { + "sqlite" => Serializer::sqlite(ms.schema()), + "postgresql" => Serializer::postgresql(ms.schema()), + "mysql" => Serializer::mysql(ms.schema()), + _ => panic!("unknown flavor: {flavor}"), + }; + serializer.serialize(ms.statement(), &mut NoParams) + }) + .collect() +} + +#[test] +fn rename_table_sqlite() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "ALTER TABLE \"users\" RENAME TO \"accounts\";"); +} + +#[test] +fn rename_table_postgresql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "ALTER TABLE \"users\" RENAME TO \"accounts\";"); +} + +#[test] +fn rename_table_and_add_column() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8))], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ], + )], + }; + + let mut hints = RenameHints::new(); + hints.add_table_hint(TableId(0), TableId(0)); + + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert_eq!(sql[0], "ALTER TABLE \"users\" RENAME TO \"accounts\";"); + assert_eq!( + sql[1], + "ALTER TABLE \"accounts\" ADD COLUMN \"email\" TEXT NOT NULL;" + ); +} + +#[test] +fn rename_without_hint_is_drop_and_create() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "accounts", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, 
&to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert_eq!(sql[0], "DROP TABLE \"users\";"); + assert_eq!( + sql[1], + "CREATE TABLE \"accounts\" (\n \"id\" BIGINT NOT NULL,\n \"name\" TEXT NOT NULL,\n PRIMARY KEY (\"id\")\n);" + ); +} diff --git a/crates/toasty-sql/tests/migration_create_table.rs b/crates/toasty-sql/tests/migration_create_table.rs new file mode 100644 index 000000000..56dce54fb --- /dev/null +++ b/crates/toasty-sql/tests/migration_create_table.rs @@ -0,0 +1,336 @@ +use toasty_core::{ + driver::Capability, + schema::db::{ + Column, ColumnId, Index, IndexColumn, IndexId, IndexOp, IndexScope, PrimaryKey, + RenameHints, Schema, SchemaDiff, Table, TableId, Type, + }, + stmt as core_stmt, +}; +use toasty_sql::{ + migration::MigrationStatement, + serializer::{Params, Placeholder}, + Serializer, +}; + +struct NoParams; + +impl Params for NoParams { + fn push(&mut self, _: &core_stmt::Value, _: Option<&core_stmt::Type>) -> Placeholder { + Placeholder(0) + } +} + +fn make_column(table_id: usize, index: usize, name: &str, storage_ty: Type) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: core_stmt::Type::String, + storage_ty, + nullable: false, + primary_key: index == 0, + auto_increment: false, + } +} + +fn make_table(id: usize, name: &str, columns: Vec, indices: Vec) -> Table { + let pk_columns: Vec = columns + .iter() + .filter(|c| c.primary_key) + .map(|c| c.id) + .collect(); + + Table { + id: TableId(id), + name: name.to_string(), + columns, + primary_key: PrimaryKey { + columns: pk_columns, + index: IndexId { + table: TableId(id), + index: 0, + }, + }, + indices, + } +} + +fn serialize_migration(stmts: &[MigrationStatement<'_>], flavor: &str) -> Vec { + stmts + .iter() + .map(|ms| { + let serializer = match flavor { + "sqlite" => Serializer::sqlite(ms.schema()), + "postgresql" => Serializer::postgresql(ms.schema()), + "mysql" => Serializer::mysql(ms.schema()), + _ => panic!("unknown flavor: {flavor}"), + }; + serializer.serialize(ms.statement(), &mut NoParams) + }) + .collect() +} + +#[test] +fn create_single_table_sqlite() { + let from = Schema::default(); + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + vec![], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 1); + assert_eq!( + sql[0], + "CREATE TABLE \"users\" (\n \"id\" BIGINT NOT NULL,\n \"name\" TEXT NOT NULL,\n PRIMARY KEY (\"id\")\n);" + ); +} + +#[test] +fn create_single_table_postgresql() { + let from = Schema::default(); + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + vec![], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!(sql.len(), 1); + assert_eq!( + sql[0], + "CREATE TABLE \"users\" (\n \"id\" BIGINT NOT NULL,\n \"name\" TEXT NOT NULL,\n PRIMARY KEY (\"id\")\n);" + ); +} + +#[test] +fn 
create_table_with_nullable_column() { + let from = Schema::default(); + + let mut name_col = make_column(0, 1, "email", Type::Text); + name_col.nullable = true; + + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8)), name_col], + vec![], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 1); + assert!(sql[0].contains("\"email\" TEXT"), "got: {}", sql[0]); + assert!( + !sql[0].contains("\"email\" TEXT NOT NULL"), + "got: {}", + sql[0] + ); +} + +#[test] +fn create_table_with_auto_increment_sqlite() { + let from = Schema::default(); + + let mut id_col = make_column(0, 0, "id", Type::Integer(8)); + id_col.auto_increment = true; + + let to = Schema { + tables: vec![make_table( + 0, + "counters", + vec![id_col, make_column(0, 1, "value", Type::Integer(4))], + vec![], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 1); + assert!( + sql[0].contains("AUTOINCREMENT") || sql[0].contains("PRIMARY KEY"), + "expected auto increment handling, got: {}", + sql[0] + ); +} + +#[test] +fn create_table_with_index() { + let from = Schema::default(); + + let columns = vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ]; + + let index = Index { + id: IndexId { + table: TableId(0), + index: 0, + }, + name: "idx_users_email".to_string(), + on: TableId(0), + columns: vec![IndexColumn { + column: ColumnId { + table: TableId(0), + index: 1, + }, + op: IndexOp::Eq, + scope: IndexScope::Local, + }], + unique: false, + primary_key: false, + }; + + let to = Schema { + tables: vec![make_table(0, "users", columns, vec![index])], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert!(sql[0].starts_with("CREATE TABLE"), "got: {}", sql[0]); + assert_eq!( + sql[1], + "CREATE INDEX \"idx_users_email\" ON \"users\" (\"email\");" + ); +} + +#[test] +fn create_table_with_unique_index() { + let from = Schema::default(); + + let columns = vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ]; + + let index = Index { + id: IndexId { + table: TableId(0), + index: 0, + }, + name: "idx_users_email_unique".to_string(), + on: TableId(0), + columns: vec![IndexColumn { + column: ColumnId { + table: TableId(0), + index: 1, + }, + op: IndexOp::Eq, + scope: IndexScope::Local, + }], + unique: true, + primary_key: false, + }; + + let to = Schema { + tables: vec![make_table(0, "users", columns, vec![index])], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert_eq!( + sql[1], + "CREATE UNIQUE INDEX \"idx_users_email_unique\" ON \"users\" (\"email\");" + ); +} + +#[test] +fn create_multiple_tables() { + let from = Schema::default(); + let to = Schema { + tables: vec![ + make_table( + 0, + "users", + vec![ + 
make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + vec![], + ), + make_table( + 1, + "posts", + vec![ + make_column(1, 0, "id", Type::Integer(8)), + make_column(1, 1, "title", Type::Text), + make_column(1, 2, "body", Type::Text), + ], + vec![], + ), + ], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert!(sql.iter().any(|s| s.contains("\"users\""))); + assert!(sql.iter().any(|s| s.contains("\"posts\""))); +} + +#[test] +fn create_table_varchar_mysql() { + let from = Schema::default(); + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::VarChar(191)), + ], + vec![], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::MYSQL); + let sql = serialize_migration(&stmts, "mysql"); + + assert_eq!(sql.len(), 1); + assert!(sql[0].contains("VARCHAR(191)"), "got: {}", sql[0]); +} diff --git a/crates/toasty-sql/tests/migration_drop_column.rs b/crates/toasty-sql/tests/migration_drop_column.rs new file mode 100644 index 000000000..aff5165d2 --- /dev/null +++ b/crates/toasty-sql/tests/migration_drop_column.rs @@ -0,0 +1,163 @@ +use toasty_core::{ + driver::Capability, + schema::db::{ + Column, ColumnId, IndexId, PrimaryKey, RenameHints, Schema, SchemaDiff, Table, TableId, + Type, + }, + stmt as core_stmt, +}; +use toasty_sql::{ + migration::MigrationStatement, + serializer::{Params, Placeholder}, + Serializer, +}; + +struct NoParams; + +impl Params for NoParams { + fn push(&mut self, _: &core_stmt::Value, _: Option<&core_stmt::Type>) -> Placeholder { + Placeholder(0) + } +} + +fn make_column(table_id: usize, index: usize, name: &str, storage_ty: Type) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: core_stmt::Type::String, + storage_ty, + nullable: false, + primary_key: index == 0, + auto_increment: false, + } +} + +fn make_table(id: usize, name: &str, columns: Vec) -> Table { + let pk_columns: Vec = columns + .iter() + .filter(|c| c.primary_key) + .map(|c| c.id) + .collect(); + + Table { + id: TableId(id), + name: name.to_string(), + columns, + primary_key: PrimaryKey { + columns: pk_columns, + index: IndexId { + table: TableId(id), + index: 0, + }, + }, + indices: vec![], + } +} + +fn serialize_migration(stmts: &[MigrationStatement<'_>], flavor: &str) -> Vec { + stmts + .iter() + .map(|ms| { + let serializer = match flavor { + "sqlite" => Serializer::sqlite(ms.schema()), + "postgresql" => Serializer::postgresql(ms.schema()), + "mysql" => Serializer::mysql(ms.schema()), + _ => panic!("unknown flavor: {flavor}"), + }; + serializer.serialize(ms.statement(), &mut NoParams) + }) + .collect() +} + +#[test] +fn drop_column_sqlite() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8))], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = 
serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "ALTER TABLE \"users\" DROP COLUMN \"name\";"); +} + +#[test] +fn drop_column_postgresql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8))], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "ALTER TABLE \"users\" DROP COLUMN \"email\";"); +} + +#[test] +fn drop_multiple_columns() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + make_column(0, 2, "email", Type::Text), + ], + )], + }; + let to = Schema { + tables: vec![make_table( + 0, + "users", + vec![make_column(0, 0, "id", Type::Integer(8))], + )], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert!(sql.iter().any(|s| s.contains("\"name\""))); + assert!(sql.iter().any(|s| s.contains("\"email\""))); + assert!(sql.iter().all(|s| s.contains("DROP COLUMN"))); +} diff --git a/crates/toasty-sql/tests/migration_drop_table.rs b/crates/toasty-sql/tests/migration_drop_table.rs new file mode 100644 index 000000000..e5f3a9681 --- /dev/null +++ b/crates/toasty-sql/tests/migration_drop_table.rs @@ -0,0 +1,262 @@ +use toasty_core::{ + driver::Capability, + schema::db::{ + Column, ColumnId, Index, IndexColumn, IndexId, IndexOp, IndexScope, PrimaryKey, + RenameHints, Schema, SchemaDiff, Table, TableId, Type, + }, + stmt as core_stmt, +}; +use toasty_sql::{ + migration::MigrationStatement, + serializer::{Params, Placeholder}, + Serializer, +}; + +struct NoParams; + +impl Params for NoParams { + fn push(&mut self, _: &core_stmt::Value, _: Option<&core_stmt::Type>) -> Placeholder { + Placeholder(0) + } +} + +fn make_column(table_id: usize, index: usize, name: &str, storage_ty: Type) -> Column { + Column { + id: ColumnId { + table: TableId(table_id), + index, + }, + name: name.to_string(), + ty: core_stmt::Type::String, + storage_ty, + nullable: false, + primary_key: index == 0, + auto_increment: false, + } +} + +fn make_table(id: usize, name: &str, columns: Vec, indices: Vec) -> Table { + let pk_columns: Vec = columns + .iter() + .filter(|c| c.primary_key) + .map(|c| c.id) + .collect(); + + Table { + id: TableId(id), + name: name.to_string(), + columns, + primary_key: PrimaryKey { + columns: pk_columns, + index: IndexId { + table: TableId(id), + index: 0, + }, + }, + indices, + } +} + +fn serialize_migration(stmts: &[MigrationStatement<'_>], flavor: &str) -> Vec { + stmts + .iter() + .map(|ms| { + let serializer = match flavor { + "sqlite" => Serializer::sqlite(ms.schema()), + "postgresql" => Serializer::postgresql(ms.schema()), + "mysql" => Serializer::mysql(ms.schema()), + _ => panic!("unknown flavor: {flavor}"), + }; + serializer.serialize(ms.statement(), &mut NoParams) + }) + .collect() +} + +#[test] +fn drop_single_table_sqlite() { + let from = Schema { + tables: vec![make_table( + 0, + 
"users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + vec![], + )], + }; + let to = Schema::default(); + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "DROP TABLE \"users\";"); +} + +#[test] +fn drop_single_table_postgresql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + vec![], + )], + }; + let to = Schema::default(); + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::POSTGRESQL); + let sql = serialize_migration(&stmts, "postgresql"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "DROP TABLE \"users\";"); +} + +#[test] +fn drop_multiple_tables() { + let from = Schema { + tables: vec![ + make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + vec![], + ), + make_table( + 1, + "posts", + vec![ + make_column(1, 0, "id", Type::Integer(8)), + make_column(1, 1, "title", Type::Text), + ], + vec![], + ), + ], + }; + let to = Schema::default(); + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 2); + assert!(sql.iter().any(|s| s == "DROP TABLE \"users\";")); + assert!(sql.iter().any(|s| s == "DROP TABLE \"posts\";")); +} + +#[test] +fn drop_one_table_keep_another() { + let users = make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::Text), + ], + vec![], + ); + let posts = make_table( + 1, + "posts", + vec![ + make_column(1, 0, "id", Type::Integer(8)), + make_column(1, 1, "title", Type::Text), + ], + vec![], + ); + + let from = Schema { + tables: vec![users.clone(), posts], + }; + let to = Schema { + tables: vec![users], + }; + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "DROP TABLE \"posts\";"); +} + +#[test] +fn drop_table_with_index() { + let index = Index { + id: IndexId { + table: TableId(0), + index: 0, + }, + name: "idx_users_email".to_string(), + on: TableId(0), + columns: vec![IndexColumn { + column: ColumnId { + table: TableId(0), + index: 1, + }, + op: IndexOp::Eq, + scope: IndexScope::Local, + }], + unique: false, + primary_key: false, + }; + + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "email", Type::Text), + ], + vec![index], + )], + }; + let to = Schema::default(); + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::SQLITE); + let sql = serialize_migration(&stmts, "sqlite"); + + // Dropping a table should just drop the table; indices are dropped implicitly. 
+ assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "DROP TABLE \"users\";"); +} + +#[test] +fn drop_table_mysql() { + let from = Schema { + tables: vec![make_table( + 0, + "users", + vec![ + make_column(0, 0, "id", Type::Integer(8)), + make_column(0, 1, "name", Type::VarChar(191)), + ], + vec![], + )], + }; + let to = Schema::default(); + + let hints = RenameHints::new(); + let diff = SchemaDiff::from(&from, &to, &hints); + let stmts = MigrationStatement::from_diff(&diff, &Capability::MYSQL); + let sql = serialize_migration(&stmts, "mysql"); + + assert_eq!(sql.len(), 1); + assert_eq!(sql[0], "DROP TABLE `users`;"); +} diff --git a/crates/toasty/src/db.rs b/crates/toasty/src/db.rs index a5b7c21b4..cef589bf1 100644 --- a/crates/toasty/src/db.rs +++ b/crates/toasty/src/db.rs @@ -12,7 +12,7 @@ use tokio::{ use crate::{engine::Engine, stmt, Cursor, Model, Result, Statement}; -use toasty_core::{stmt::ValueStream, Schema}; +use toasty_core::{driver::Driver, stmt::ValueStream, Schema}; #[derive(Debug)] pub struct Db { @@ -120,9 +120,17 @@ impl Db { .await } + pub fn driver(&self) -> &dyn Driver { + self.engine.driver() + } + pub fn schema(&self) -> &Schema { &self.engine.schema } + + pub fn capability(&self) -> &Capability { + self.engine.capability() + } } impl Drop for Db { diff --git a/crates/toasty/src/db/connect.rs b/crates/toasty/src/db/connect.rs index 7daafd677..287e38a7a 100644 --- a/crates/toasty/src/db/connect.rs +++ b/crates/toasty/src/db/connect.rs @@ -1,7 +1,11 @@ use crate::Result; pub use toasty_core::driver::{operation::Operation, Capability, Connection, Response}; -use toasty_core::{async_trait, driver::Driver}; +use toasty_core::{ + async_trait, + driver::Driver, + schema::db::{Migration, SchemaDiff}, +}; use url::Url; @@ -85,4 +89,8 @@ impl Driver for Connect { async fn connect(&self) -> Result> { self.driver.connect().await } + + fn generate_migration(&self, schema_diff: &SchemaDiff<'_>) -> Migration { + self.driver.generate_migration(schema_diff) + } } diff --git a/crates/toasty/src/db/pool.rs b/crates/toasty/src/db/pool.rs index 9ad4a868a..bd06850eb 100644 --- a/crates/toasty/src/db/pool.rs +++ b/crates/toasty/src/db/pool.rs @@ -87,6 +87,11 @@ impl Pool { Ok(PoolConnection { inner: connection }) } + /// Returns the database driver this pool uses to create connections. + pub fn driver(&self) -> &dyn Driver { + self.inner.manager().driver.as_ref() + } + /// Returns the database driver's capabilities. pub fn capability(&self) -> &Capability { self.capability diff --git a/crates/toasty/src/engine.rs b/crates/toasty/src/engine.rs index ba911bf25..a4c98086b 100644 --- a/crates/toasty/src/engine.rs +++ b/crates/toasty/src/engine.rs @@ -17,7 +17,7 @@ mod verify; use crate::{db::Pool, Result}; use std::sync::Arc; use toasty_core::{ - driver::Capability, + driver::{Capability, Driver}, stmt::{self, Statement, ValueStream}, Schema, }; @@ -91,4 +91,9 @@ impl Engine { fn expr_cx_for<'a>(&'a self, target: impl stmt::IntoExprTarget<'a>) -> stmt::ExprContext<'a> { stmt::ExprContext::new_with_target(&self.schema, target) } + + /// Returns the database driver this engine is using. 
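+    ///
+    /// This simply forwards to the connection pool, which holds the driver instance.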
+ pub(crate) fn driver(&self) -> &dyn Driver { + self.pool.driver() + } } diff --git a/examples/todo-with-cli/.gitignore b/examples/todo-with-cli/.gitignore new file mode 100644 index 000000000..c370cb644 --- /dev/null +++ b/examples/todo-with-cli/.gitignore @@ -0,0 +1 @@ +test.db diff --git a/examples/todo-with-cli/Cargo.toml b/examples/todo-with-cli/Cargo.toml new file mode 100644 index 000000000..f22f56b88 --- /dev/null +++ b/examples/todo-with-cli/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "example-todo-with-cli" +version = "0.1.0" +edition = "2024" +publish = false +default-run = "todo-app" + +[features] +default = [ "sqlite" ] +dynamodb = [ "toasty/dynamodb" ] +mysql = [ "toasty/mysql" ] +postgresql = [ "toasty/postgresql" ] +sqlite = [ "toasty/sqlite" ] + +[dependencies] +toasty.workspace = true +toasty-cli.workspace = true +tokio.workspace = true +anyhow = "1.0.100" + +[[bin]] +name = "todo-app" +path = "src/bin/app.rs" + +[[bin]] +name = "todo-cli" +path = "src/bin/cli.rs" diff --git a/examples/todo-with-cli/Toasty.toml b/examples/todo-with-cli/Toasty.toml new file mode 100644 index 000000000..4aafbc8e9 --- /dev/null +++ b/examples/todo-with-cli/Toasty.toml @@ -0,0 +1,5 @@ +[migration] +path = "toasty" +prefix_style = "Sequential" +checksums = false +statement_breakpoints = true diff --git a/examples/todo-with-cli/src/bin/app.rs b/examples/todo-with-cli/src/bin/app.rs new file mode 100644 index 000000000..1761bd6cc --- /dev/null +++ b/examples/todo-with-cli/src/bin/app.rs @@ -0,0 +1,80 @@ +use example_todo_with_cli::{Todo, User, create_db}; + +#[tokio::main] +async fn main() -> toasty::Result<()> { + let db = create_db().await?; + + println!("==> Creating users..."); + let user1 = User::create() + .name("Alice") + .email("alice@example.com") + .exec(&db) + .await?; + + let user2 = User::create() + .name("Bob") + .email("bob@example.com") + .exec(&db) + .await?; + + println!("Created users: {} and {}", user1.name, user2.name); + + println!("\n==> Creating todos..."); + let todo1 = user1 + .todos() + .create() + .title("Learn Rust") + .completed(false) + .exec(&db) + .await?; + + let todo2 = user1 + .todos() + .create() + .title("Build a web app") + .completed(false) + .exec(&db) + .await?; + + let _todo3 = user2 + .todos() + .create() + .title("Write documentation") + .completed(true) + .exec(&db) + .await?; + + println!("Created {} todos", 3); + + println!("\n==> Listing all users and their todos..."); + let users = User::all().collect::>(&db).await?; + + for user in users { + println!("\nUser: {} ({})", user.name, user.email); + + let mut todos = user.todos().all(&db).await?; + while let Some(todo) = todos.next().await { + let todo = todo?; + let status = if todo.completed { "✓" } else { " " }; + println!(" [{}] {}", status, todo.title); + } + } + + println!("\n==> Updating a todo..."); + let mut todo = Todo::get_by_id(&db, &todo1.id).await?; + todo.update().completed(true).exec(&db).await?; + println!("Marked '{}' as completed", todo.title); + + println!("\n==> Deleting a todo..."); + let todo = Todo::get_by_id(&db, &todo2.id).await?; + println!("Deleting '{}'", todo.title); + todo.delete(&db).await?; + + println!("\n==> Final count..."); + let todos = Todo::all().collect::>(&db).await?; + println!("Total todos remaining: {}", todos.len()); + + println!("\n>>> Application completed successfully! 
<<<"); + + Ok(()) +} diff --git a/examples/todo-with-cli/src/bin/cli.rs b/examples/todo-with-cli/src/bin/cli.rs new file mode 100644 index 000000000..e32e210bf --- /dev/null +++ b/examples/todo-with-cli/src/bin/cli.rs @@ -0,0 +1,19 @@ +use example_todo_with_cli::create_db; +use toasty_cli::{Config, ToastyCli}; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + // Load configuration from Toasty.toml + let config = Config::load()?; + + // Create the database instance with our schema + let db = create_db().await?; + + // Create the CLI with our database and config + let cli = ToastyCli::with_config(db, config); + + // Parse and run CLI commands + cli.parse_and_run().await?; + + Ok(()) +} diff --git a/examples/todo-with-cli/src/lib.rs b/examples/todo-with-cli/src/lib.rs new file mode 100644 index 000000000..eb44cac1b --- /dev/null +++ b/examples/todo-with-cli/src/lib.rs @@ -0,0 +1,45 @@ +use toasty::stmt::Id; + +#[derive(Debug, toasty::Model)] +pub struct User { + #[key] + #[auto] + pub id: Id, + + pub name: String, + + #[unique] + pub email: String, + + #[has_many] + pub todos: toasty::HasMany, +} + +#[derive(Debug, toasty::Model)] +pub struct Todo { + #[key] + #[auto] + pub id: Id, + + #[index] + pub user_id: Id, + + #[belongs_to(key = user_id, references = id)] + pub user: toasty::BelongsTo, + + #[index] + pub title: String, + + pub completed: bool, +} + +/// Helper function to create a database instance with the schema +pub async fn create_db() -> toasty::Result { + let db = toasty::Db::builder() + .register::() + .register::() + .connect("sqlite:./test.db") + .await?; + + Ok(db) +} diff --git a/examples/todo-with-cli/toasty/history.toml b/examples/todo-with-cli/toasty/history.toml new file mode 100644 index 000000000..c162b9e0f --- /dev/null +++ b/examples/todo-with-cli/toasty/history.toml @@ -0,0 +1,6 @@ +version = 1 + +[[migrations]] +id = 1101269131896407524 +name = "0000_migration.sql" +snapshot_name = "0000_snapshot.toml" diff --git a/examples/todo-with-cli/toasty/migrations/0000_migration.sql b/examples/todo-with-cli/toasty/migrations/0000_migration.sql new file mode 100644 index 000000000..b6c01ce22 --- /dev/null +++ b/examples/todo-with-cli/toasty/migrations/0000_migration.sql @@ -0,0 +1,24 @@ +CREATE TABLE "users" ( + "id" TEXT NOT NULL, + "name" TEXT NOT NULL, + "email" TEXT NOT NULL, + PRIMARY KEY ("id") +); +-- #[toasty::breakpoint] +CREATE UNIQUE INDEX "index_users_by_id" ON "users" ("id"); +-- #[toasty::breakpoint] +CREATE UNIQUE INDEX "index_users_by_email" ON "users" ("email"); +-- #[toasty::breakpoint] +CREATE TABLE "todos" ( + "id" TEXT NOT NULL, + "user_id" TEXT NOT NULL, + "title" TEXT NOT NULL, + "completed" BOOLEAN NOT NULL, + PRIMARY KEY ("id") +); +-- #[toasty::breakpoint] +CREATE UNIQUE INDEX "index_todos_by_id" ON "todos" ("id"); +-- #[toasty::breakpoint] +CREATE INDEX "index_todos_by_user_id" ON "todos" ("user_id"); +-- #[toasty::breakpoint] +CREATE INDEX "index_todos_by_title" ON "todos" ("title"); \ No newline at end of file diff --git a/examples/todo-with-cli/toasty/snapshots/0000_snapshot.toml b/examples/todo-with-cli/toasty/snapshots/0000_snapshot.toml new file mode 100644 index 000000000..4b869b0eb --- /dev/null +++ b/examples/todo-with-cli/toasty/snapshots/0000_snapshot.toml @@ -0,0 +1,116 @@ +version = 1 + +[schema] + +[[schema.tables]] +id = 0 +name = "users" +primary_key = { columns = [{ table = 0, index = 0 }], index = { table = 0, index = 0 } } + +[[schema.tables.columns]] +id = { table = 0, index = 0 } +name = "id" +ty = "String" 
+storage_ty = "Text" +nullable = false +primary_key = false +auto_increment = false + +[[schema.tables.columns]] +id = { table = 0, index = 1 } +name = "name" +ty = "String" +storage_ty = "Text" +nullable = false +primary_key = false +auto_increment = false + +[[schema.tables.columns]] +id = { table = 0, index = 2 } +name = "email" +ty = "String" +storage_ty = "Text" +nullable = false +primary_key = false +auto_increment = false + +[[schema.tables.indices]] +id = { table = 0, index = 0 } +name = "index_users_by_id" +on = 0 +columns = [{ column = { table = 0, index = 0 }, op = "Eq", scope = "Partition" }] +unique = true +primary_key = true + +[[schema.tables.indices]] +id = { table = 0, index = 1 } +name = "index_users_by_email" +on = 0 +columns = [{ column = { table = 0, index = 2 }, op = "Eq", scope = "Partition" }] +unique = true +primary_key = false + +[[schema.tables]] +id = 1 +name = "todos" +primary_key = { columns = [{ table = 1, index = 0 }], index = { table = 1, index = 0 } } + +[[schema.tables.columns]] +id = { table = 1, index = 0 } +name = "id" +ty = "String" +storage_ty = "Text" +nullable = false +primary_key = false +auto_increment = false + +[[schema.tables.columns]] +id = { table = 1, index = 1 } +name = "user_id" +ty = "String" +storage_ty = "Text" +nullable = false +primary_key = false +auto_increment = false + +[[schema.tables.columns]] +id = { table = 1, index = 2 } +name = "title" +ty = "String" +storage_ty = "Text" +nullable = false +primary_key = false +auto_increment = false + +[[schema.tables.columns]] +id = { table = 1, index = 3 } +name = "completed" +ty = "Bool" +storage_ty = "Boolean" +nullable = false +primary_key = false +auto_increment = false + +[[schema.tables.indices]] +id = { table = 1, index = 0 } +name = "index_todos_by_id" +on = 1 +columns = [{ column = { table = 1, index = 0 }, op = "Eq", scope = "Partition" }] +unique = true +primary_key = true + +[[schema.tables.indices]] +id = { table = 1, index = 1 } +name = "index_todos_by_user_id" +on = 1 +columns = [{ column = { table = 1, index = 1 }, op = "Eq", scope = "Partition" }] +unique = false +primary_key = false + +[[schema.tables.indices]] +id = { table = 1, index = 2 } +name = "index_todos_by_title" +on = 1 +columns = [{ column = { table = 1, index = 2 }, op = "Eq", scope = "Partition" }] +unique = false +primary_key = false diff --git a/tests/src/logging_driver.rs b/tests/src/logging_driver.rs index 79d3bb001..2f64d3799 100644 --- a/tests/src/logging_driver.rs +++ b/tests/src/logging_driver.rs @@ -1,5 +1,8 @@ use std::sync::{Arc, Mutex}; -use toasty::driver::Driver; +use toasty::{ + driver::Driver, + schema::db::{AppliedMigration, Migration, SchemaDiff}, +}; use toasty_core::{ async_trait, driver::{Capability, Connection, Operation, Response, Rows}, @@ -38,6 +41,10 @@ impl Driver for LoggingDriver { ops_log: self.ops_log_handle(), })) } + + fn generate_migration(&self, schema_diff: &SchemaDiff<'_>) -> Migration { + self.inner.generate_migration(schema_diff) + } } #[derive(Debug)] @@ -90,6 +97,19 @@ impl Connection for LoggingConnection { async fn reset_db(&mut self, schema: &Schema) -> Result<()> { self.inner.reset_db(schema).await } + + async fn applied_migrations(&mut self) -> Result> { + self.inner.applied_migrations().await + } + + async fn apply_migration( + &mut self, + id: u64, + name: String, + migration: &Migration, + ) -> Result<()> { + self.inner.apply_migration(id, name, migration).await + } } /// Duplicate a Response, using ValueStream::dup() for value streams