diff --git a/Cargo.toml b/Cargo.toml index e209c51..40e0ace 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,16 +5,16 @@ resolver = "3" members = ["crates/*"] exclude = [ - "examples/basic", - "examples/filtering", - "examples/relations", - "examples/events", - "examples/hooks", - "examples/commands", - "examples/transactions", - "examples/soft-delete", - "examples/streams", - "examples/full-app", + "examples/basic", + "examples/filtering", + "examples/relations", + "examples/events", + "examples/hooks", + "examples/commands", + "examples/transactions", + "examples/soft-delete", + "examples/streams", + "examples/full-app", ] [workspace.package] @@ -26,9 +26,9 @@ license = "MIT" repository = "https://github.com/RAprogramm/entity-derive" [workspace.dependencies] -entity-core = { path = "crates/entity-core", version = "0.2.0" } -entity-derive = { path = "crates/entity-derive", version = "0.4.0" } -entity-derive-impl = { path = "crates/entity-derive-impl", version = "0.2.0" } +entity-core = { path = "crates/entity-core", version = "0.3.0" } +entity-derive = { path = "crates/entity-derive", version = "0.5.0" } +entity-derive-impl = { path = "crates/entity-derive-impl", version = "0.3.0" } syn = { version = "2", features = ["full", "extra-traits", "parsing"] } quote = "1" proc-macro2 = "1" diff --git a/README.md b/README.md index cd83d0e..5feb447 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@

+ entity-derive logo

entity-derive

One macro to rule them all diff --git a/crates/entity-core/Cargo.toml b/crates/entity-core/Cargo.toml index 67d8791..0a60cd1 100644 --- a/crates/entity-core/Cargo.toml +++ b/crates/entity-core/Cargo.toml @@ -3,7 +3,7 @@ [package] name = "entity-core" -version = "0.2.0" +version = "0.3.0" edition = "2024" rust-version = "1.92" authors = ["RAprogramm "] @@ -24,7 +24,9 @@ streams = ["serde", "serde_json", "futures"] [dependencies] async-trait = "0.1" -sqlx = { version = "0.8", optional = true, default-features = false, features = ["postgres"] } +sqlx = { version = "0.8", optional = true, default-features = false, features = [ + "postgres", +] } serde = { version = "1", features = ["derive"], optional = true } serde_json = { version = "1", optional = true } futures = { version = "0.3", optional = true } diff --git a/crates/entity-derive-impl/Cargo.toml b/crates/entity-derive-impl/Cargo.toml index e3be362..7ffe173 100644 --- a/crates/entity-derive-impl/Cargo.toml +++ b/crates/entity-derive-impl/Cargo.toml @@ -3,7 +3,7 @@ [package] name = "entity-derive-impl" -version = "0.2.0" +version = "0.3.0" edition = "2024" rust-version = "1.92" authors = ["RAprogramm "] @@ -34,7 +34,12 @@ uuid = { version = "1", features = ["v4", "v7", "serde"] } chrono = { version = "0.4", features = ["serde"] } serde = { version = "1", features = ["derive"] } async-trait = "0.1" -sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "uuid", "chrono"] } +sqlx = { version = "0.8", features = [ + "runtime-tokio", + "postgres", + "uuid", + "chrono", +] } tokio = { version = "1", features = ["macros", "rt-multi-thread"] } utoipa = { version = "5", features = ["chrono", "uuid"] } validator = { version = "0.20", features = ["derive"] } diff --git a/crates/entity-derive-impl/src/entity.rs b/crates/entity-derive-impl/src/entity.rs index e2974ba..52bb772 100644 --- a/crates/entity-derive-impl/src/entity.rs +++ b/crates/entity-derive-impl/src/entity.rs @@ -68,6 +68,7 @@ mod events; mod hooks; 
mod insertable; mod mappers; +mod migrations; pub mod parse; mod policy; mod projection; @@ -110,6 +111,7 @@ fn generate(entity: EntityDef) -> TokenStream { let insertable = insertable::generate(&entity); let mappers = mappers::generate(&entity); let sql = sql::generate(&entity); + let migrations = migrations::generate(&entity); let expanded = quote! { #dto @@ -127,6 +129,7 @@ fn generate(entity: EntityDef) -> TokenStream { #insertable #mappers #sql + #migrations }; expanded.into() diff --git a/crates/entity-derive-impl/src/entity/migrations.rs b/crates/entity-derive-impl/src/entity/migrations.rs new file mode 100644 index 0000000..7c0a777 --- /dev/null +++ b/crates/entity-derive-impl/src/entity/migrations.rs @@ -0,0 +1,118 @@ +// SPDX-FileCopyrightText: 2025-2026 RAprogramm +// SPDX-License-Identifier: MIT + +//! Migration generation for entity-derive. +//! +//! Generates `MIGRATION_UP` and `MIGRATION_DOWN` constants containing +//! SQL DDL statements for creating/dropping tables. +//! +//! # Features +//! +//! - Full type mapping (Rust → PostgreSQL) +//! - Column constraints (UNIQUE, CHECK, DEFAULT) +//! - Indexes (btree, hash, gin, gist, brin) +//! - Foreign keys with ON DELETE actions +//! - Composite indexes +//! +//! # Usage +//! +//! ```rust,ignore +//! #[derive(Entity)] +//! #[entity(table = "users", migrations)] +//! pub struct User { +//! #[id] +//! pub id: Uuid, +//! +//! #[column(unique, index)] +//! pub email: String, +//! } +//! +//! // Apply migration: +//! sqlx::query(User::MIGRATION_UP).execute(&pool).await?; +//! ``` + +mod postgres; +pub mod types; + +use proc_macro2::TokenStream; + +use super::parse::{DatabaseDialect, EntityDef}; + +/// Generate migration constants based on entity configuration. +/// +/// Returns empty `TokenStream` if migrations are not enabled. 
+pub fn generate(entity: &EntityDef) -> TokenStream { + if !entity.migrations { + return TokenStream::new(); + } + + match entity.dialect { + DatabaseDialect::Postgres => postgres::generate(entity), + DatabaseDialect::ClickHouse => TokenStream::new(), // TODO: future + DatabaseDialect::MongoDB => TokenStream::new() // N/A for document DB + } +} + +#[cfg(test)] +mod tests { + use syn::DeriveInput; + + use super::*; + + fn parse_entity(tokens: proc_macro2::TokenStream) -> EntityDef { + let input: DeriveInput = syn::parse_quote!(#tokens); + EntityDef::from_derive_input(&input).unwrap() + } + + #[test] + fn generate_returns_empty_when_migrations_disabled() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users")] + pub struct User { + #[id] + pub id: uuid::Uuid, + } + }); + let result = generate(&entity); + assert!(result.is_empty()); + } + + #[test] + fn generate_returns_tokens_when_migrations_enabled() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + } + }); + let result = generate(&entity); + assert!(!result.is_empty()); + } + + #[test] + fn generate_returns_empty_for_clickhouse() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", dialect = "clickhouse", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + } + }); + let result = generate(&entity); + assert!(result.is_empty()); + } + + #[test] + fn generate_returns_empty_for_mongodb() { + let entity = parse_entity(quote::quote! 
{ + #[entity(table = "users", dialect = "mongodb", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + } + }); + let result = generate(&entity); + assert!(result.is_empty()); + } +} diff --git a/crates/entity-derive-impl/src/entity/migrations/postgres.rs b/crates/entity-derive-impl/src/entity/migrations/postgres.rs new file mode 100644 index 0000000..1ba3a99 --- /dev/null +++ b/crates/entity-derive-impl/src/entity/migrations/postgres.rs @@ -0,0 +1,52 @@ +// SPDX-FileCopyrightText: 2025-2026 RAprogramm +// SPDX-License-Identifier: MIT + +//! PostgreSQL migration generation. +//! +//! Generates `MIGRATION_UP` and `MIGRATION_DOWN` constants for PostgreSQL. + +mod ddl; + +use proc_macro2::TokenStream; +use quote::quote; + +use crate::{entity::parse::EntityDef, utils::marker}; + +/// Generate migration constants for PostgreSQL. +/// +/// # Generated Code +/// +/// ```rust,ignore +/// impl User { +/// pub const MIGRATION_UP: &'static str = "CREATE TABLE..."; +/// pub const MIGRATION_DOWN: &'static str = "DROP TABLE..."; +/// } +/// ``` +pub fn generate(entity: &EntityDef) -> TokenStream { + let entity_name = entity.name(); + let vis = &entity.vis; + + let up_sql = ddl::generate_up(entity); + let down_sql = ddl::generate_down(entity); + + let marker = marker::generated(); + + quote! { + #marker + impl #entity_name { + /// SQL migration to create this entity's table, indexes, and constraints. + /// + /// # Usage + /// + /// ```rust,ignore + /// sqlx::query(User::MIGRATION_UP).execute(&pool).await?; + /// ``` + #vis const MIGRATION_UP: &'static str = #up_sql; + + /// SQL migration to drop this entity's table. + /// + /// Uses CASCADE to drop dependent objects. 
+ #vis const MIGRATION_DOWN: &'static str = #down_sql; + } + } +} diff --git a/crates/entity-derive-impl/src/entity/migrations/postgres/ddl.rs b/crates/entity-derive-impl/src/entity/migrations/postgres/ddl.rs new file mode 100644 index 0000000..b6cf425 --- /dev/null +++ b/crates/entity-derive-impl/src/entity/migrations/postgres/ddl.rs @@ -0,0 +1,512 @@ +// SPDX-FileCopyrightText: 2025-2026 RAprogramm +// SPDX-License-Identifier: MIT + +//! DDL (Data Definition Language) generation for PostgreSQL. +//! +//! Generates CREATE TABLE, CREATE INDEX, and DROP TABLE statements. + +use convert_case::{Case, Casing}; + +use crate::entity::{ + migrations::types::{PostgresTypeMapper, TypeMapper}, + parse::{CompositeIndexDef, EntityDef, FieldDef} +}; + +/// Generate the complete UP migration SQL. +/// +/// Includes: +/// - CREATE TABLE with columns and constraints +/// - CREATE INDEX for single-column indexes +/// - CREATE INDEX for composite indexes +pub fn generate_up(entity: &EntityDef) -> String { + let mut sql = String::new(); + + // CREATE TABLE + sql.push_str(&generate_create_table(entity)); + + // Single-column indexes + for field in entity.all_fields() { + if field.column().has_index() { + sql.push_str(&generate_single_index(entity, field)); + } + } + + // Composite indexes + for idx in &entity.indexes { + sql.push_str(&generate_composite_index(entity, idx)); + } + + sql +} + +/// Generate the DOWN migration SQL. +pub fn generate_down(entity: &EntityDef) -> String { + format!( + "DROP TABLE IF EXISTS {} CASCADE;\n", + entity.full_table_name() + ) +} + +/// Generate CREATE TABLE statement. 
+fn generate_create_table(entity: &EntityDef) -> String { + let mapper = PostgresTypeMapper; + let full_table = entity.full_table_name(); + + let columns: Vec<String> = entity + .all_fields() + .iter() + .map(|f| generate_column_def(f, &mapper, entity)) + .collect(); + + format!( + "CREATE TABLE IF NOT EXISTS {} (\n{}\n);\n", + full_table, + columns.join(",\n") + ) +} + +/// Generate a single column definition. +fn generate_column_def( + field: &FieldDef, + mapper: &PostgresTypeMapper, + entity: &EntityDef +) -> String { + let column_name = field.column_name(); + let sql_type = mapper.map_type(field.ty(), field.column()); + + let mut parts = vec![format!(" {}", column_name)]; + + // Type with array suffix + parts.push(sql_type.to_sql_string()); + + // PRIMARY KEY for #[id] fields + if field.is_id() { + parts.push("PRIMARY KEY".to_string()); + } else if !sql_type.nullable { + // NOT NULL unless nullable + parts.push("NOT NULL".to_string()); + } + + // UNIQUE constraint + if field.is_unique() { + parts.push("UNIQUE".to_string()); + } + + // DEFAULT value + if let Some(ref default) = field.column().default { + parts.push(format!("DEFAULT {}", default)); + } + + // CHECK constraint + if let Some(ref check) = field.column().check { + parts.push(format!("CHECK ({})", check)); + } + + // Foreign key REFERENCES from #[belongs_to] + if field.is_relation() + && let Some(parent) = field.belongs_to() + { + let parent_table = parent.to_string().to_case(Case::Snake); + // Use same schema as current entity for the reference + let ref_table = format!("{}.{}", entity.schema, pluralize(&parent_table)); + let mut fk_str = format!("REFERENCES {}(id)", ref_table); + + if let Some(action) = &field.storage.on_delete { + fk_str.push_str(&format!(" ON DELETE {}", action.as_sql())); + } + + parts.push(fk_str); + } + + parts.join(" ") +} + +/// Generate CREATE INDEX for a single column.
+fn generate_single_index(entity: &EntityDef, field: &FieldDef) -> String { + let table = &entity.table; + let schema = &entity.schema; + let column = field.column_name(); + + let index_type = field.column().index.unwrap_or_default(); + let index_name = format!("idx_{}_{}", table, column); + let using = index_type.as_sql_using(); + + format!( + "CREATE INDEX IF NOT EXISTS {} ON {}.{}{} ({});\n", + index_name, schema, table, using, column + ) +} + +/// Generate CREATE INDEX for a composite index. +fn generate_composite_index(entity: &EntityDef, idx: &CompositeIndexDef) -> String { + let table = &entity.table; + let schema = &entity.schema; + + let index_name = idx.name_or_default(table); + let using = idx.index_type.as_sql_using(); + let unique_str = if idx.unique { "UNIQUE " } else { "" }; + let columns = idx.columns.join(", "); + + let mut sql = format!( + "CREATE {}INDEX IF NOT EXISTS {} ON {}.{}{} ({})", + unique_str, index_name, schema, table, using, columns + ); + + if let Some(ref where_clause) = idx.where_clause { + sql.push_str(&format!(" WHERE {}", where_clause)); + } + + sql.push_str(";\n"); + sql +} + +/// Simple pluralization for table names. 
+fn pluralize(s: &str) -> String { + if s.ends_with('s') || s.ends_with("sh") || s.ends_with("ch") || s.ends_with('x') { + format!("{}es", s) + } else if s.ends_with('y') && !s.ends_with("ay") && !s.ends_with("ey") && !s.ends_with("oy") { + format!("{}ies", &s[..s.len() - 1]) + } else { + format!("{}s", s) + } +} + +#[cfg(test)] +mod tests { + use syn::DeriveInput; + + use super::*; + use crate::entity::parse::EntityDef; + + fn parse_entity(tokens: proc_macro2::TokenStream) -> EntityDef { + let input: DeriveInput = syn::parse_quote!(#tokens); + EntityDef::from_derive_input(&input).unwrap() + } + + #[test] + fn pluralize_regular() { + assert_eq!(pluralize("user"), "users"); + assert_eq!(pluralize("post"), "posts"); + } + + #[test] + fn pluralize_es() { + assert_eq!(pluralize("status"), "statuses"); + assert_eq!(pluralize("match"), "matches"); + } + + #[test] + fn pluralize_ies() { + assert_eq!(pluralize("category"), "categories"); + assert_eq!(pluralize("company"), "companies"); + } + + #[test] + fn pluralize_ey_oy() { + assert_eq!(pluralize("key"), "keys"); + assert_eq!(pluralize("toy"), "toys"); + } + + #[test] + fn pluralize_sh() { + assert_eq!(pluralize("wish"), "wishes"); + assert_eq!(pluralize("bush"), "bushes"); + } + + #[test] + fn pluralize_x() { + assert_eq!(pluralize("box"), "boxes"); + assert_eq!(pluralize("fox"), "foxes"); + } + + #[test] + fn pluralize_ay() { + assert_eq!(pluralize("day"), "days"); + assert_eq!(pluralize("way"), "ways"); + } + + #[test] + fn generate_up_basic() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + pub name: String, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("CREATE TABLE IF NOT EXISTS public.users")); + assert!(sql.contains("id UUID PRIMARY KEY")); + assert!(sql.contains("name TEXT NOT NULL")); + } + + #[test] + fn generate_down_basic() { + let entity = parse_entity(quote::quote! 
{ + #[entity(table = "users", schema = "core", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + } + }); + let sql = generate_down(&entity); + assert_eq!(sql, "DROP TABLE IF EXISTS core.users CASCADE;\n"); + } + + #[test] + fn generate_up_with_unique() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[column(unique)] + pub email: String, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("email TEXT NOT NULL UNIQUE")); + } + + #[test] + fn generate_up_with_default() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[column(default = "true")] + pub active: bool, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("DEFAULT true")); + } + + #[test] + fn generate_up_with_check() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[column(check = "age >= 0")] + pub age: i32, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("CHECK (age >= 0)")); + } + + #[test] + fn generate_up_with_index() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[column(index)] + pub status: String, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("CREATE INDEX IF NOT EXISTS idx_users_status")); + } + + #[test] + fn generate_up_with_gin_index() { + let entity = parse_entity(quote::quote! 
{ + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[column(index = "gin")] + pub tags: Vec<String>, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("USING gin")); + } + + #[test] + fn generate_up_with_nullable() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + pub bio: Option<String>, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("bio TEXT")); + assert!(!sql.contains("bio TEXT NOT NULL")); + } + + #[test] + fn generate_up_with_varchar() { + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[column(varchar = 100)] + pub name: String, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("VARCHAR(100)")); + } + + #[test] + fn generate_up_with_belongs_to() { + let entity = parse_entity(quote::quote! { + #[entity(table = "posts", migrations)] + pub struct Post { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[belongs_to(User)] + pub user_id: uuid::Uuid, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("REFERENCES public.users(id)")); + } + + #[test] + fn generate_up_with_belongs_to_on_delete_cascade() { + let entity = parse_entity(quote::quote!
{ + #[entity(table = "posts", migrations)] + pub struct Post { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + #[belongs_to(User, on_delete = "cascade")] + pub user_id: uuid::Uuid, + } + }); + let sql = generate_up(&entity); + assert!(sql.contains("REFERENCES public.users(id) ON DELETE CASCADE")); + } + + #[test] + fn generate_composite_index_basic() { + let idx = CompositeIndexDef { + name: None, + columns: vec!["name".to_string(), "email".to_string()], + index_type: crate::entity::parse::IndexType::BTree, + unique: false, + where_clause: None + }; + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + pub name: String, + #[field(create, response)] + pub email: String, + } + }); + let sql = generate_composite_index(&entity, &idx); + assert!(sql.contains("CREATE INDEX IF NOT EXISTS idx_users_name_email")); + assert!(sql.contains("(name, email)")); + } + + #[test] + fn generate_composite_index_unique() { + let idx = CompositeIndexDef { + name: None, + columns: vec!["tenant_id".to_string(), "email".to_string()], + index_type: crate::entity::parse::IndexType::BTree, + unique: true, + where_clause: None + }; + let entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + } + }); + let sql = generate_composite_index(&entity, &idx); + assert!(sql.contains("CREATE UNIQUE INDEX")); + assert!(sql.contains("(tenant_id, email)")); + } + + #[test] + fn generate_composite_index_with_where() { + let idx = CompositeIndexDef { + name: Some("idx_active_users".to_string()), + columns: vec!["email".to_string()], + index_type: crate::entity::parse::IndexType::BTree, + unique: false, + where_clause: Some("active = true".to_string()) + }; + let entity = parse_entity(quote::quote! 
{ + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + } + }); + let sql = generate_composite_index(&entity, &idx); + assert!(sql.contains("idx_active_users")); + assert!(sql.contains("WHERE active = true")); + } + + #[test] + fn generate_composite_index_gin() { + let idx = CompositeIndexDef { + name: None, + columns: vec!["tags".to_string()], + index_type: crate::entity::parse::IndexType::Gin, + unique: false, + where_clause: None + }; + let entity = parse_entity(quote::quote! { + #[entity(table = "posts", migrations)] + pub struct Post { + #[id] + pub id: uuid::Uuid, + } + }); + let sql = generate_composite_index(&entity, &idx); + assert!(sql.contains("USING gin")); + } + + #[test] + fn generate_up_with_composite_indexes() { + let mut entity = parse_entity(quote::quote! { + #[entity(table = "users", migrations)] + pub struct User { + #[id] + pub id: uuid::Uuid, + #[field(create, response)] + pub name: String, + #[field(create, response)] + pub email: String, + } + }); + entity.indexes.push(CompositeIndexDef { + name: None, + columns: vec!["name".to_string(), "email".to_string()], + index_type: crate::entity::parse::IndexType::BTree, + unique: false, + where_clause: None + }); + let sql = generate_up(&entity); + assert!(sql.contains("CREATE INDEX IF NOT EXISTS idx_users_name_email")); + } +} diff --git a/crates/entity-derive-impl/src/entity/migrations/types.rs b/crates/entity-derive-impl/src/entity/migrations/types.rs new file mode 100644 index 0000000..9e50c00 --- /dev/null +++ b/crates/entity-derive-impl/src/entity/migrations/types.rs @@ -0,0 +1,142 @@ +// SPDX-FileCopyrightText: 2025-2026 RAprogramm +// SPDX-License-Identifier: MIT + +//! Type mapping from Rust to database-specific SQL types. +//! +//! This module provides traits and implementations for mapping Rust types +//! to their SQL equivalents during migration generation. +//! +//! # Architecture +//! +//! ```text +//! 
┌─────────────────────────────────────────────────────────────────────┐ +//! │ Type Mapping System │ +//! ├─────────────────────────────────────────────────────────────────────┤ +//! │ │ +//! │ Rust Type TypeMapper SQL Type │ +//! │ │ +//! │ Uuid ──► PostgresMapper ──► UUID │ +//! │ String ──► ──► TEXT / VARCHAR(n) │ +//! │ i32 ──► ──► INTEGER │ +//! │ DateTime<Utc> ──► ──► TIMESTAMPTZ │ +//! │ Option<T> ──► ──► T (nullable) │ +//! │ Vec<T> ──► ──► T[] │ +//! │ │ +//! └─────────────────────────────────────────────────────────────────────┘ +//! ``` + +mod postgres; + +pub use postgres::PostgresTypeMapper; +use syn::Type; + +use crate::entity::parse::ColumnConfig; + +/// Mapped SQL type representation. +#[derive(Debug, Clone)] +pub struct SqlType { + /// SQL type name (e.g., "UUID", "TEXT", "INTEGER"). + pub name: String, + + /// Whether this type allows NULL values. + pub nullable: bool, + + /// Array dimension (0 = scalar, 1 = T[], 2 = T[][], etc.). + pub array_dim: usize +} + +impl SqlType { + /// Create a non-nullable SQL type. + #[cfg(test)] + #[must_use] + pub fn new(name: impl Into<String>) -> Self { + Self { + name: name.into(), + nullable: false, + array_dim: 0 + } + } + + /// Create a nullable SQL type. + #[cfg(test)] + #[must_use] + pub fn nullable(name: impl Into<String>) -> Self { + Self { + name: name.into(), + nullable: true, + array_dim: 0 + } + } + + /// Get the full SQL type string with array suffix. + #[must_use] + pub fn to_sql_string(&self) -> String { + if self.array_dim > 0 { + format!("{}{}", self.name, "[]".repeat(self.array_dim)) + } else { + self.name.clone() + } + } +} + +/// Trait for mapping Rust types to SQL types. +/// +/// Implement this trait for each database dialect. +pub trait TypeMapper { + /// Map a Rust type to its SQL representation. + /// + /// # Arguments + /// + /// * `ty` - The Rust type from syn + /// * `column` - Column configuration with overrides + /// + /// # Returns + /// + /// `SqlType` with name, nullable flag, and array dimension.
+ fn map_type(&self, ty: &Type, column: &ColumnConfig) -> SqlType; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn sql_type_new() { + let ty = SqlType::new("INTEGER"); + assert_eq!(ty.name, "INTEGER"); + assert!(!ty.nullable); + assert_eq!(ty.array_dim, 0); + } + + #[test] + fn sql_type_nullable() { + let ty = SqlType::nullable("TEXT"); + assert!(ty.nullable); + } + + #[test] + fn sql_type_to_sql_string_scalar() { + let ty = SqlType::new("UUID"); + assert_eq!(ty.to_sql_string(), "UUID"); + } + + #[test] + fn sql_type_to_sql_string_array() { + let ty = SqlType { + name: "TEXT".to_string(), + nullable: false, + array_dim: 1 + }; + assert_eq!(ty.to_sql_string(), "TEXT[]"); + } + + #[test] + fn sql_type_to_sql_string_2d_array() { + let ty = SqlType { + name: "INTEGER".to_string(), + nullable: false, + array_dim: 2 + }; + assert_eq!(ty.to_sql_string(), "INTEGER[][]"); + } +} diff --git a/crates/entity-derive-impl/src/entity/migrations/types/postgres.rs b/crates/entity-derive-impl/src/entity/migrations/types/postgres.rs new file mode 100644 index 0000000..8e0582a --- /dev/null +++ b/crates/entity-derive-impl/src/entity/migrations/types/postgres.rs @@ -0,0 +1,318 @@ +// SPDX-FileCopyrightText: 2025-2026 RAprogramm +// SPDX-License-Identifier: MIT + +//! PostgreSQL type mapping. +//! +//! Maps Rust types to PostgreSQL types for migration generation. +//! +//! # Type Mapping Table +//! +//! | Rust Type | PostgreSQL Type | Notes | +//! |-----------|-----------------|-------| +//! | `Uuid` | `UUID` | | +//! | `String` | `TEXT` | Or `VARCHAR(n)` with `#[column(varchar = n)]` | +//! | `i16` | `SMALLINT` | | +//! | `i32` | `INTEGER` | | +//! | `i64` | `BIGINT` | | +//! | `f32` | `REAL` | | +//! | `f64` | `DOUBLE PRECISION` | | +//! | `bool` | `BOOLEAN` | | +//! | `DateTime` | `TIMESTAMPTZ` | | +//! | `NaiveDate` | `DATE` | | +//! | `NaiveTime` | `TIME` | | +//! | `NaiveDateTime` | `TIMESTAMP` | | +//! | `Option` | `T` | Nullable | +//! 
| `Vec` | `T[]` | PostgreSQL array | +//! | `serde_json::Value` | `JSONB` | | +//! | `Decimal` | `DECIMAL` | | +//! | `IpAddr` | `INET` | | + +use syn::Type; + +use super::{SqlType, TypeMapper}; +use crate::entity::parse::ColumnConfig; + +/// PostgreSQL type mapper. +/// +/// Converts Rust types to PostgreSQL SQL types with full support for: +/// - Primitive types (integers, floats, booleans) +/// - String types with optional VARCHAR length +/// - Date/time types from chrono +/// - UUID from uuid crate +/// - JSON from serde_json +/// - Arrays via Vec +/// - Nullable types via Option +pub struct PostgresTypeMapper; + +impl TypeMapper for PostgresTypeMapper { + fn map_type(&self, ty: &Type, column: &ColumnConfig) -> SqlType { + // Handle explicit SQL type override + if let Some(ref explicit) = column.sql_type { + return SqlType { + name: explicit.clone(), + nullable: is_option(ty) || column.nullable, + array_dim: 0 + }; + } + + // Handle Option + if let Some(inner) = extract_option_inner(ty) { + let mut result = self.map_type(inner, column); + result.nullable = true; + return result; + } + + // Handle Vec (PostgreSQL arrays) + if let Some(inner) = extract_vec_inner(ty) { + let mut result = self.map_type(inner, column); + result.array_dim += 1; + return result; + } + + // Map core types + let name = map_type_name(ty, column); + + SqlType { + name, + nullable: column.nullable, + array_dim: 0 + } + } +} + +/// Map a Rust type path to PostgreSQL type name. 
+fn map_type_name(ty: &Type, column: &ColumnConfig) -> String { + let type_str = type_path_string(ty); + + match type_str.as_str() { + // UUIDs + "Uuid" | "uuid::Uuid" => "UUID".to_string(), + + // Strings + "String" | "str" => { + if let Some(len) = column.varchar { + format!("VARCHAR({})", len) + } else { + "TEXT".to_string() + } + } + + // Integers + "i8" => "SMALLINT".to_string(), // PostgreSQL has no TINYINT + "i16" => "SMALLINT".to_string(), + "i32" => "INTEGER".to_string(), + "i64" => "BIGINT".to_string(), + "u8" => "SMALLINT".to_string(), + "u16" => "INTEGER".to_string(), + "u32" => "BIGINT".to_string(), + "u64" => "BIGINT".to_string(), // May overflow + + // Floats + "f32" => "REAL".to_string(), + "f64" => "DOUBLE PRECISION".to_string(), + + // Boolean + "bool" => "BOOLEAN".to_string(), + + // Date/Time (chrono) + "DateTime" | "chrono::DateTime" => "TIMESTAMPTZ".to_string(), + "NaiveDate" | "chrono::NaiveDate" => "DATE".to_string(), + "NaiveTime" | "chrono::NaiveTime" => "TIME".to_string(), + "NaiveDateTime" | "chrono::NaiveDateTime" => "TIMESTAMP".to_string(), + + // JSON + "Value" | "serde_json::Value" | "Json" | "sqlx::types::Json" => "JSONB".to_string(), + + // Decimal + "Decimal" | "rust_decimal::Decimal" | "BigDecimal" | "bigdecimal::BigDecimal" => { + "DECIMAL".to_string() + } + + // Network + "IpAddr" | "std::net::IpAddr" | "Ipv4Addr" | "Ipv6Addr" => "INET".to_string(), + "MacAddr" => "MACADDR".to_string(), + + // Binary + "Vec" | "bytes::Bytes" => "BYTEA".to_string(), + + // Fallback to TEXT for unknown types + _ => "TEXT".to_string() + } +} + +/// Extract the type path as a string. +fn type_path_string(ty: &Type) -> String { + if let Type::Path(type_path) = ty { + type_path + .path + .segments + .iter() + .map(|s| s.ident.to_string()) + .collect::<Vec<_>>() + .join("::") + } else { + String::new() + } +} + +/// Check if a type is `Option<T>`. +fn is_option(ty: &Type) -> bool { + extract_option_inner(ty).is_some() +} + +/// Extract the inner type from `Option<T>`.
+fn extract_option_inner(ty: &Type) -> Option<&Type> { + if let Type::Path(type_path) = ty + && let Some(segment) = type_path.path.segments.last() + && segment.ident == "Option" + && let syn::PathArguments::AngleBracketed(args) = &segment.arguments + && let Some(syn::GenericArgument::Type(inner)) = args.args.first() + { + return Some(inner); + } + None +} + +/// Extract the inner type from Vec. +fn extract_vec_inner(ty: &Type) -> Option<&Type> { + if let Type::Path(type_path) = ty + && let Some(segment) = type_path.path.segments.last() + && segment.ident == "Vec" + && let syn::PathArguments::AngleBracketed(args) = &segment.arguments + && let Some(syn::GenericArgument::Type(inner)) = args.args.first() + { + return Some(inner); + } + None +} + +#[cfg(test)] +mod tests { + use syn::parse_quote; + + use super::*; + + fn map_type(ty_tokens: proc_macro2::TokenStream) -> SqlType { + let ty: Type = parse_quote!(#ty_tokens); + let column = ColumnConfig::default(); + PostgresTypeMapper.map_type(&ty, &column) + } + + fn map_type_with_column(ty_tokens: proc_macro2::TokenStream, column: ColumnConfig) -> SqlType { + let ty: Type = parse_quote!(#ty_tokens); + PostgresTypeMapper.map_type(&ty, &column) + } + + #[test] + fn map_uuid() { + let ty = map_type(quote::quote! { Uuid }); + assert_eq!(ty.name, "UUID"); + assert!(!ty.nullable); + } + + #[test] + fn map_string() { + let ty = map_type(quote::quote! { String }); + assert_eq!(ty.name, "TEXT"); + } + + #[test] + fn map_string_varchar() { + let column = ColumnConfig { + varchar: Some(255), + ..Default::default() + }; + let ty = map_type_with_column(quote::quote! { String }, column); + assert_eq!(ty.name, "VARCHAR(255)"); + } + + #[test] + fn map_integers() { + assert_eq!(map_type(quote::quote! { i16 }).name, "SMALLINT"); + assert_eq!(map_type(quote::quote! { i32 }).name, "INTEGER"); + assert_eq!(map_type(quote::quote! { i64 }).name, "BIGINT"); + } + + #[test] + fn map_floats() { + assert_eq!(map_type(quote::quote! 
{ f32 }).name, "REAL"); + assert_eq!(map_type(quote::quote! { f64 }).name, "DOUBLE PRECISION"); + } + + #[test] + fn map_bool() { + assert_eq!(map_type(quote::quote! { bool }).name, "BOOLEAN"); + } + + #[test] + fn map_datetime() { + let ty = map_type(quote::quote! { DateTime<Utc> }); + assert_eq!(ty.name, "TIMESTAMPTZ"); + } + + #[test] + fn map_naive_date() { + assert_eq!(map_type(quote::quote! { NaiveDate }).name, "DATE"); + } + + #[test] + fn map_option_nullable() { + let ty = map_type(quote::quote! { Option<String> }); + assert_eq!(ty.name, "TEXT"); + assert!(ty.nullable); + } + + #[test] + fn map_vec_to_array() { + let ty = map_type(quote::quote! { Vec<String> }); + assert_eq!(ty.name, "TEXT"); + assert_eq!(ty.array_dim, 1); + assert_eq!(ty.to_sql_string(), "TEXT[]"); + } + + #[test] + fn map_vec_option() { + let ty = map_type(quote::quote! { Vec<Option<i32>> }); + assert_eq!(ty.name, "INTEGER"); + assert!(ty.nullable); + assert_eq!(ty.array_dim, 1); + } + + #[test] + fn map_option_vec() { + let ty = map_type(quote::quote! { Option<Vec<i32>> }); + assert_eq!(ty.name, "INTEGER"); + assert!(ty.nullable); + assert_eq!(ty.array_dim, 1); + } + + #[test] + fn map_json() { + assert_eq!(map_type(quote::quote! { serde_json::Value }).name, "JSONB"); + } + + #[test] + fn map_explicit_sql_type() { + let column = ColumnConfig { + sql_type: Some("CITEXT".to_string()), + ..Default::default() + }; + let ty = map_type_with_column(quote::quote! { String }, column); + assert_eq!(ty.name, "CITEXT"); + } + + #[test] + fn map_decimal() { + assert_eq!(map_type(quote::quote! { Decimal }).name, "DECIMAL"); + } + + #[test] + fn map_ip_addr() { + assert_eq!(map_type(quote::quote! { IpAddr }).name, "INET"); + } + + #[test] + fn map_unknown_to_text() { + assert_eq!(map_type(quote::quote!
{ MyCustomType }).name, "TEXT"); + } +} diff --git a/crates/entity-derive-impl/src/entity/parse.rs b/crates/entity-derive-impl/src/entity/parse.rs index 5489b00..31112f4 100644 --- a/crates/entity-derive-impl/src/entity/parse.rs +++ b/crates/entity-derive-impl/src/entity/parse.rs @@ -120,10 +120,11 @@ mod uuid_version; pub use api::ApiConfig; pub use command::{CommandDef, CommandKindHint, CommandSource}; pub use dialect::DatabaseDialect; -pub use entity::{EntityDef, ProjectionDef}; +pub use entity::{CompositeIndexDef, EntityDef, ProjectionDef}; #[allow(unused_imports)] // Will be used for OpenAPI schema examples (#80) pub use field::ExampleValue; -pub use field::{FieldDef, FilterType}; +#[allow(unused_imports)] // Re-exported for migration generation tests +pub use field::{ColumnConfig, FieldDef, FilterType, IndexType}; pub use returning::ReturningMode; pub use sql_level::SqlLevel; pub use uuid_version::UuidVersion; diff --git a/crates/entity-derive-impl/src/entity/parse/entity.rs b/crates/entity-derive-impl/src/entity/parse/entity.rs index 5d100d0..c360ce5 100644 --- a/crates/entity-derive-impl/src/entity/parse/entity.rs +++ b/crates/entity-derive-impl/src/entity/parse/entity.rs @@ -142,10 +142,12 @@ mod attrs; mod constructor; mod def; mod helpers; +mod index; mod projection; pub use attrs::EntityAttrs; pub use def::EntityDef; +pub use index::CompositeIndexDef; pub use projection::{ProjectionDef, parse_projection_attrs}; #[cfg(test)] diff --git a/crates/entity-derive-impl/src/entity/parse/entity/attrs.rs b/crates/entity-derive-impl/src/entity/parse/entity/attrs.rs index eb8b08f..1aa02ef 100644 --- a/crates/entity-derive-impl/src/entity/parse/entity/attrs.rs +++ b/crates/entity-derive-impl/src/entity/parse/entity/attrs.rs @@ -245,5 +245,29 @@ pub struct EntityAttrs { /// .await?; /// ``` #[darling(default)] - pub transactions: bool + pub transactions: bool, + + /// Enable migration generation. 
+ /// + /// When enabled, generates: + /// - `{Entity}::MIGRATION_UP` — SQL to create the table + /// - `{Entity}::MIGRATION_DOWN` — SQL to drop the table + /// + /// # Example + /// + /// ```rust,ignore + /// #[entity(table = "users", migrations)] + /// pub struct User { + /// #[id] + /// pub id: Uuid, + /// #[column(unique, index)] + /// pub email: String, + /// } + /// + /// // Generated: + /// // User::MIGRATION_UP → CREATE TABLE core.users (...) + /// // User::MIGRATION_DOWN → DROP TABLE core.users CASCADE + /// ``` + #[darling(default)] + pub migrations: bool } diff --git a/crates/entity-derive-impl/src/entity/parse/entity/constructor.rs b/crates/entity-derive-impl/src/entity/parse/entity/constructor.rs index 37d5ddc..740e8fe 100644 --- a/crates/entity-derive-impl/src/entity/parse/entity/constructor.rs +++ b/crates/entity-derive-impl/src/entity/parse/entity/constructor.rs @@ -61,7 +61,7 @@ use syn::DeriveInput; use super::{ super::{command::parse_command_attrs, field::FieldDef}, EntityAttrs, EntityDef, - helpers::{parse_api_attr, parse_has_many_attrs}, + helpers::{parse_api_attr, parse_has_many_attrs, parse_index_attrs}, parse_projection_attrs }; use crate::utils::docs::extract_doc_comments; @@ -130,6 +130,7 @@ impl EntityDef { let projections = parse_projection_attrs(&input.attrs); let command_defs = parse_command_attrs(&input.attrs); let api_config = parse_api_attr(&input.attrs); + let indexes = parse_index_attrs(&input.attrs); let doc = extract_doc_comments(&input.attrs); let id_field_index = fields.iter().position(|f| f.is_id()).ok_or_else(|| { @@ -160,7 +161,9 @@ impl EntityDef { streams: attrs.streams, transactions: attrs.transactions, api_config, - doc + doc, + migrations: attrs.migrations, + indexes }) } } diff --git a/crates/entity-derive-impl/src/entity/parse/entity/def.rs b/crates/entity-derive-impl/src/entity/parse/entity/def.rs index f1c9ddb..31a18f8 100644 --- a/crates/entity-derive-impl/src/entity/parse/entity/def.rs +++ 
b/crates/entity-derive-impl/src/entity/parse/entity/def.rs @@ -63,7 +63,7 @@ use super::{ api::ApiConfig, command::CommandDef, dialect::DatabaseDialect, field::FieldDef, returning::ReturningMode, sql_level::SqlLevel, uuid_version::UuidVersion }, - ProjectionDef + CompositeIndexDef, ProjectionDef }; /// Complete parsed entity definition. @@ -204,5 +204,16 @@ pub struct EntityDef { /// Documentation comment from the entity struct. /// /// Extracted from `///` comments for use in OpenAPI tag descriptions. - pub doc: Option + pub doc: Option, + + /// Whether to generate database migrations. + /// + /// When `true`, generates `MIGRATION_UP` and `MIGRATION_DOWN` constants + /// with SQL DDL statements for creating/dropping the table. + pub migrations: bool, + + /// Composite index definitions from `#[entity(index(...))]`. + /// + /// Each entry defines an index spanning multiple columns. + pub indexes: Vec } diff --git a/crates/entity-derive-impl/src/entity/parse/entity/helpers.rs b/crates/entity-derive-impl/src/entity/parse/entity/helpers.rs index c20382b..bbfb596 100644 --- a/crates/entity-derive-impl/src/entity/parse/entity/helpers.rs +++ b/crates/entity-derive-impl/src/entity/parse/entity/helpers.rs @@ -58,7 +58,13 @@ use syn::{Attribute, Ident}; -use super::super::api::{ApiConfig, parse_api_config}; +use super::{ + super::{ + api::{ApiConfig, parse_api_config}, + field::IndexType + }, + CompositeIndexDef +}; /// Parse `#[has_many(Entity)]` attributes from struct attributes. /// @@ -152,3 +158,359 @@ pub fn parse_api_attr(attrs: &[Attribute]) -> ApiConfig { ApiConfig::default() } + +/// Parse `index(...)` and `unique_index(...)` from `#[entity(...)]` attribute. +/// +/// Extracts composite index definitions from the entity attribute. 
+/// +/// # Syntax +/// +/// ```text +/// #[entity( +/// table = "users", +/// index(name, email), // Btree composite index +/// index(type = "gin", tags), // GIN index +/// unique_index(tenant_id, email), // Unique composite +/// index(name = "idx_custom", status), // Named index +/// )] +/// ``` +/// +/// # Returns +/// +/// Vector of `CompositeIndexDef` with parsed configurations. +pub fn parse_index_attrs(attrs: &[Attribute]) -> Vec { + let mut indexes = Vec::new(); + + for attr in attrs { + if !attr.path().is_ident("entity") { + continue; + } + + let _ = attr.parse_nested_meta(|meta| { + let is_index = meta.path.is_ident("index"); + let is_unique_index = meta.path.is_ident("unique_index"); + + if is_index || is_unique_index { + if let Ok(idx) = parse_index_content(&meta, is_unique_index) { + indexes.push(idx); + } + } else if meta.input.peek(syn::Token![=]) { + // Consume `key = value` style attributes (e.g., table = "users") + let _: syn::Token![=] = meta.input.parse()?; + let _: syn::Expr = meta.input.parse()?; + } else if meta.input.peek(syn::token::Paren) { + // Consume `key(...)` style attributes we don't handle + let content; + syn::parenthesized!(content in meta.input); + let _: proc_macro2::TokenStream = content.parse()?; + } + Ok(()) + }); + } + + indexes +} + +/// Parse the content of an index(...) or unique_index(...) attribute. 
+fn parse_index_content( + meta: &syn::meta::ParseNestedMeta<'_>, + unique: bool +) -> syn::Result { + let mut columns = Vec::new(); + let mut name = None; + let mut index_type = IndexType::default(); + let mut where_clause = None; + + meta.parse_nested_meta(|nested| { + // Check if this is a key = value option by peeking for `=` + let has_value = nested.input.peek(syn::Token![=]); + + if has_value && nested.path.is_ident("type") { + let _: syn::Token![=] = nested.input.parse()?; + let value: syn::LitStr = nested.input.parse()?; + index_type = IndexType::from_str(&value.value()).unwrap_or_default(); + } else if has_value && nested.path.is_ident("name") { + let _: syn::Token![=] = nested.input.parse()?; + let value: syn::LitStr = nested.input.parse()?; + name = Some(value.value()); + } else if has_value && nested.path.is_ident("where") { + let _: syn::Token![=] = nested.input.parse()?; + let value: syn::LitStr = nested.input.parse()?; + where_clause = Some(value.value()); + } else if let Some(ident) = nested.path.get_ident() { + // Treat any other identifier as a column name + columns.push(ident.to_string()); + } + Ok(()) + })?; + + if columns.is_empty() { + return Err(meta.error("index must have at least one column")); + } + + Ok(CompositeIndexDef { + name, + columns, + index_type, + unique, + where_clause + }) +} + +#[cfg(test)] +mod tests { + use syn::parse_quote; + + use super::*; + + // ========================================================================= + // parse_has_many_attrs tests + // ========================================================================= + + #[test] + fn has_many_empty() { + let attrs: Vec = vec![]; + let result = parse_has_many_attrs(&attrs); + assert!(result.is_empty()); + } + + #[test] + fn has_many_single() { + let attrs: Vec = vec![parse_quote!(#[has_many(Post)])]; + let result = parse_has_many_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].to_string(), "Post"); + } + + #[test] + fn has_many_multiple() { 
+ let attrs: Vec = vec![ + parse_quote!(#[has_many(Post)]), + parse_quote!(#[has_many(Comment)]), + parse_quote!(#[has_many(Like)]), + ]; + let result = parse_has_many_attrs(&attrs); + assert_eq!(result.len(), 3); + assert_eq!(result[0].to_string(), "Post"); + assert_eq!(result[1].to_string(), "Comment"); + assert_eq!(result[2].to_string(), "Like"); + } + + #[test] + fn has_many_ignores_other_attrs() { + let attrs: Vec = vec![ + parse_quote!(#[derive(Debug)]), + parse_quote!(#[has_many(Post)]), + parse_quote!(#[entity(table = "users")]), + ]; + let result = parse_has_many_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].to_string(), "Post"); + } + + // ========================================================================= + // parse_api_attr tests + // ========================================================================= + + #[test] + fn api_attr_default_when_missing() { + let attrs: Vec = vec![]; + let result = parse_api_attr(&attrs); + assert!(result.tag.is_none()); + assert!(result.security.is_none()); + } + + #[test] + fn api_attr_ignores_non_entity() { + let attrs: Vec = vec![parse_quote!(#[derive(Debug)])]; + let result = parse_api_attr(&attrs); + assert!(result.tag.is_none()); + } + + #[test] + fn api_attr_with_tag() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "users", api(tag = "Users API"))])]; + let result = parse_api_attr(&attrs); + assert_eq!(result.tag, Some("Users API".to_string())); + } + + #[test] + fn api_attr_with_security() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "users", api(security = "bearer"))])]; + let result = parse_api_attr(&attrs); + assert!(result.security.is_some()); + } + + #[test] + fn api_attr_entity_without_api() { + let attrs: Vec = vec![parse_quote!(#[entity(table = "users")])]; + let result = parse_api_attr(&attrs); + assert!(result.tag.is_none()); + } + + // ========================================================================= + // parse_index_attrs tests + // 
========================================================================= + + #[test] + fn index_attrs_empty() { + let attrs: Vec = vec![]; + let result = parse_index_attrs(&attrs); + assert!(result.is_empty()); + } + + #[test] + fn index_attrs_no_indexes() { + let attrs: Vec = vec![parse_quote!(#[entity(table = "users")])]; + let result = parse_index_attrs(&attrs); + assert!(result.is_empty()); + } + + #[test] + fn index_attrs_single_column() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "users", index(email))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].columns, vec!["email"]); + assert!(!result[0].unique); + assert_eq!(result[0].index_type, IndexType::BTree); + } + + #[test] + fn index_attrs_multiple_columns() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "users", index(name, email))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].columns, vec!["name", "email"]); + } + + #[test] + fn index_attrs_unique() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "users", unique_index(tenant_id, email))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert!(result[0].unique); + assert_eq!(result[0].columns, vec!["tenant_id", "email"]); + } + + #[test] + fn index_attrs_with_type_gin() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "posts", index(type = "gin", tags))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].index_type, IndexType::Gin); + assert_eq!(result[0].columns, vec!["tags"]); + } + + #[test] + fn index_attrs_with_type_gist() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "locations", index(type = "gist", coordinates))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].index_type, IndexType::Gist); + } + + #[test] + fn index_attrs_with_type_brin() { + let attrs: Vec = + 
vec![parse_quote!(#[entity(table = "logs", index(type = "brin", created_at))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].index_type, IndexType::Brin); + } + + #[test] + fn index_attrs_with_type_hash() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "cache", index(type = "hash", key))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].index_type, IndexType::Hash); + } + + #[test] + fn index_attrs_with_custom_name() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "users", index(name = "idx_custom", status))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].name, Some("idx_custom".to_string())); + assert_eq!(result[0].columns, vec!["status"]); + } + + #[test] + fn index_attrs_with_where_clause() { + let attrs: Vec = vec![ + parse_quote!(#[entity(table = "users", index(email, where = "deleted_at IS NULL"))]), + ]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!( + result[0].where_clause, + Some("deleted_at IS NULL".to_string()) + ); + } + + #[test] + fn index_attrs_multiple_indexes() { + let attrs: Vec = vec![parse_quote!( + #[entity( + table = "users", + index(email), + unique_index(tenant_id, email), + index(type = "gin", tags) + )] + )]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 3); + + assert_eq!(result[0].columns, vec!["email"]); + assert!(!result[0].unique); + + assert_eq!(result[1].columns, vec!["tenant_id", "email"]); + assert!(result[1].unique); + + assert_eq!(result[2].columns, vec!["tags"]); + assert_eq!(result[2].index_type, IndexType::Gin); + } + + #[test] + fn index_attrs_all_options() { + let attrs: Vec = vec![parse_quote!( + #[entity( + table = "users", + unique_index(name = "idx_active_users", type = "btree", email, where = "active = true") + )] + )]; + let result = parse_index_attrs(&attrs); + 
assert_eq!(result.len(), 1); + assert!(result[0].unique); + assert_eq!(result[0].name, Some("idx_active_users".to_string())); + assert_eq!(result[0].index_type, IndexType::BTree); + assert_eq!(result[0].columns, vec!["email"]); + assert_eq!(result[0].where_clause, Some("active = true".to_string())); + } + + #[test] + fn index_attrs_ignores_non_entity() { + let attrs: Vec = vec![parse_quote!(#[derive(Debug)])]; + let result = parse_index_attrs(&attrs); + assert!(result.is_empty()); + } + + #[test] + fn index_attrs_unknown_type_defaults_to_btree() { + let attrs: Vec = + vec![parse_quote!(#[entity(table = "users", index(type = "unknown", col))])]; + let result = parse_index_attrs(&attrs); + assert_eq!(result.len(), 1); + assert_eq!(result[0].index_type, IndexType::BTree); + } +} diff --git a/crates/entity-derive-impl/src/entity/parse/entity/index.rs b/crates/entity-derive-impl/src/entity/parse/entity/index.rs new file mode 100644 index 0000000..4954654 --- /dev/null +++ b/crates/entity-derive-impl/src/entity/parse/entity/index.rs @@ -0,0 +1,272 @@ +// SPDX-FileCopyrightText: 2025-2026 RAprogramm +// SPDX-License-Identifier: MIT + +//! Composite index definitions for entity-level indexes. +//! +//! Parsed from `#[entity(index(...))]` and `#[entity(unique_index(...))]` +//! attributes. +//! +//! # Examples +//! +//! ```rust,ignore +//! #[entity( +//! table = "users", +//! index(name, email), // Default btree composite +//! index(type = "gin", tags), // GIN index +//! unique_index(tenant_id, email), // Unique composite +//! index(name = "idx_custom", status), // Named index +//! index(status, where = "active = true") // Partial index +//! )] +//! pub struct User { ... } +//! ``` + +use crate::entity::parse::field::IndexType; + +/// Composite index definition from entity-level attributes. +/// +/// Represents an index spanning one or more columns. +#[derive(Debug, Clone)] +pub struct CompositeIndexDef { + /// Index name. Auto-generated if not specified. 
+ /// + /// Format: `idx_{table}_{col1}_{col2}_...` + pub name: Option, + + /// Column names included in the index. + pub columns: Vec, + + /// Index type (btree, hash, gin, gist, brin). + pub index_type: IndexType, + + /// Whether this is a unique index. + pub unique: bool, + + /// WHERE clause for partial index (raw SQL). + /// + /// Example: `"active = true"`, `"deleted_at IS NULL"` + pub where_clause: Option +} + +impl CompositeIndexDef { + /// Create a new non-unique btree index. + #[cfg(test)] + #[must_use] + pub fn new(columns: Vec) -> Self { + Self { + name: None, + columns, + index_type: IndexType::default(), + unique: false, + where_clause: None + } + } + + /// Create a new unique btree index. + #[cfg(test)] + #[must_use] + pub fn unique(columns: Vec) -> Self { + Self { + name: None, + columns, + index_type: IndexType::default(), + unique: true, + where_clause: None + } + } + + /// Set the index name. + #[cfg(test)] + #[must_use] + pub fn with_name(mut self, name: String) -> Self { + self.name = Some(name); + self + } + + /// Set the index type. + #[cfg(test)] + #[must_use] + pub fn with_type(mut self, index_type: IndexType) -> Self { + self.index_type = index_type; + self + } + + /// Set the WHERE clause for partial index. + #[cfg(test)] + #[must_use] + pub fn with_where(mut self, where_clause: String) -> Self { + self.where_clause = Some(where_clause); + self + } + + /// Generate the default index name. + /// + /// Format: `idx_{table}_{col1}_{col2}_...` + #[must_use] + pub fn default_name(&self, table: &str) -> String { + format!("idx_{}_{}", table, self.columns.join("_")) + } + + /// Get the index name, using default if not set. + #[must_use] + pub fn name_or_default(&self, table: &str) -> String { + self.name + .clone() + .unwrap_or_else(|| self.default_name(table)) + } + + /// Check if this is a partial index. 
+ #[cfg(test)] + #[must_use] + pub fn is_partial(&self) -> bool { + self.where_clause.is_some() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn new_creates_btree_index() { + let idx = CompositeIndexDef::new(vec!["name".to_string(), "email".to_string()]); + assert_eq!(idx.columns, vec!["name", "email"]); + assert_eq!(idx.index_type, IndexType::BTree); + assert!(!idx.unique); + assert!(idx.name.is_none()); + assert!(idx.where_clause.is_none()); + } + + #[test] + fn unique_creates_unique_index() { + let idx = CompositeIndexDef::unique(vec!["tenant_id".to_string(), "email".to_string()]); + assert!(idx.unique); + assert_eq!(idx.index_type, IndexType::BTree); + } + + #[test] + fn with_name_sets_name() { + let idx = + CompositeIndexDef::new(vec!["status".to_string()]).with_name("idx_custom".to_string()); + assert_eq!(idx.name, Some("idx_custom".to_string())); + } + + #[test] + fn with_type_sets_type() { + let idx = CompositeIndexDef::new(vec!["tags".to_string()]).with_type(IndexType::Gin); + assert_eq!(idx.index_type, IndexType::Gin); + } + + #[test] + fn with_where_sets_partial() { + let idx = CompositeIndexDef::new(vec!["status".to_string()]) + .with_where("active = true".to_string()); + assert!(idx.is_partial()); + assert_eq!(idx.where_clause, Some("active = true".to_string())); + } + + #[test] + fn default_name_format() { + let idx = CompositeIndexDef::new(vec!["name".to_string(), "email".to_string()]); + assert_eq!(idx.default_name("users"), "idx_users_name_email"); + } + + #[test] + fn name_or_default_uses_custom() { + let idx = + CompositeIndexDef::new(vec!["status".to_string()]).with_name("my_idx".to_string()); + assert_eq!(idx.name_or_default("users"), "my_idx"); + } + + #[test] + fn name_or_default_uses_generated() { + let idx = CompositeIndexDef::new(vec!["status".to_string()]); + assert_eq!(idx.name_or_default("users"), "idx_users_status"); + } + + #[test] + fn is_partial_false_without_where() { + let idx = 
CompositeIndexDef::new(vec!["status".to_string()]); + assert!(!idx.is_partial()); + } + + #[test] + fn with_type_gist() { + let idx = CompositeIndexDef::new(vec!["location".to_string()]).with_type(IndexType::Gist); + assert_eq!(idx.index_type, IndexType::Gist); + } + + #[test] + fn with_type_brin() { + let idx = + CompositeIndexDef::new(vec!["created_at".to_string()]).with_type(IndexType::Brin); + assert_eq!(idx.index_type, IndexType::Brin); + } + + #[test] + fn with_type_hash() { + let idx = CompositeIndexDef::new(vec!["key".to_string()]).with_type(IndexType::Hash); + assert_eq!(idx.index_type, IndexType::Hash); + } + + #[test] + fn single_column_index() { + let idx = CompositeIndexDef::new(vec!["email".to_string()]); + assert_eq!(idx.columns.len(), 1); + assert_eq!(idx.default_name("users"), "idx_users_email"); + } + + #[test] + fn multiple_columns_index() { + let idx = CompositeIndexDef::new(vec![ + "tenant_id".to_string(), + "user_id".to_string(), + "email".to_string(), + ]); + assert_eq!(idx.columns.len(), 3); + assert_eq!( + idx.default_name("users"), + "idx_users_tenant_id_user_id_email" + ); + } + + #[test] + fn unique_with_custom_name() { + let idx = CompositeIndexDef::unique(vec!["email".to_string()]) + .with_name("unique_email_idx".to_string()); + assert!(idx.unique); + assert_eq!(idx.name_or_default("users"), "unique_email_idx"); + } + + #[test] + fn unique_partial_index() { + let idx = CompositeIndexDef::unique(vec!["email".to_string()]) + .with_where("deleted_at IS NULL".to_string()); + assert!(idx.unique); + assert!(idx.is_partial()); + assert_eq!(idx.where_clause, Some("deleted_at IS NULL".to_string())); + } + + #[test] + fn composite_index_all_options() { + let idx = CompositeIndexDef::unique(vec!["tenant_id".to_string(), "email".to_string()]) + .with_name("idx_tenant_email".to_string()) + .with_type(IndexType::BTree) + .with_where("active = true".to_string()); + assert!(idx.unique); + assert!(idx.is_partial()); + assert_eq!(idx.name, 
Some("idx_tenant_email".to_string())); + assert_eq!(idx.index_type, IndexType::BTree); + assert_eq!(idx.where_clause, Some("active = true".to_string())); + } + + #[test] + fn chained_builder_pattern() { + let idx = CompositeIndexDef::new(vec!["col".to_string()]) + .with_name("my_idx".to_string()) + .with_type(IndexType::Gin) + .with_where("x > 0".to_string()); + assert_eq!(idx.name, Some("my_idx".to_string())); + assert_eq!(idx.index_type, IndexType::Gin); + assert_eq!(idx.where_clause, Some("x > 0".to_string())); + } +} diff --git a/crates/entity-derive-impl/src/entity/parse/field.rs b/crates/entity-derive-impl/src/entity/parse/field.rs index 73de35d..6596bb8 100644 --- a/crates/entity-derive-impl/src/entity/parse/field.rs +++ b/crates/entity-derive-impl/src/entity/parse/field.rs @@ -26,12 +26,14 @@ //! pub user_id: Uuid, //! ``` +mod column; mod example; mod expose; mod filter; mod storage; mod validation; +pub use column::{ColumnConfig, IndexType, ReferentialAction}; pub use example::ExampleValue; pub use expose::ExposeConfig; pub use filter::{FilterConfig, FilterType}; @@ -41,11 +43,32 @@ pub use validation::ValidationConfig; use crate::utils::docs::extract_doc_comments; -/// Parse `#[belongs_to(EntityName)]` attribute. +/// Parse `#[belongs_to(EntityName)]` or `#[belongs_to(EntityName, on_delete = +/// "cascade")]`. /// -/// Extracts the entity identifier from the attribute. -fn parse_belongs_to(attr: &Attribute) -> Option { - attr.parse_args::().ok() +/// Returns the entity identifier and optional ON DELETE action. 
+fn parse_belongs_to(attr: &Attribute) -> (Option, Option) { + // Try simple case: #[belongs_to(Entity)] + if let Ok(ident) = attr.parse_args::() { + return (Some(ident), None); + } + + // Try extended case: #[belongs_to(Entity, on_delete = "cascade")] + let mut entity = None; + let mut on_delete = None; + + let _ = attr.parse_nested_meta(|meta| { + if meta.path.is_ident("on_delete") { + let _: syn::Token![=] = meta.input.parse()?; + let value: syn::LitStr = meta.input.parse()?; + on_delete = ReferentialAction::from_str(&value.value()); + } else if let Some(ident) = meta.path.get_ident() { + entity = Some(ident.clone()); + } + Ok(()) + }); + + (entity, on_delete) } /// Field definition with all parsed attributes. @@ -65,6 +88,9 @@ fn parse_belongs_to(attr: &Attribute) -> Option { /// #[auto] // StorageConfig::is_auto = true /// #[field(response)] /// pub created_at: DateTime, +/// +/// #[column(unique, index)] // ColumnConfig +/// pub email: String, /// ``` #[derive(Debug)] pub struct FieldDef { @@ -83,6 +109,11 @@ pub struct FieldDef { /// Query filter configuration. pub filter: FilterConfig, + /// Column configuration for migrations. + /// + /// Parsed from `#[column(...)]` attributes for constraints and indexes. + pub column: ColumnConfig, + /// Documentation comment from the field. /// /// Extracted from `///` comments for use in OpenAPI descriptions. 
@@ -123,6 +154,7 @@ impl FieldDef { let mut expose = ExposeConfig::default(); let mut storage = StorageConfig::default(); let mut filter = FilterConfig::default(); + let mut column = ColumnConfig::default(); for attr in &field.attrs { if attr.path().is_ident("id") { @@ -132,9 +164,13 @@ impl FieldDef { } else if attr.path().is_ident("field") { expose = ExposeConfig::from_attr(attr); } else if attr.path().is_ident("belongs_to") { - storage.belongs_to = parse_belongs_to(attr); + let (entity, on_del) = parse_belongs_to(attr); + storage.belongs_to = entity; + storage.on_delete = on_del; } else if attr.path().is_ident("filter") { filter = FilterConfig::from_attr(attr); + } else if attr.path().is_ident("column") { + column = ColumnConfig::from_attr(attr); } } @@ -144,6 +180,7 @@ impl FieldDef { expose, storage, filter, + column, doc, validation, example @@ -281,6 +318,35 @@ impl FieldDef { pub fn has_example(&self) -> bool { self.example.is_some() } + + /// Get the column configuration. + /// + /// Returns parsed column constraints and index settings. + #[must_use] + pub fn column(&self) -> &ColumnConfig { + &self.column + } + + /// Check if this column has a UNIQUE constraint. + #[must_use] + pub fn is_unique(&self) -> bool { + self.column.unique + } + + /// Check if this column should be indexed. + #[must_use] + #[allow(dead_code)] // Public API for future use + pub fn has_index(&self) -> bool { + self.column.has_index() + } + + /// Get the database column name. + /// + /// Returns custom name if set, otherwise the field name. + #[must_use] + pub fn column_name(&self) -> String { + self.column.column_name(&self.name_str()).to_string() + } } #[cfg(test)] @@ -352,6 +418,29 @@ mod tests { assert!(field.is_relation()); assert!(field.belongs_to().is_some()); assert_eq!(field.belongs_to().unwrap().to_string(), "User"); + assert!(field.storage.on_delete.is_none()); + } + + #[test] + fn field_belongs_to_with_on_delete() { + let field = parse_field(quote::quote! 
{ + #[belongs_to(User, on_delete = "cascade")] + pub user_id: uuid::Uuid + }); + assert!(field.is_relation()); + assert_eq!(field.belongs_to().unwrap().to_string(), "User"); + assert_eq!(field.storage.on_delete, Some(ReferentialAction::Cascade)); + } + + #[test] + fn field_belongs_to_with_on_delete_set_null() { + let field = parse_field(quote::quote! { + #[belongs_to(Organization, on_delete = "set null")] + pub org_id: uuid::Uuid + }); + assert!(field.is_relation()); + assert_eq!(field.belongs_to().unwrap().to_string(), "Organization"); + assert_eq!(field.storage.on_delete, Some(ReferentialAction::SetNull)); } #[test] @@ -425,4 +514,86 @@ mod tests { let field = parse_field(quote::quote! { pub email: String }); assert_eq!(field.name().to_string(), "email"); } + + #[test] + fn field_column_unique() { + let field = parse_field(quote::quote! { + #[column(unique)] + pub email: String + }); + assert!(field.is_unique()); + } + + #[test] + fn field_column_index() { + let field = parse_field(quote::quote! { + #[column(index)] + pub status: String + }); + assert!(field.has_index()); + assert_eq!(field.column().index, Some(IndexType::BTree)); + } + + #[test] + fn field_column_index_gin() { + let field = parse_field(quote::quote! { + #[column(index = "gin")] + pub tags: Vec + }); + assert!(field.has_index()); + assert_eq!(field.column().index, Some(IndexType::Gin)); + } + + #[test] + fn field_column_default() { + let field = parse_field(quote::quote! { + #[column(default = "true")] + pub is_active: bool + }); + assert_eq!(field.column().default, Some("true".to_string())); + } + + #[test] + fn field_column_check() { + let field = parse_field(quote::quote! { + #[column(check = "age >= 0")] + pub age: i32 + }); + assert_eq!(field.column().check, Some("age >= 0".to_string())); + } + + #[test] + fn field_column_varchar() { + let field = parse_field(quote::quote! 
{ + #[column(varchar = 100)] + pub name: String + }); + assert_eq!(field.column().varchar, Some(100)); + } + + #[test] + fn field_column_custom_name() { + let field = parse_field(quote::quote! { + #[column(name = "user_email")] + pub email: String + }); + assert_eq!(field.column_name(), "user_email"); + } + + #[test] + fn field_column_default_name() { + let field = parse_field(quote::quote! { pub email: String }); + assert_eq!(field.column_name(), "email"); + } + + #[test] + fn field_column_multiple_attrs() { + let field = parse_field(quote::quote! { + #[column(unique, index, default = "NOW()")] + pub created_at: DateTime + }); + assert!(field.is_unique()); + assert!(field.has_index()); + assert_eq!(field.column().default, Some("NOW()".to_string())); + } } diff --git a/crates/entity-derive-impl/src/entity/parse/field/column.rs b/crates/entity-derive-impl/src/entity/parse/field/column.rs new file mode 100644 index 0000000..99a50c2 --- /dev/null +++ b/crates/entity-derive-impl/src/entity/parse/field/column.rs @@ -0,0 +1,463 @@ +// SPDX-FileCopyrightText: 2025-2026 RAprogramm +// SPDX-License-Identifier: MIT + +//! Column-level database configuration for migrations. +//! +//! Controls database-specific constraints, indexes, and type mappings. +//! +//! # Supported Attributes +//! +//! | Attribute | Example | SQL | +//! |-----------|---------|-----| +//! | `unique` | `#[column(unique)]` | `UNIQUE` | +//! | `index` | `#[column(index)]` | `CREATE INDEX` (btree) | +//! | `index` | `#[column(index = "gin")]` | `CREATE INDEX USING gin` | +//! | `default` | `#[column(default = "true")]` | `DEFAULT true` | +//! | `check` | `#[column(check = "age >= 0")]` | `CHECK (age >= 0)` | +//! | `varchar` | `#[column(varchar = 255)]` | `VARCHAR(255)` | +//! | `sql_type` | `#[column(sql_type = "JSONB")]` | Explicit type | +//! | `nullable` | `#[column(nullable)]` | Allow NULL | +//! 
| `name` | `#[column(name = "user_name")]` | Custom column name | + +use syn::{Attribute, Meta}; + +/// Index type for database indexes. +/// +/// PostgreSQL supports multiple index types optimized for different use cases. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum IndexType { + /// B-tree index (default). Best for equality and range queries. + #[default] + BTree, + + /// Hash index. Only for equality comparisons. + Hash, + + /// GIN (Generalized Inverted Index). For array/JSONB containment. + Gin, + + /// GiST (Generalized Search Tree). For geometric/full-text search. + Gist, + + /// BRIN (Block Range Index). For large sequential data. + Brin +} + +impl IndexType { + /// Parse index type from string. + /// + /// Returns `None` for unrecognized values. + #[must_use] + pub fn from_str(s: &str) -> Option { + match s.to_lowercase().as_str() { + "btree" | "b-tree" => Some(Self::BTree), + "hash" => Some(Self::Hash), + "gin" => Some(Self::Gin), + "gist" => Some(Self::Gist), + "brin" => Some(Self::Brin), + _ => None + } + } + + /// Get SQL USING clause for this index type. + /// + /// Returns empty string for btree (default). + #[must_use] + pub fn as_sql_using(&self) -> &'static str { + match self { + Self::BTree => "", + Self::Hash => " USING hash", + Self::Gin => " USING gin", + Self::Gist => " USING gist", + Self::Brin => " USING brin" + } + } +} + +/// Referential action for foreign key ON DELETE/ON UPDATE. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ReferentialAction { + /// Delete/update child rows when parent is deleted/updated. + Cascade, + + /// Set foreign key to NULL. + SetNull, + + /// Set foreign key to default value. + SetDefault, + + /// Prevent deletion/update if children exist (deferred check). + Restrict, + + /// Prevent deletion/update if children exist (immediate check). + NoAction +} + +impl ReferentialAction { + /// Parse referential action from string. + /// + /// Returns `None` for unrecognized values. 
+    #[must_use]
+    pub fn from_str(s: &str) -> Option<Self> {
+        match s.to_lowercase().replace([' ', '_'], "").as_str() {
+            "cascade" => Some(Self::Cascade),
+            "setnull" => Some(Self::SetNull),
+            "setdefault" => Some(Self::SetDefault),
+            "restrict" => Some(Self::Restrict),
+            "noaction" => Some(Self::NoAction),
+            _ => None
+        }
+    }
+
+    /// Get SQL representation of this action.
+    #[must_use]
+    pub fn as_sql(&self) -> &'static str {
+        match self {
+            Self::Cascade => "CASCADE",
+            Self::SetNull => "SET NULL",
+            Self::SetDefault => "SET DEFAULT",
+            Self::Restrict => "RESTRICT",
+            Self::NoAction => "NO ACTION"
+        }
+    }
+}
+
+/// Column-level database configuration.
+///
+/// Parsed from `#[column(...)]` attributes on entity fields.
+///
+/// # Example
+///
+/// ```rust,ignore
+/// #[column(unique, index = "btree", default = "true")]
+/// pub is_active: bool,
+///
+/// #[column(varchar = 100, check = "length(name) > 0")]
+/// pub name: String,
+///
+/// #[column(sql_type = "JSONB")]
+/// pub metadata: serde_json::Value,
+/// ```
+#[derive(Debug, Clone, Default)]
+pub struct ColumnConfig {
+    /// UNIQUE constraint on this column.
+    pub unique: bool,
+
+    /// Index type if indexed. `None` means no index.
+    pub index: Option<IndexType>,
+
+    /// DEFAULT value expression (raw SQL).
+    ///
+    /// Examples: `"true"`, `"NOW()"`, `"'pending'"`.
+    pub default: Option<String>,
+
+    /// CHECK constraint expression (raw SQL).
+    ///
+    /// Example: `"age >= 0"`, `"length(name) > 0"`.
+    pub check: Option<String>,
+
+    /// VARCHAR length. Converts `String` to `VARCHAR(n)`.
+    pub varchar: Option<u32>,
+
+    /// Explicit SQL type override.
+    ///
+    /// Bypasses automatic type mapping.
+    pub sql_type: Option<String>,
+
+    /// Explicitly allow NULL even for non-Option types.
+    pub nullable: bool,
+
+    /// Custom column name. Defaults to field name.
+    pub name: Option<String>
+}
+
+impl ColumnConfig {
+    /// Parse column config from `#[column(...)]` attribute.
+ /// + /// # Recognized Options + /// + /// - `unique` — Add UNIQUE constraint + /// - `index` — Create btree index + /// - `index = "type"` — Create index of specified type + /// - `default = "expr"` — Set DEFAULT value + /// - `check = "expr"` — Add CHECK constraint + /// - `varchar = N` — Use VARCHAR(N) instead of TEXT + /// - `sql_type = "TYPE"` — Override SQL type + /// - `nullable` — Allow NULL + /// - `name = "col"` — Custom column name + pub fn from_attr(attr: &Attribute) -> Self { + let mut config = Self::default(); + + if let Meta::List(meta_list) = &attr.meta { + let _ = meta_list.parse_nested_meta(|meta| { + if meta.path.is_ident("unique") { + config.unique = true; + } else if meta.path.is_ident("index") { + if meta.input.peek(syn::Token![=]) { + let _: syn::Token![=] = meta.input.parse()?; + let value: syn::LitStr = meta.input.parse()?; + config.index = + Some(IndexType::from_str(&value.value()).unwrap_or_default()); + } else { + config.index = Some(IndexType::default()); + } + } else if meta.path.is_ident("default") { + let _: syn::Token![=] = meta.input.parse()?; + let value: syn::LitStr = meta.input.parse()?; + config.default = Some(value.value()); + } else if meta.path.is_ident("check") { + let _: syn::Token![=] = meta.input.parse()?; + let value: syn::LitStr = meta.input.parse()?; + config.check = Some(value.value()); + } else if meta.path.is_ident("varchar") { + let _: syn::Token![=] = meta.input.parse()?; + let value: syn::LitInt = meta.input.parse()?; + config.varchar = value.base10_parse().ok(); + } else if meta.path.is_ident("sql_type") { + let _: syn::Token![=] = meta.input.parse()?; + let value: syn::LitStr = meta.input.parse()?; + config.sql_type = Some(value.value()); + } else if meta.path.is_ident("nullable") { + config.nullable = true; + } else if meta.path.is_ident("name") { + let _: syn::Token![=] = meta.input.parse()?; + let value: syn::LitStr = meta.input.parse()?; + config.name = Some(value.value()); + } + Ok(()) + }); + } + + 
config + } + + /// Check if this column has any constraints. + #[must_use] + #[allow(dead_code)] // Public API for future use + pub fn has_constraints(&self) -> bool { + self.unique || self.check.is_some() + } + + /// Check if this column should be indexed. + #[must_use] + pub fn has_index(&self) -> bool { + self.index.is_some() + } + + /// Get the column name, using custom name if set. + #[must_use] + pub fn column_name<'a>(&'a self, field_name: &'a str) -> &'a str { + self.name.as_deref().unwrap_or(field_name) + } +} + +#[cfg(test)] +mod tests { + use quote::quote; + use syn::parse_quote; + + use super::*; + + fn parse_column_attr(tokens: proc_macro2::TokenStream) -> ColumnConfig { + let attr: Attribute = parse_quote!(#[column(#tokens)]); + ColumnConfig::from_attr(&attr) + } + + #[test] + fn default_is_empty() { + let config = ColumnConfig::default(); + assert!(!config.unique); + assert!(config.index.is_none()); + assert!(config.default.is_none()); + assert!(config.check.is_none()); + assert!(config.varchar.is_none()); + assert!(config.sql_type.is_none()); + assert!(!config.nullable); + assert!(config.name.is_none()); + } + + #[test] + fn parse_unique() { + let config = parse_column_attr(quote! { unique }); + assert!(config.unique); + } + + #[test] + fn parse_index_default() { + let config = parse_column_attr(quote! { index }); + assert_eq!(config.index, Some(IndexType::BTree)); + } + + #[test] + fn parse_index_gin() { + let config = parse_column_attr(quote! { index = "gin" }); + assert_eq!(config.index, Some(IndexType::Gin)); + } + + #[test] + fn parse_index_hash() { + let config = parse_column_attr(quote! { index = "hash" }); + assert_eq!(config.index, Some(IndexType::Hash)); + } + + #[test] + fn parse_default_value() { + let config = parse_column_attr(quote! { default = "true" }); + assert_eq!(config.default, Some("true".to_string())); + } + + #[test] + fn parse_default_now() { + let config = parse_column_attr(quote! 
{ default = "NOW()" }); + assert_eq!(config.default, Some("NOW()".to_string())); + } + + #[test] + fn parse_check_constraint() { + let config = parse_column_attr(quote! { check = "age >= 0" }); + assert_eq!(config.check, Some("age >= 0".to_string())); + } + + #[test] + fn parse_varchar() { + let config = parse_column_attr(quote! { varchar = 255 }); + assert_eq!(config.varchar, Some(255)); + } + + #[test] + fn parse_sql_type() { + let config = parse_column_attr(quote! { sql_type = "JSONB" }); + assert_eq!(config.sql_type, Some("JSONB".to_string())); + } + + #[test] + fn parse_nullable() { + let config = parse_column_attr(quote! { nullable }); + assert!(config.nullable); + } + + #[test] + fn parse_custom_name() { + let config = parse_column_attr(quote! { name = "user_name" }); + assert_eq!(config.name, Some("user_name".to_string())); + } + + #[test] + fn parse_multiple_attrs() { + let config = parse_column_attr(quote! { unique, index = "btree", default = "true" }); + assert!(config.unique); + assert_eq!(config.index, Some(IndexType::BTree)); + assert_eq!(config.default, Some("true".to_string())); + } + + #[test] + fn has_constraints_check() { + let config = parse_column_attr(quote! { unique }); + assert!(config.has_constraints()); + + let config2 = parse_column_attr(quote! { check = "x > 0" }); + assert!(config2.has_constraints()); + + let config3 = ColumnConfig::default(); + assert!(!config3.has_constraints()); + } + + #[test] + fn has_index_check() { + let config = parse_column_attr(quote! { index }); + assert!(config.has_index()); + + let config2 = ColumnConfig::default(); + assert!(!config2.has_index()); + } + + #[test] + fn column_name_default() { + let config = ColumnConfig::default(); + assert_eq!(config.column_name("email"), "email"); + } + + #[test] + fn column_name_custom() { + let config = parse_column_attr(quote! 
{ name = "user_email" }); + assert_eq!(config.column_name("email"), "user_email"); + } + + #[test] + fn index_type_as_sql() { + assert_eq!(IndexType::BTree.as_sql_using(), ""); + assert_eq!(IndexType::Hash.as_sql_using(), " USING hash"); + assert_eq!(IndexType::Gin.as_sql_using(), " USING gin"); + assert_eq!(IndexType::Gist.as_sql_using(), " USING gist"); + assert_eq!(IndexType::Brin.as_sql_using(), " USING brin"); + } + + #[test] + fn index_type_from_str_all() { + assert_eq!(IndexType::from_str("btree"), Some(IndexType::BTree)); + assert_eq!(IndexType::from_str("b-tree"), Some(IndexType::BTree)); + assert_eq!(IndexType::from_str("BTREE"), Some(IndexType::BTree)); + assert_eq!(IndexType::from_str("hash"), Some(IndexType::Hash)); + assert_eq!(IndexType::from_str("HASH"), Some(IndexType::Hash)); + assert_eq!(IndexType::from_str("gin"), Some(IndexType::Gin)); + assert_eq!(IndexType::from_str("GIN"), Some(IndexType::Gin)); + assert_eq!(IndexType::from_str("gist"), Some(IndexType::Gist)); + assert_eq!(IndexType::from_str("GIST"), Some(IndexType::Gist)); + assert_eq!(IndexType::from_str("brin"), Some(IndexType::Brin)); + assert_eq!(IndexType::from_str("BRIN"), Some(IndexType::Brin)); + assert_eq!(IndexType::from_str("invalid"), None); + assert_eq!(IndexType::from_str("unknown"), None); + } + + #[test] + fn parse_index_gist() { + let config = parse_column_attr(quote! { index = "gist" }); + assert_eq!(config.index, Some(IndexType::Gist)); + } + + #[test] + fn parse_index_brin() { + let config = parse_column_attr(quote! { index = "brin" }); + assert_eq!(config.index, Some(IndexType::Brin)); + } + + #[test] + fn parse_index_unknown_defaults_to_btree() { + let config = parse_column_attr(quote! 
{ index = "unknown" }); + assert_eq!(config.index, Some(IndexType::BTree)); + } + + #[test] + fn referential_action_from_str() { + assert_eq!( + ReferentialAction::from_str("cascade"), + Some(ReferentialAction::Cascade) + ); + assert_eq!( + ReferentialAction::from_str("SET NULL"), + Some(ReferentialAction::SetNull) + ); + assert_eq!( + ReferentialAction::from_str("set_default"), + Some(ReferentialAction::SetDefault) + ); + assert_eq!( + ReferentialAction::from_str("RESTRICT"), + Some(ReferentialAction::Restrict) + ); + assert_eq!( + ReferentialAction::from_str("no action"), + Some(ReferentialAction::NoAction) + ); + assert_eq!(ReferentialAction::from_str("invalid"), None); + } + + #[test] + fn referential_action_as_sql() { + assert_eq!(ReferentialAction::Cascade.as_sql(), "CASCADE"); + assert_eq!(ReferentialAction::SetNull.as_sql(), "SET NULL"); + assert_eq!(ReferentialAction::SetDefault.as_sql(), "SET DEFAULT"); + assert_eq!(ReferentialAction::Restrict.as_sql(), "RESTRICT"); + assert_eq!(ReferentialAction::NoAction.as_sql(), "NO ACTION"); + } +} diff --git a/crates/entity-derive-impl/src/entity/parse/field/storage.rs b/crates/entity-derive-impl/src/entity/parse/field/storage.rs index 71c77bc..1f55511 100644 --- a/crates/entity-derive-impl/src/entity/parse/field/storage.rs +++ b/crates/entity-derive-impl/src/entity/parse/field/storage.rs @@ -16,9 +16,18 @@ //! ``` //! //! This generates a `find_user` method in the repository. +//! +//! ## With ON DELETE action +//! +//! ```rust,ignore +//! #[belongs_to(User, on_delete = "cascade")] +//! pub user_id: Uuid, +//! ``` use syn::Ident; +use super::ReferentialAction; + /// Database storage configuration. /// /// Determines how the field is stored and managed in the database. 
@@ -28,12 +37,7 @@ use syn::Ident; /// - `#[id]` — Primary key with auto-generated UUID /// - `#[auto]` — Auto-generated value (timestamps) /// - `#[belongs_to(Entity)]` — Foreign key relation -/// -/// # Future attributes (planned) -/// -/// - `#[column(name = "...")]` — Custom column name -/// - `#[column(index)]` — Create index -/// - `#[column(unique)]` — Unique constraint +/// - `#[belongs_to(Entity, on_delete = "cascade")]` — FK with ON DELETE #[derive(Debug, Default, Clone)] pub struct StorageConfig { /// Primary key field (`#[id]`). @@ -56,6 +60,7 @@ pub struct StorageConfig { /// /// Stores the related entity name. When set, generates: /// - `find_{entity}(&self, id) -> Result>` method + /// - REFERENCES clause in migration (if migrations enabled) /// /// # Example /// @@ -64,7 +69,20 @@ pub struct StorageConfig { /// pub user_id: Uuid, /// // Generates: async fn find_user(&self, post_id: Uuid) -> Result> /// ``` - pub belongs_to: Option + pub belongs_to: Option, + + /// ON DELETE action for foreign key. + /// + /// Only applies when `belongs_to` is set. 
+    ///
+    /// # Example
+    ///
+    /// ```rust,ignore
+    /// #[belongs_to(User, on_delete = "cascade")]
+    /// pub user_id: Uuid,
+    /// // Generates: REFERENCES users(id) ON DELETE CASCADE
+    /// ```
+    pub on_delete: Option<ReferentialAction>
 }
 
 impl StorageConfig {
@@ -87,6 +105,7 @@ mod tests {
         assert!(!config.is_id);
         assert!(!config.is_auto);
         assert!(!config.is_relation());
+        assert!(config.on_delete.is_none());
     }
 
     #[test]
@@ -94,8 +113,21 @@ mod tests {
         let config = StorageConfig {
             is_id: false,
             is_auto: false,
-            belongs_to: Some(Ident::new("User", Span::call_site()))
+            belongs_to: Some(Ident::new("User", Span::call_site())),
+            on_delete: None
         };
         assert!(config.is_relation());
+    }
+
+    #[test]
+    fn belongs_to_with_on_delete() {
+        let config = StorageConfig {
+            is_id: false,
+            is_auto: false,
+            belongs_to: Some(Ident::new("User", Span::call_site())),
+            on_delete: Some(ReferentialAction::Cascade)
+        };
+        assert!(config.is_relation());
+        assert_eq!(config.on_delete, Some(ReferentialAction::Cascade));
+    }
 }
diff --git a/crates/entity-derive-impl/src/entity/sql/postgres/helpers.rs b/crates/entity-derive-impl/src/entity/sql/postgres/helpers.rs
index 512ec4a..503547e 100644
--- a/crates/entity-derive-impl/src/entity/sql/postgres/helpers.rs
+++ b/crates/entity-derive-impl/src/entity/sql/postgres/helpers.rs
@@ -209,3 +209,237 @@ pub fn generate_query_bindings(fields: &[&FieldDef]) -> TokenStream {
 
     quote! { #(#bindings)* }
 }
+
+#[cfg(test)]
+mod tests {
+    use syn::{Field, parse_quote};
+
+    use super::*;
+    use crate::entity::parse::FieldDef;
+
+    fn parse_field(tokens: proc_macro2::TokenStream) -> FieldDef {
+        let field: Field = parse_quote!(#tokens);
+        FieldDef::from_field(&field).unwrap()
+    }
+
+    #[test]
+    fn join_columns_single() {
+        let field = parse_field(quote! { pub name: String });
+        let result = join_columns(&[field]);
+        assert_eq!(result, "name");
+    }
+
+    #[test]
+    fn join_columns_multiple() {
+        let fields = vec![
+            parse_field(quote! { pub id: Uuid }),
+            parse_field(quote!
{ pub name: String }), + parse_field(quote! { pub email: String }), + ]; + let result = join_columns(&fields); + assert_eq!(result, "id, name, email"); + } + + #[test] + fn join_columns_empty() { + let result = join_columns(&[]); + assert_eq!(result, ""); + } + + #[test] + fn insert_bindings_generates_bind_calls() { + let fields = vec![ + parse_field(quote! { pub id: Uuid }), + parse_field(quote! { pub name: String }), + ]; + let bindings = insert_bindings(&fields); + assert_eq!(bindings.len(), 2); + + let first = bindings[0].to_string(); + assert!(first.contains("bind"), "Expected 'bind' in: {}", first); + assert!( + first.contains("insertable"), + "Expected 'insertable' in: {}", + first + ); + assert!(first.contains("id"), "Expected 'id' in: {}", first); + + let second = bindings[1].to_string(); + assert!(second.contains("bind"), "Expected 'bind' in: {}", second); + assert!( + second.contains("insertable"), + "Expected 'insertable' in: {}", + second + ); + assert!(second.contains("name"), "Expected 'name' in: {}", second); + } + + #[test] + fn insert_bindings_empty() { + let bindings = insert_bindings(&[]); + assert!(bindings.is_empty()); + } + + #[test] + fn update_bindings_generates_bind_calls() { + let fields = [ + parse_field(quote! { pub name: String }), + parse_field(quote! { pub email: String }) + ]; + let refs: Vec<&FieldDef> = fields.iter().collect(); + let bindings = update_bindings(&refs); + assert_eq!(bindings.len(), 2); + + let first = bindings[0].to_string(); + assert!(first.contains("bind"), "Expected 'bind' in: {}", first); + assert!(first.contains("dto"), "Expected 'dto' in: {}", first); + assert!(first.contains("name"), "Expected 'name' in: {}", first); + } + + #[test] + fn update_bindings_empty() { + let bindings = update_bindings(&[]); + assert!(bindings.is_empty()); + } + + #[test] + fn where_conditions_eq_filter() { + let field = parse_field(quote! 
{ + #[filter(eq)] + pub status: String + }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_where_conditions(&refs, false); + let code = result.to_string(); + assert!(code.contains("query . status . is_some")); + assert!(code.contains("= $")); + } + + #[test] + fn where_conditions_like_filter() { + let field = parse_field(quote! { + #[filter(like)] + pub name: String + }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_where_conditions(&refs, false); + let code = result.to_string(); + assert!(code.contains("query . name . is_some")); + assert!(code.contains("ILIKE")); + } + + #[test] + fn where_conditions_range_filter() { + let field = parse_field(quote! { + #[filter(range)] + pub age: i32 + }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_where_conditions(&refs, false); + let code = result.to_string(); + assert!(code.contains("age_from")); + assert!(code.contains("age_to")); + assert!(code.contains(">=")); + assert!(code.contains("<=")); + } + + #[test] + fn where_conditions_none_filter() { + let field = parse_field(quote! { pub name: String }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_where_conditions(&refs, false); + let code = result.to_string(); + // No conditions for None filter + assert!(!code.contains("query")); + } + + #[test] + fn where_conditions_with_soft_delete() { + let result = generate_where_conditions(&[], true); + let code = result.to_string(); + assert!(code.contains("deleted_at IS NULL")); + } + + #[test] + fn where_conditions_without_soft_delete() { + let result = generate_where_conditions(&[], false); + let code = result.to_string(); + assert!(!code.contains("deleted_at")); + } + + #[test] + fn query_bindings_eq_filter() { + let field = parse_field(quote! 
{ + #[filter(eq)] + pub status: String + }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_query_bindings(&refs); + let code = result.to_string(); + assert!(code.contains("if let Some (ref v) = query . status")); + assert!(code.contains("q = q . bind (v)")); + } + + #[test] + fn query_bindings_like_filter() { + let field = parse_field(quote! { + #[filter(like)] + pub name: String + }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_query_bindings(&refs); + let code = result.to_string(); + assert!(code.contains("escaped")); + assert!(code.contains("format !")); + } + + #[test] + fn query_bindings_range_filter() { + let field = parse_field(quote! { + #[filter(range)] + pub age: i32 + }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_query_bindings(&refs); + let code = result.to_string(); + assert!(code.contains("age_from")); + assert!(code.contains("age_to")); + } + + #[test] + fn query_bindings_none_filter() { + let field = parse_field(quote! { pub name: String }); + let refs: Vec<&FieldDef> = vec![&field]; + let result = generate_query_bindings(&refs); + let code = result.to_string(); + // No bindings for None filter + assert!(!code.contains("bind")); + } + + #[test] + fn query_bindings_empty() { + let result = generate_query_bindings(&[]); + assert!(result.is_empty()); + } + + #[test] + fn where_conditions_multiple_filters() { + let fields = [ + parse_field(quote! { + #[filter(eq)] + pub status: String + }), + parse_field(quote! 
{ + #[filter(like)] + pub name: String + }) + ]; + let refs: Vec<&FieldDef> = fields.iter().collect(); + let result = generate_where_conditions(&refs, false); + let code = result.to_string(); + assert!(code.contains("status")); + assert!(code.contains("name")); + assert!(code.contains("= $")); + assert!(code.contains("ILIKE")); + } +} diff --git a/crates/entity-derive-impl/src/lib.rs b/crates/entity-derive-impl/src/lib.rs index 083d65f..e63a57d 100644 --- a/crates/entity-derive-impl/src/lib.rs +++ b/crates/entity-derive-impl/src/lib.rs @@ -256,6 +256,7 @@ use proc_macro::TokenStream; /// | `sql` | No | `"full"` | SQL generation: `"full"`, `"trait"`, or `"none"` | /// | `dialect` | No | `"postgres"` | Database dialect: `"postgres"`, `"clickhouse"`, `"mongodb"` | /// | `uuid` | No | `"v7"` | UUID version for ID: `"v7"` (time-ordered) or `"v4"` (random) | +/// | `migrations` | No | `false` | Generate `MIGRATION_UP` and `MIGRATION_DOWN` constants | /// /// # Field Attributes /// @@ -268,11 +269,18 @@ use proc_macro::TokenStream; /// | `#[field(response)]` | Include in `Response`. | /// | `#[field(skip)]` | Exclude from ALL DTOs. Use for sensitive data. | /// | `#[belongs_to(Entity)]` | Foreign key relation. Generates `find_{entity}` method in repository. | +/// | `#[belongs_to(Entity, on_delete = "...")]` | Foreign key with ON DELETE action (`cascade`, `set null`, `restrict`). | /// | `#[has_many(Entity)]` | One-to-many relation (entity-level). Generates `find_{entities}` method. | /// | `#[projection(Name: f1, f2)]` | Entity-level. Defines a projection struct with specified fields. | /// | `#[filter]` | Exact match filter. Generates field in Query struct with `=` comparison. | /// | `#[filter(like)]` | ILIKE pattern filter. Generates field for text pattern matching. | /// | `#[filter(range)]` | Range filter. Generates `field_from` and `field_to` fields. | +/// | `#[column(unique)]` | Add UNIQUE constraint in migrations. 
| +/// | `#[column(index)]` | Add btree index in migrations. | +/// | `#[column(index = "gin")]` | Add index with specific type (btree, hash, gin, gist, brin). | +/// | `#[column(default = "...")]` | Set DEFAULT value in migrations. | +/// | `#[column(check = "...")]` | Add CHECK constraint in migrations. | +/// | `#[column(varchar = N)]` | Use VARCHAR(N) instead of TEXT in migrations. | /// /// Multiple attributes can be combined: `#[field(create, update, response)]` /// @@ -357,6 +365,41 @@ use proc_macro::TokenStream; /// // No repository trait or SQL implementation /// ``` /// +/// ## Migration Generation +/// +/// Generate compile-time SQL migrations with `migrations`: +/// +/// ```rust,ignore +/// #[derive(Entity)] +/// #[entity(table = "products", migrations)] +/// pub struct Product { +/// #[id] +/// pub id: Uuid, +/// +/// #[field(create, update, response)] +/// #[column(unique, index)] +/// pub sku: String, +/// +/// #[field(create, update, response)] +/// #[column(varchar = 200)] +/// pub name: String, +/// +/// #[field(create, update, response)] +/// #[column(check = "price >= 0")] +/// pub price: f64, +/// +/// #[belongs_to(Category, on_delete = "cascade")] +/// pub category_id: Uuid, +/// } +/// +/// // Generated constants: +/// // Product::MIGRATION_UP - CREATE TABLE, indexes, constraints +/// // Product::MIGRATION_DOWN - DROP TABLE CASCADE +/// +/// // Apply migration: +/// sqlx::query(Product::MIGRATION_UP).execute(&pool).await?; +/// ``` +/// /// # Security /// /// Use `#[field(skip)]` to prevent sensitive data from leaking: @@ -399,7 +442,7 @@ use proc_macro::TokenStream; Entity, attributes( entity, field, id, auto, validate, belongs_to, has_many, projection, filter, command, - example + example, column ) )] pub fn derive_entity(input: TokenStream) -> TokenStream { diff --git a/crates/entity-derive/Cargo.toml b/crates/entity-derive/Cargo.toml index 2ada274..7810606 100644 --- a/crates/entity-derive/Cargo.toml +++ b/crates/entity-derive/Cargo.toml 
@@ -3,7 +3,7 @@ [package] name = "entity-derive" -version = "0.4.0" +version = "0.5.0" edition = "2024" rust-version = "1.92" authors = ["RAprogramm "] @@ -25,8 +25,8 @@ api = [] validate = [] [dependencies] -entity-core = { path = "../entity-core", version = "0.2.0" } -entity-derive-impl = { path = "../entity-derive-impl", version = "0.2.0" } +entity-core = { path = "../entity-core", version = "0.3.0" } +entity-derive-impl = { path = "../entity-derive-impl", version = "0.3.0" } [dev-dependencies] trybuild = "1" @@ -35,7 +35,12 @@ chrono = { version = "0.4", features = ["serde"] } serde = { version = "1", features = ["derive"] } serde_json = "1" async-trait = "0.1" -sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "uuid", "chrono"] } +sqlx = { version = "0.8", features = [ + "runtime-tokio", + "postgres", + "uuid", + "chrono", +] } tokio = { version = "1", features = ["macros", "rt-multi-thread"] } utoipa = { version = "5", features = ["chrono", "uuid"] } validator = { version = "0.20", features = ["derive"] } diff --git a/crates/entity-derive/src/lib.rs b/crates/entity-derive/src/lib.rs index 4615074..d9d90ee 100644 --- a/crates/entity-derive/src/lib.rs +++ b/crates/entity-derive/src/lib.rs @@ -3,8 +3,8 @@ #![doc = include_str!("../README.md")] #![doc( - html_logo_url = "https://raw.githubusercontent.com/RAprogramm/entity-derive/main/assets/logo.svg", - html_favicon_url = "https://raw.githubusercontent.com/RAprogramm/entity-derive/main/assets/favicon.ico" + html_logo_url = "https://raw.githubusercontent.com/RAprogramm/entity-derive/main/logo.png", + html_favicon_url = "https://raw.githubusercontent.com/RAprogramm/entity-derive/main/logo.png" )] #![cfg_attr(docsrs, feature(doc_cfg))] #![warn(missing_docs)] diff --git a/crates/entity-derive/tests/cases/pass/migrations_basic.rs b/crates/entity-derive/tests/cases/pass/migrations_basic.rs new file mode 100644 index 0000000..709f08f --- /dev/null +++ 
b/crates/entity-derive/tests/cases/pass/migrations_basic.rs
@@ -0,0 +1,37 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+use chrono::{DateTime, Utc};
+use entity_derive::Entity;
+use uuid::Uuid;
+
+#[derive(Entity)]
+#[entity(table = "users", schema = "core", migrations)]
+pub struct User {
+    #[id]
+    pub id: Uuid,
+
+    #[field(create, update, response)]
+    pub name: String,
+
+    #[field(create, response)]
+    pub email: String,
+
+    #[field(response)]
+    #[auto]
+    pub created_at: DateTime<Utc>,
+}
+
+fn main() {
+    // Verify MIGRATION_UP is generated and contains expected SQL
+    let up = User::MIGRATION_UP;
+    assert!(up.contains("CREATE TABLE IF NOT EXISTS core.users"));
+    assert!(up.contains("id UUID PRIMARY KEY"));
+    assert!(up.contains("name TEXT NOT NULL"));
+    assert!(up.contains("email TEXT NOT NULL"));
+    assert!(up.contains("created_at TIMESTAMPTZ NOT NULL"));
+
+    // Verify MIGRATION_DOWN is generated
+    let down = User::MIGRATION_DOWN;
+    assert!(down.contains("DROP TABLE IF EXISTS core.users CASCADE"));
+}
diff --git a/crates/entity-derive/tests/cases/pass/migrations_constraints.rs b/crates/entity-derive/tests/cases/pass/migrations_constraints.rs
new file mode 100644
index 0000000..184deee
--- /dev/null
+++ b/crates/entity-derive/tests/cases/pass/migrations_constraints.rs
@@ -0,0 +1,56 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+use entity_derive::Entity;
+use uuid::Uuid;
+
+#[derive(Entity)]
+#[entity(table = "products", migrations)]
+pub struct Product {
+    #[id]
+    pub id: Uuid,
+
+    #[field(create, update, response)]
+    #[column(unique)]
+    pub sku: String,
+
+    #[field(create, update, response)]
+    #[column(varchar = 200)]
+    pub name: String,
+
+    #[field(create, update, response)]
+    #[column(default = "0")]
+    pub quantity: i32,
+
+    #[field(create, update, response)]
+    #[column(check = "price >= 0")]
+    pub price: f64,
+
+    #[field(create, update, response)]
+    #[column(index)]
+    pub category:
String,
+
+    #[field(create, update, response)]
+    #[column(index = "gin")]
+    pub tags: Vec<String>,
+}
+
+fn main() {
+    let up = Product::MIGRATION_UP;
+
+    // Check UNIQUE constraint
+    assert!(up.contains("sku TEXT NOT NULL UNIQUE"));
+
+    // Check VARCHAR
+    assert!(up.contains("name VARCHAR(200) NOT NULL"));
+
+    // Check DEFAULT
+    assert!(up.contains("quantity INTEGER NOT NULL DEFAULT 0"));
+
+    // Check CHECK constraint
+    assert!(up.contains("price DOUBLE PRECISION NOT NULL CHECK (price >= 0)"));
+
+    // Check indexes are generated
+    assert!(up.contains("CREATE INDEX IF NOT EXISTS idx_products_category"));
+    assert!(up.contains("USING gin"));
+}
diff --git a/crates/entity-derive/tests/cases/pass/migrations_relations.rs b/crates/entity-derive/tests/cases/pass/migrations_relations.rs
new file mode 100644
index 0000000..2d59d3a
--- /dev/null
+++ b/crates/entity-derive/tests/cases/pass/migrations_relations.rs
@@ -0,0 +1,40 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+use entity_derive::Entity;
+use uuid::Uuid;
+
+// Stub types for belongs_to references
+pub struct User;
+pub struct Category;
+
+#[derive(Entity)]
+#[entity(table = "posts", schema = "blog", migrations, sql = "none")]
+pub struct Post {
+    #[id]
+    pub id: Uuid,
+
+    #[field(create, update, response)]
+    pub title: String,
+
+    #[field(create, response)]
+    #[belongs_to(User, on_delete = "cascade")]
+    pub author_id: Uuid,
+
+    #[field(create, response)]
+    #[belongs_to(Category, on_delete = "set null")]
+    pub category_id: Option<Uuid>,
+}
+
+fn main() {
+    let up = Post::MIGRATION_UP;
+
+    // Check table creation
+    assert!(up.contains("CREATE TABLE IF NOT EXISTS blog.posts"));
+
+    // Check foreign key with CASCADE
+    assert!(up.contains("author_id UUID NOT NULL REFERENCES blog.users(id) ON DELETE CASCADE"));
+
+    // Check foreign key with SET NULL (nullable field)
+    assert!(up.contains("category_id UUID REFERENCES blog.categories(id) ON DELETE SET NULL"));
+}
diff --git a/logo.png
b/logo.png new file mode 100644 index 0000000..f67efc5 Binary files /dev/null and b/logo.png differ diff --git a/logo.png.license b/logo.png.license new file mode 100644 index 0000000..f4247d6 --- /dev/null +++ b/logo.png.license @@ -0,0 +1,2 @@ +SPDX-FileCopyrightText: 2025-2026 RAprogramm +SPDX-License-Identifier: MIT