diff --git a/Cargo.toml b/Cargo.toml
index e209c51..40e0ace 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,16 +5,16 @@
resolver = "3"
members = ["crates/*"]
exclude = [
- "examples/basic",
- "examples/filtering",
- "examples/relations",
- "examples/events",
- "examples/hooks",
- "examples/commands",
- "examples/transactions",
- "examples/soft-delete",
- "examples/streams",
- "examples/full-app",
+ "examples/basic",
+ "examples/filtering",
+ "examples/relations",
+ "examples/events",
+ "examples/hooks",
+ "examples/commands",
+ "examples/transactions",
+ "examples/soft-delete",
+ "examples/streams",
+ "examples/full-app",
]
[workspace.package]
@@ -26,9 +26,9 @@ license = "MIT"
repository = "https://github.com/RAprogramm/entity-derive"
[workspace.dependencies]
-entity-core = { path = "crates/entity-core", version = "0.2.0" }
-entity-derive = { path = "crates/entity-derive", version = "0.4.0" }
-entity-derive-impl = { path = "crates/entity-derive-impl", version = "0.2.0" }
+entity-core = { path = "crates/entity-core", version = "0.3.0" }
+entity-derive = { path = "crates/entity-derive", version = "0.5.0" }
+entity-derive-impl = { path = "crates/entity-derive-impl", version = "0.3.0" }
syn = { version = "2", features = ["full", "extra-traits", "parsing"] }
quote = "1"
proc-macro2 = "1"
diff --git a/README.md b/README.md
index cd83d0e..5feb447 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,7 @@
+
entity-derive
One macro to rule them all
diff --git a/crates/entity-core/Cargo.toml b/crates/entity-core/Cargo.toml
index 67d8791..0a60cd1 100644
--- a/crates/entity-core/Cargo.toml
+++ b/crates/entity-core/Cargo.toml
@@ -3,7 +3,7 @@
[package]
name = "entity-core"
-version = "0.2.0"
+version = "0.3.0"
edition = "2024"
rust-version = "1.92"
authors = ["RAprogramm "]
@@ -24,7 +24,9 @@ streams = ["serde", "serde_json", "futures"]
[dependencies]
async-trait = "0.1"
-sqlx = { version = "0.8", optional = true, default-features = false, features = ["postgres"] }
+sqlx = { version = "0.8", optional = true, default-features = false, features = [
+ "postgres",
+] }
serde = { version = "1", features = ["derive"], optional = true }
serde_json = { version = "1", optional = true }
futures = { version = "0.3", optional = true }
diff --git a/crates/entity-derive-impl/Cargo.toml b/crates/entity-derive-impl/Cargo.toml
index e3be362..7ffe173 100644
--- a/crates/entity-derive-impl/Cargo.toml
+++ b/crates/entity-derive-impl/Cargo.toml
@@ -3,7 +3,7 @@
[package]
name = "entity-derive-impl"
-version = "0.2.0"
+version = "0.3.0"
edition = "2024"
rust-version = "1.92"
authors = ["RAprogramm "]
@@ -34,7 +34,12 @@ uuid = { version = "1", features = ["v4", "v7", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1", features = ["derive"] }
async-trait = "0.1"
-sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "uuid", "chrono"] }
+sqlx = { version = "0.8", features = [
+ "runtime-tokio",
+ "postgres",
+ "uuid",
+ "chrono",
+] }
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
utoipa = { version = "5", features = ["chrono", "uuid"] }
validator = { version = "0.20", features = ["derive"] }
diff --git a/crates/entity-derive-impl/src/entity.rs b/crates/entity-derive-impl/src/entity.rs
index e2974ba..52bb772 100644
--- a/crates/entity-derive-impl/src/entity.rs
+++ b/crates/entity-derive-impl/src/entity.rs
@@ -68,6 +68,7 @@ mod events;
mod hooks;
mod insertable;
mod mappers;
+mod migrations;
pub mod parse;
mod policy;
mod projection;
@@ -110,6 +111,7 @@ fn generate(entity: EntityDef) -> TokenStream {
let insertable = insertable::generate(&entity);
let mappers = mappers::generate(&entity);
let sql = sql::generate(&entity);
+ let migrations = migrations::generate(&entity);
let expanded = quote! {
#dto
@@ -127,6 +129,7 @@ fn generate(entity: EntityDef) -> TokenStream {
#insertable
#mappers
#sql
+ #migrations
};
expanded.into()
diff --git a/crates/entity-derive-impl/src/entity/migrations.rs b/crates/entity-derive-impl/src/entity/migrations.rs
new file mode 100644
index 0000000..7c0a777
--- /dev/null
+++ b/crates/entity-derive-impl/src/entity/migrations.rs
@@ -0,0 +1,118 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+//! Migration generation for entity-derive.
+//!
+//! Generates `MIGRATION_UP` and `MIGRATION_DOWN` constants containing
+//! SQL DDL statements for creating/dropping tables.
+//!
+//! # Features
+//!
+//! - Full type mapping (Rust → PostgreSQL)
+//! - Column constraints (UNIQUE, CHECK, DEFAULT)
+//! - Indexes (btree, hash, gin, gist, brin)
+//! - Foreign keys with ON DELETE actions
+//! - Composite indexes
+//!
+//! # Usage
+//!
+//! ```rust,ignore
+//! #[derive(Entity)]
+//! #[entity(table = "users", migrations)]
+//! pub struct User {
+//! #[id]
+//! pub id: Uuid,
+//!
+//! #[column(unique, index)]
+//! pub email: String,
+//! }
+//!
+//! // Apply migration:
+//! sqlx::query(User::MIGRATION_UP).execute(&pool).await?;
+//! ```
+
+mod postgres;
+pub mod types;
+
+use proc_macro2::TokenStream;
+
+use super::parse::{DatabaseDialect, EntityDef};
+
+/// Generate migration constants based on entity configuration.
+///
+/// Returns empty `TokenStream` if migrations are not enabled.
+pub fn generate(entity: &EntityDef) -> TokenStream {
+ if !entity.migrations {
+ return TokenStream::new();
+ }
+
+ match entity.dialect {
+ DatabaseDialect::Postgres => postgres::generate(entity),
+ DatabaseDialect::ClickHouse => TokenStream::new(), // TODO: future
+ DatabaseDialect::MongoDB => TokenStream::new() // N/A for document DB
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use syn::DeriveInput;
+
+ use super::*;
+
+ fn parse_entity(tokens: proc_macro2::TokenStream) -> EntityDef {
+ let input: DeriveInput = syn::parse_quote!(#tokens);
+ EntityDef::from_derive_input(&input).unwrap()
+ }
+
+ #[test]
+ fn generate_returns_empty_when_migrations_disabled() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users")]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let result = generate(&entity);
+ assert!(result.is_empty());
+ }
+
+ #[test]
+ fn generate_returns_tokens_when_migrations_enabled() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let result = generate(&entity);
+ assert!(!result.is_empty());
+ }
+
+ #[test]
+ fn generate_returns_empty_for_clickhouse() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", dialect = "clickhouse", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let result = generate(&entity);
+ assert!(result.is_empty());
+ }
+
+ #[test]
+ fn generate_returns_empty_for_mongodb() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", dialect = "mongodb", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let result = generate(&entity);
+ assert!(result.is_empty());
+ }
+}
diff --git a/crates/entity-derive-impl/src/entity/migrations/postgres.rs b/crates/entity-derive-impl/src/entity/migrations/postgres.rs
new file mode 100644
index 0000000..1ba3a99
--- /dev/null
+++ b/crates/entity-derive-impl/src/entity/migrations/postgres.rs
@@ -0,0 +1,52 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+//! PostgreSQL migration generation.
+//!
+//! Generates `MIGRATION_UP` and `MIGRATION_DOWN` constants for PostgreSQL.
+
+mod ddl;
+
+use proc_macro2::TokenStream;
+use quote::quote;
+
+use crate::{entity::parse::EntityDef, utils::marker};
+
+/// Generate migration constants for PostgreSQL.
+///
+/// # Generated Code
+///
+/// ```rust,ignore
+/// impl User {
+/// pub const MIGRATION_UP: &'static str = "CREATE TABLE...";
+/// pub const MIGRATION_DOWN: &'static str = "DROP TABLE...";
+/// }
+/// ```
+pub fn generate(entity: &EntityDef) -> TokenStream {
+ let entity_name = entity.name();
+ let vis = &entity.vis;
+
+ let up_sql = ddl::generate_up(entity);
+ let down_sql = ddl::generate_down(entity);
+
+ let marker = marker::generated();
+
+ quote! {
+ #marker
+ impl #entity_name {
+ /// SQL migration to create this entity's table, indexes, and constraints.
+ ///
+ /// # Usage
+ ///
+ /// ```rust,ignore
+ /// sqlx::query(User::MIGRATION_UP).execute(&pool).await?;
+ /// ```
+ #vis const MIGRATION_UP: &'static str = #up_sql;
+
+ /// SQL migration to drop this entity's table.
+ ///
+ /// Uses CASCADE to drop dependent objects.
+ #vis const MIGRATION_DOWN: &'static str = #down_sql;
+ }
+ }
+}
diff --git a/crates/entity-derive-impl/src/entity/migrations/postgres/ddl.rs b/crates/entity-derive-impl/src/entity/migrations/postgres/ddl.rs
new file mode 100644
index 0000000..b6cf425
--- /dev/null
+++ b/crates/entity-derive-impl/src/entity/migrations/postgres/ddl.rs
@@ -0,0 +1,512 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+//! DDL (Data Definition Language) generation for PostgreSQL.
+//!
+//! Generates CREATE TABLE, CREATE INDEX, and DROP TABLE statements.
+
+use convert_case::{Case, Casing};
+
+use crate::entity::{
+ migrations::types::{PostgresTypeMapper, TypeMapper},
+ parse::{CompositeIndexDef, EntityDef, FieldDef}
+};
+
+/// Generate the complete UP migration SQL.
+///
+/// Includes:
+/// - CREATE TABLE with columns and constraints
+/// - CREATE INDEX for single-column indexes
+/// - CREATE INDEX for composite indexes
+pub fn generate_up(entity: &EntityDef) -> String {
+ let mut sql = String::new();
+
+ // CREATE TABLE
+ sql.push_str(&generate_create_table(entity));
+
+ // Single-column indexes
+ for field in entity.all_fields() {
+ if field.column().has_index() {
+ sql.push_str(&generate_single_index(entity, field));
+ }
+ }
+
+ // Composite indexes
+ for idx in &entity.indexes {
+ sql.push_str(&generate_composite_index(entity, idx));
+ }
+
+ sql
+}
+
+/// Generate the DOWN migration SQL.
+pub fn generate_down(entity: &EntityDef) -> String {
+ format!(
+ "DROP TABLE IF EXISTS {} CASCADE;\n",
+ entity.full_table_name()
+ )
+}
+
+/// Generate CREATE TABLE statement.
+fn generate_create_table(entity: &EntityDef) -> String {
+ let mapper = PostgresTypeMapper;
+ let full_table = entity.full_table_name();
+
+ let columns: Vec<String> = entity
+ .all_fields()
+ .iter()
+ .map(|f| generate_column_def(f, &mapper, entity))
+ .collect();
+
+ format!(
+ "CREATE TABLE IF NOT EXISTS {} (\n{}\n);\n",
+ full_table,
+ columns.join(",\n")
+ )
+}
+
+/// Generate a single column definition.
+fn generate_column_def(
+ field: &FieldDef,
+ mapper: &PostgresTypeMapper,
+ entity: &EntityDef
+) -> String {
+ let column_name = field.column_name();
+ let sql_type = mapper.map_type(field.ty(), field.column());
+
+ let mut parts = vec![format!(" {}", column_name)];
+
+ // Type with array suffix
+ parts.push(sql_type.to_sql_string());
+
+ // PRIMARY KEY for #[id] fields
+ if field.is_id() {
+ parts.push("PRIMARY KEY".to_string());
+ } else if !sql_type.nullable {
+ // NOT NULL unless nullable
+ parts.push("NOT NULL".to_string());
+ }
+
+ // UNIQUE constraint
+ if field.is_unique() {
+ parts.push("UNIQUE".to_string());
+ }
+
+ // DEFAULT value
+ if let Some(ref default) = field.column().default {
+ parts.push(format!("DEFAULT {}", default));
+ }
+
+ // CHECK constraint
+ if let Some(ref check) = field.column().check {
+ parts.push(format!("CHECK ({})", check));
+ }
+
+ // Foreign key REFERENCES from #[belongs_to]
+ if field.is_relation()
+ && let Some(parent) = field.belongs_to()
+ {
+ let parent_table = parent.to_string().to_case(Case::Snake);
+ // Use same schema as current entity for the reference
+ let ref_table = format!("{}.{}", entity.schema, pluralize(&parent_table));
+ let mut fk_str = format!("REFERENCES {}(id)", ref_table);
+
+ if let Some(action) = &field.storage.on_delete {
+ fk_str.push_str(&format!(" ON DELETE {}", action.as_sql()));
+ }
+
+ parts.push(fk_str);
+ }
+
+ parts.join(" ")
+}
+
+/// Generate CREATE INDEX for a single column.
+fn generate_single_index(entity: &EntityDef, field: &FieldDef) -> String {
+ let table = &entity.table;
+ let schema = &entity.schema;
+ let column = field.column_name();
+
+ let index_type = field.column().index.unwrap_or_default();
+ let index_name = format!("idx_{}_{}", table, column);
+ let using = index_type.as_sql_using();
+
+ format!(
+ "CREATE INDEX IF NOT EXISTS {} ON {}.{}{} ({});\n",
+ index_name, schema, table, using, column
+ )
+}
+
+/// Generate CREATE INDEX for a composite index.
+fn generate_composite_index(entity: &EntityDef, idx: &CompositeIndexDef) -> String {
+ let table = &entity.table;
+ let schema = &entity.schema;
+
+ let index_name = idx.name_or_default(table);
+ let using = idx.index_type.as_sql_using();
+ let unique_str = if idx.unique { "UNIQUE " } else { "" };
+ let columns = idx.columns.join(", ");
+
+ let mut sql = format!(
+ "CREATE {}INDEX IF NOT EXISTS {} ON {}.{}{} ({})",
+ unique_str, index_name, schema, table, using, columns
+ );
+
+ if let Some(ref where_clause) = idx.where_clause {
+ sql.push_str(&format!(" WHERE {}", where_clause));
+ }
+
+ sql.push_str(";\n");
+ sql
+}
+
+/// Simple pluralization for table names.
+fn pluralize(s: &str) -> String {
+ if s.ends_with('s') || s.ends_with("sh") || s.ends_with("ch") || s.ends_with('x') {
+ format!("{}es", s)
+ } else if s.ends_with('y') && !s.ends_with("ay") && !s.ends_with("ey") && !s.ends_with("oy") {
+ format!("{}ies", &s[..s.len() - 1])
+ } else {
+ format!("{}s", s)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use syn::DeriveInput;
+
+ use super::*;
+ use crate::entity::parse::EntityDef;
+
+ fn parse_entity(tokens: proc_macro2::TokenStream) -> EntityDef {
+ let input: DeriveInput = syn::parse_quote!(#tokens);
+ EntityDef::from_derive_input(&input).unwrap()
+ }
+
+ #[test]
+ fn pluralize_regular() {
+ assert_eq!(pluralize("user"), "users");
+ assert_eq!(pluralize("post"), "posts");
+ }
+
+ #[test]
+ fn pluralize_es() {
+ assert_eq!(pluralize("status"), "statuses");
+ assert_eq!(pluralize("match"), "matches");
+ }
+
+ #[test]
+ fn pluralize_ies() {
+ assert_eq!(pluralize("category"), "categories");
+ assert_eq!(pluralize("company"), "companies");
+ }
+
+ #[test]
+ fn pluralize_ey_oy() {
+ assert_eq!(pluralize("key"), "keys");
+ assert_eq!(pluralize("toy"), "toys");
+ }
+
+ #[test]
+ fn pluralize_sh() {
+ assert_eq!(pluralize("wish"), "wishes");
+ assert_eq!(pluralize("bush"), "bushes");
+ }
+
+ #[test]
+ fn pluralize_x() {
+ assert_eq!(pluralize("box"), "boxes");
+ assert_eq!(pluralize("fox"), "foxes");
+ }
+
+ #[test]
+ fn pluralize_ay() {
+ assert_eq!(pluralize("day"), "days");
+ assert_eq!(pluralize("way"), "ways");
+ }
+
+ #[test]
+ fn generate_up_basic() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ pub name: String,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("CREATE TABLE IF NOT EXISTS public.users"));
+ assert!(sql.contains("id UUID PRIMARY KEY"));
+ assert!(sql.contains("name TEXT NOT NULL"));
+ }
+
+ #[test]
+ fn generate_down_basic() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", schema = "core", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let sql = generate_down(&entity);
+ assert_eq!(sql, "DROP TABLE IF EXISTS core.users CASCADE;\n");
+ }
+
+ #[test]
+ fn generate_up_with_unique() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[column(unique)]
+ pub email: String,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("email TEXT NOT NULL UNIQUE"));
+ }
+
+ #[test]
+ fn generate_up_with_default() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[column(default = "true")]
+ pub active: bool,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("DEFAULT true"));
+ }
+
+ #[test]
+ fn generate_up_with_check() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[column(check = "age >= 0")]
+ pub age: i32,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("CHECK (age >= 0)"));
+ }
+
+ #[test]
+ fn generate_up_with_index() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[column(index)]
+ pub status: String,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("CREATE INDEX IF NOT EXISTS idx_users_status"));
+ }
+
+ #[test]
+ fn generate_up_with_gin_index() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[column(index = "gin")]
+ pub tags: Vec<String>,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("USING gin"));
+ }
+
+ #[test]
+ fn generate_up_with_nullable() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ pub bio: Option<String>,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("bio TEXT"));
+ assert!(!sql.contains("bio TEXT NOT NULL"));
+ }
+
+ #[test]
+ fn generate_up_with_varchar() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[column(varchar = 100)]
+ pub name: String,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("VARCHAR(100)"));
+ }
+
+ #[test]
+ fn generate_up_with_belongs_to() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "posts", migrations)]
+ pub struct Post {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[belongs_to(User)]
+ pub user_id: uuid::Uuid,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("REFERENCES public.users(id)"));
+ }
+
+ #[test]
+ fn generate_up_with_belongs_to_on_delete_cascade() {
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "posts", migrations)]
+ pub struct Post {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ #[belongs_to(User, on_delete = "cascade")]
+ pub user_id: uuid::Uuid,
+ }
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("REFERENCES public.users(id) ON DELETE CASCADE"));
+ }
+
+ #[test]
+ fn generate_composite_index_basic() {
+ let idx = CompositeIndexDef {
+ name: None,
+ columns: vec!["name".to_string(), "email".to_string()],
+ index_type: crate::entity::parse::IndexType::BTree,
+ unique: false,
+ where_clause: None
+ };
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ pub name: String,
+ #[field(create, response)]
+ pub email: String,
+ }
+ });
+ let sql = generate_composite_index(&entity, &idx);
+ assert!(sql.contains("CREATE INDEX IF NOT EXISTS idx_users_name_email"));
+ assert!(sql.contains("(name, email)"));
+ }
+
+ #[test]
+ fn generate_composite_index_unique() {
+ let idx = CompositeIndexDef {
+ name: None,
+ columns: vec!["tenant_id".to_string(), "email".to_string()],
+ index_type: crate::entity::parse::IndexType::BTree,
+ unique: true,
+ where_clause: None
+ };
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let sql = generate_composite_index(&entity, &idx);
+ assert!(sql.contains("CREATE UNIQUE INDEX"));
+ assert!(sql.contains("(tenant_id, email)"));
+ }
+
+ #[test]
+ fn generate_composite_index_with_where() {
+ let idx = CompositeIndexDef {
+ name: Some("idx_active_users".to_string()),
+ columns: vec!["email".to_string()],
+ index_type: crate::entity::parse::IndexType::BTree,
+ unique: false,
+ where_clause: Some("active = true".to_string())
+ };
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let sql = generate_composite_index(&entity, &idx);
+ assert!(sql.contains("idx_active_users"));
+ assert!(sql.contains("WHERE active = true"));
+ }
+
+ #[test]
+ fn generate_composite_index_gin() {
+ let idx = CompositeIndexDef {
+ name: None,
+ columns: vec!["tags".to_string()],
+ index_type: crate::entity::parse::IndexType::Gin,
+ unique: false,
+ where_clause: None
+ };
+ let entity = parse_entity(quote::quote! {
+ #[entity(table = "posts", migrations)]
+ pub struct Post {
+ #[id]
+ pub id: uuid::Uuid,
+ }
+ });
+ let sql = generate_composite_index(&entity, &idx);
+ assert!(sql.contains("USING gin"));
+ }
+
+ #[test]
+ fn generate_up_with_composite_indexes() {
+ let mut entity = parse_entity(quote::quote! {
+ #[entity(table = "users", migrations)]
+ pub struct User {
+ #[id]
+ pub id: uuid::Uuid,
+ #[field(create, response)]
+ pub name: String,
+ #[field(create, response)]
+ pub email: String,
+ }
+ });
+ entity.indexes.push(CompositeIndexDef {
+ name: None,
+ columns: vec!["name".to_string(), "email".to_string()],
+ index_type: crate::entity::parse::IndexType::BTree,
+ unique: false,
+ where_clause: None
+ });
+ let sql = generate_up(&entity);
+ assert!(sql.contains("CREATE INDEX IF NOT EXISTS idx_users_name_email"));
+ }
+}
diff --git a/crates/entity-derive-impl/src/entity/migrations/types.rs b/crates/entity-derive-impl/src/entity/migrations/types.rs
new file mode 100644
index 0000000..9e50c00
--- /dev/null
+++ b/crates/entity-derive-impl/src/entity/migrations/types.rs
@@ -0,0 +1,142 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+//! Type mapping from Rust to database-specific SQL types.
+//!
+//! This module provides traits and implementations for mapping Rust types
+//! to their SQL equivalents during migration generation.
+//!
+//! # Architecture
+//!
+//! ```text
+//! ┌─────────────────────────────────────────────────────────────────────┐
+//! │ Type Mapping System │
+//! ├─────────────────────────────────────────────────────────────────────┤
+//! │ │
+//! │ Rust Type TypeMapper SQL Type │
+//! │ │
+//! │ Uuid ──► PostgresMapper ──► UUID │
+//! │ String ──► ──► TEXT / VARCHAR(n) │
+//! │ i32 ──► ──► INTEGER │
+//! │ DateTime<Utc> ──► ──► TIMESTAMPTZ │
+//! │ Option<T> ──► ──► T (nullable) │
+//! │ Vec<T> ──► ──► T[] │
+//! │ │
+//! └─────────────────────────────────────────────────────────────────────┘
+//! ```
+
+mod postgres;
+
+pub use postgres::PostgresTypeMapper;
+use syn::Type;
+
+use crate::entity::parse::ColumnConfig;
+
+/// Mapped SQL type representation.
+#[derive(Debug, Clone)]
+pub struct SqlType {
+ /// SQL type name (e.g., "UUID", "TEXT", "INTEGER").
+ pub name: String,
+
+ /// Whether this type allows NULL values.
+ pub nullable: bool,
+
+ /// Array dimension (0 = scalar, 1 = T[], 2 = T[][], etc.).
+ pub array_dim: usize
+}
+
+impl SqlType {
+ /// Create a non-nullable SQL type.
+ #[cfg(test)]
+ #[must_use]
+ pub fn new(name: impl Into<String>) -> Self {
+ Self {
+ name: name.into(),
+ nullable: false,
+ array_dim: 0
+ }
+ }
+
+ /// Create a nullable SQL type.
+ #[cfg(test)]
+ #[must_use]
+ pub fn nullable(name: impl Into<String>) -> Self {
+ Self {
+ name: name.into(),
+ nullable: true,
+ array_dim: 0
+ }
+ }
+
+ /// Get the full SQL type string with array suffix.
+ #[must_use]
+ pub fn to_sql_string(&self) -> String {
+ if self.array_dim > 0 {
+ format!("{}{}", self.name, "[]".repeat(self.array_dim))
+ } else {
+ self.name.clone()
+ }
+ }
+}
+
+/// Trait for mapping Rust types to SQL types.
+///
+/// Implement this trait for each database dialect.
+pub trait TypeMapper {
+ /// Map a Rust type to its SQL representation.
+ ///
+ /// # Arguments
+ ///
+ /// * `ty` - The Rust type from syn
+ /// * `column` - Column configuration with overrides
+ ///
+ /// # Returns
+ ///
+ /// `SqlType` with name, nullable flag, and array dimension.
+ fn map_type(&self, ty: &Type, column: &ColumnConfig) -> SqlType;
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn sql_type_new() {
+ let ty = SqlType::new("INTEGER");
+ assert_eq!(ty.name, "INTEGER");
+ assert!(!ty.nullable);
+ assert_eq!(ty.array_dim, 0);
+ }
+
+ #[test]
+ fn sql_type_nullable() {
+ let ty = SqlType::nullable("TEXT");
+ assert!(ty.nullable);
+ }
+
+ #[test]
+ fn sql_type_to_sql_string_scalar() {
+ let ty = SqlType::new("UUID");
+ assert_eq!(ty.to_sql_string(), "UUID");
+ }
+
+ #[test]
+ fn sql_type_to_sql_string_array() {
+ let ty = SqlType {
+ name: "TEXT".to_string(),
+ nullable: false,
+ array_dim: 1
+ };
+ assert_eq!(ty.to_sql_string(), "TEXT[]");
+ }
+
+ #[test]
+ fn sql_type_to_sql_string_2d_array() {
+ let ty = SqlType {
+ name: "INTEGER".to_string(),
+ nullable: false,
+ array_dim: 2
+ };
+ assert_eq!(ty.to_sql_string(), "INTEGER[][]");
+ }
+}
diff --git a/crates/entity-derive-impl/src/entity/migrations/types/postgres.rs b/crates/entity-derive-impl/src/entity/migrations/types/postgres.rs
new file mode 100644
index 0000000..8e0582a
--- /dev/null
+++ b/crates/entity-derive-impl/src/entity/migrations/types/postgres.rs
@@ -0,0 +1,318 @@
+// SPDX-FileCopyrightText: 2025-2026 RAprogramm
+// SPDX-License-Identifier: MIT
+
+//! PostgreSQL type mapping.
+//!
+//! Maps Rust types to PostgreSQL types for migration generation.
+//!
+//! # Type Mapping Table
+//!
+//! | Rust Type | PostgreSQL Type | Notes |
+//! |-----------|-----------------|-------|
+//! | `Uuid` | `UUID` | |
+//! | `String` | `TEXT` | Or `VARCHAR(n)` with `#[column(varchar = n)]` |
+//! | `i16` | `SMALLINT` | |
+//! | `i32` | `INTEGER` | |
+//! | `i64` | `BIGINT` | |
+//! | `f32` | `REAL` | |
+//! | `f64` | `DOUBLE PRECISION` | |
+//! | `bool` | `BOOLEAN` | |
+//! | `DateTime<Utc>` | `TIMESTAMPTZ` | |
+//! | `NaiveDate` | `DATE` | |
+//! | `NaiveTime` | `TIME` | |
+//! | `NaiveDateTime` | `TIMESTAMP` | |
+//! | `Option<T>` | `T` | Nullable |
+//! | `Vec<T>` | `T[]` | PostgreSQL array |
+//! | `serde_json::Value` | `JSONB` | |
+//! | `Decimal` | `DECIMAL` | |
+//! | `IpAddr` | `INET` | |
+
+use syn::Type;
+
+use super::{SqlType, TypeMapper};
+use crate::entity::parse::ColumnConfig;
+
+/// PostgreSQL type mapper.
+///
+/// Converts Rust types to PostgreSQL SQL types with full support for:
+/// - Primitive types (integers, floats, booleans)
+/// - String types with optional VARCHAR length
+/// - Date/time types from chrono
+/// - UUID from uuid crate
+/// - JSON from serde_json
+/// - Arrays via Vec
+/// - Nullable types via Option
+pub struct PostgresTypeMapper;
+
+impl TypeMapper for PostgresTypeMapper {
+ fn map_type(&self, ty: &Type, column: &ColumnConfig) -> SqlType {
+ // Handle explicit SQL type override
+ if let Some(ref explicit) = column.sql_type {
+ return SqlType {
+ name: explicit.clone(),
+ nullable: is_option(ty) || column.nullable,
+ array_dim: 0
+ };
+ }
+
+ // Handle Option<T>
+ if let Some(inner) = extract_option_inner(ty) {
+ let mut result = self.map_type(inner, column);
+ result.nullable = true;
+ return result;
+ }
+
+ // Handle Vec<T> (PostgreSQL arrays)
+ if let Some(inner) = extract_vec_inner(ty) {
+ let mut result = self.map_type(inner, column);
+ result.array_dim += 1;
+ return result;
+ }
+
+ // Map core types
+ let name = map_type_name(ty, column);
+
+ SqlType {
+ name,
+ nullable: column.nullable,
+ array_dim: 0
+ }
+ }
+}
+
+/// Map a Rust type path to PostgreSQL type name.
+fn map_type_name(ty: &Type, column: &ColumnConfig) -> String {
+ let type_str = type_path_string(ty);
+
+ match type_str.as_str() {
+ // UUIDs
+ "Uuid" | "uuid::Uuid" => "UUID".to_string(),
+
+ // Strings
+ "String" | "str" => {
+ if let Some(len) = column.varchar {
+ format!("VARCHAR({})", len)
+ } else {
+ "TEXT".to_string()
+ }
+ }
+
+ // Integers
+ "i8" => "SMALLINT".to_string(), // PostgreSQL has no TINYINT
+ "i16" => "SMALLINT".to_string(),
+ "i32" => "INTEGER".to_string(),
+ "i64" => "BIGINT".to_string(),
+ "u8" => "SMALLINT".to_string(),
+ "u16" => "INTEGER".to_string(),
+ "u32" => "BIGINT".to_string(),
+ "u64" => "BIGINT".to_string(), // May overflow
+
+ // Floats
+ "f32" => "REAL".to_string(),
+ "f64" => "DOUBLE PRECISION".to_string(),
+
+ // Boolean
+ "bool" => "BOOLEAN".to_string(),
+
+ // Date/Time (chrono)
+ "DateTime" | "chrono::DateTime" => "TIMESTAMPTZ".to_string(),
+ "NaiveDate" | "chrono::NaiveDate" => "DATE".to_string(),
+ "NaiveTime" | "chrono::NaiveTime" => "TIME".to_string(),
+ "NaiveDateTime" | "chrono::NaiveDateTime" => "TIMESTAMP".to_string(),
+
+ // JSON
+ "Value" | "serde_json::Value" | "Json" | "sqlx::types::Json" => "JSONB".to_string(),
+
+ // Decimal
+ "Decimal" | "rust_decimal::Decimal" | "BigDecimal" | "bigdecimal::BigDecimal" => {
+ "DECIMAL".to_string()
+ }
+
+ // Network
+ "IpAddr" | "std::net::IpAddr" | "Ipv4Addr" | "Ipv6Addr" => "INET".to_string(),
+ "MacAddr" => "MACADDR".to_string(),
+
+ // Binary
+ "Vec" | "bytes::Bytes" => "BYTEA".to_string(),
+
+ // Fallback to TEXT for unknown types
+ _ => "TEXT".to_string()
+ }
+}
+
+/// Extract the type path as a string.
+fn type_path_string(ty: &Type) -> String {
+ if let Type::Path(type_path) = ty {
+ type_path
+ .path
+ .segments
+ .iter()
+ .map(|s| s.ident.to_string())
+ .collect::<Vec<_>>()
+ .join("::")
+ } else {
+ String::new()
+ }
+}
+
+/// Check if a type is Option.
+fn is_option(ty: &Type) -> bool {
+ extract_option_inner(ty).is_some()
+}
+
+/// Extract the inner type from Option<T>.
+fn extract_option_inner(ty: &Type) -> Option<&Type> {
+ if let Type::Path(type_path) = ty
+ && let Some(segment) = type_path.path.segments.last()
+ && segment.ident == "Option"
+ && let syn::PathArguments::AngleBracketed(args) = &segment.arguments
+ && let Some(syn::GenericArgument::Type(inner)) = args.args.first()
+ {
+ return Some(inner);
+ }
+ None
+}
+
+/// Extract the inner type from Vec<T>.
+fn extract_vec_inner(ty: &Type) -> Option<&Type> {
+ if let Type::Path(type_path) = ty
+ && let Some(segment) = type_path.path.segments.last()
+ && segment.ident == "Vec"
+ && let syn::PathArguments::AngleBracketed(args) = &segment.arguments
+ && let Some(syn::GenericArgument::Type(inner)) = args.args.first()
+ {
+ return Some(inner);
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use syn::parse_quote;
+
+ use super::*;
+
+ fn map_type(ty_tokens: proc_macro2::TokenStream) -> SqlType {
+ let ty: Type = parse_quote!(#ty_tokens);
+ let column = ColumnConfig::default();
+ PostgresTypeMapper.map_type(&ty, &column)
+ }
+
+ fn map_type_with_column(ty_tokens: proc_macro2::TokenStream, column: ColumnConfig) -> SqlType {
+ let ty: Type = parse_quote!(#ty_tokens);
+ PostgresTypeMapper.map_type(&ty, &column)
+ }
+
+ #[test]
+ fn map_uuid() {
+ let ty = map_type(quote::quote! { Uuid });
+ assert_eq!(ty.name, "UUID");
+ assert!(!ty.nullable);
+ }
+
+ #[test]
+ fn map_string() {
+ let ty = map_type(quote::quote! { String });
+ assert_eq!(ty.name, "TEXT");
+ }
+
+ #[test]
+ fn map_string_varchar() {
+ let column = ColumnConfig {
+ varchar: Some(255),
+ ..Default::default()
+ };
+ let ty = map_type_with_column(quote::quote! { String }, column);
+ assert_eq!(ty.name, "VARCHAR(255)");
+ }
+
+ #[test]
+ fn map_integers() {
+ assert_eq!(map_type(quote::quote! { i16 }).name, "SMALLINT");
+ assert_eq!(map_type(quote::quote! { i32 }).name, "INTEGER");
+ assert_eq!(map_type(quote::quote! { i64 }).name, "BIGINT");
+ }
+
+ #[test]
+ fn map_floats() {
+ assert_eq!(map_type(quote::quote! { f32 }).name, "REAL");
+ assert_eq!(map_type(quote::quote! { f64 }).name, "DOUBLE PRECISION");
+ }
+
+ #[test]
+ fn map_bool() {
+ assert_eq!(map_type(quote::quote! { bool }).name, "BOOLEAN");
+ }
+
+ #[test]
+ fn map_datetime() {
+ let ty = map_type(quote::quote! { DateTime<chrono::Utc> });
+ assert_eq!(ty.name, "TIMESTAMPTZ");
+ }
+
+ #[test]
+ fn map_naive_date() {
+ assert_eq!(map_type(quote::quote! { NaiveDate }).name, "DATE");
+ }
+
+ #[test]
+ fn map_option_nullable() {
+ let ty = map_type(quote::quote! { Option<String> });
+ assert_eq!(ty.name, "TEXT");
+ assert!(ty.nullable);
+ }
+
+ #[test]
+ fn map_vec_to_array() {
+ let ty = map_type(quote::quote! { Vec<String> });
+ assert_eq!(ty.name, "TEXT");
+ assert_eq!(ty.array_dim, 1);
+ assert_eq!(ty.to_sql_string(), "TEXT[]");
+ }
+
+ #[test]
+ fn map_vec_option() {
+ let ty = map_type(quote::quote! { Vec