@@ -190,6 +190,7 @@ def compile_foreign_key(
     attr_sql: list[str],
     foreign_key_sql: list[str],
     index_sql: list[str],
+    adapter,
     fk_attribute_map: dict[str, tuple[str, str]] | None = None,
 ) -> None:
     """
@@ -212,6 +213,8 @@ def compile_foreign_key(
         SQL FOREIGN KEY constraints. Updated in place.
     index_sql : list[str]
         SQL INDEX declarations. Updated in place.
+    adapter : DatabaseAdapter
+        Database adapter for backend-specific SQL generation.
     fk_attribute_map : dict, optional
         Mapping of ``child_attr -> (parent_table, parent_attr)``. Updated in place.
 
@@ -268,22 +271,21 @@ def compile_foreign_key(
         parent_attr = ref.heading[attr].original_name
         fk_attribute_map[attr] = (parent_table, parent_attr)
 
-    # declare the foreign key
+    # declare the foreign key using adapter for identifier quoting
+    fk_cols = ", ".join(adapter.quote_identifier(col) for col in ref.primary_key)
+    pk_cols = ", ".join(adapter.quote_identifier(ref.heading[name].original_name) for name in ref.primary_key)
     foreign_key_sql.append(
-        "FOREIGN KEY (`{fk}`) REFERENCES {ref} (`{pk}`) ON UPDATE CASCADE ON DELETE RESTRICT".format(
-            fk="`,`".join(ref.primary_key),
-            pk="`,`".join(ref.heading[name].original_name for name in ref.primary_key),
-            ref=ref.support[0],
-        )
+        f"FOREIGN KEY ({fk_cols}) REFERENCES {ref.support[0]} ({pk_cols}) ON UPDATE CASCADE ON DELETE RESTRICT"
     )
 
     # declare unique index
     if is_unique:
-        index_sql.append("UNIQUE INDEX ({attrs})".format(attrs=",".join("`%s`" % attr for attr in ref.primary_key)))
+        index_cols = ", ".join(adapter.quote_identifier(attr) for attr in ref.primary_key)
+        index_sql.append(f"UNIQUE INDEX ({index_cols})")
 
 
 def prepare_declare(
-    definition: str, context: dict
+    definition: str, context: dict, adapter
 ) -> tuple[str, list[str], list[str], list[str], list[str], list[str], dict[str, tuple[str, str]]]:
     """
     Parse a table definition into its components.
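A quick illustration of the quoting behavior this hunk relies on (the adapter classes and column names below are hypothetical, shown only to make the backtick-vs-double-quote difference concrete):

    # Hypothetical adapters sketching the quote_identifier method assumed above.
    class MySQLAdapter:
        def quote_identifier(self, name: str) -> str:
            return f"`{name}`"

    class PostgresAdapter:
        def quote_identifier(self, name: str) -> str:
            return '"' + name.replace('"', '""') + '"'

    cols = ["subject_id", "session_id"]
    print(", ".join(MySQLAdapter().quote_identifier(c) for c in cols))     # `subject_id`, `session_id`
    print(", ".join(PostgresAdapter().quote_identifier(c) for c in cols))  # "subject_id", "session_id"

Either way the generated clause keeps the same shape - FOREIGN KEY (<cols>) REFERENCES <parent> (<cols>) ON UPDATE CASCADE ON DELETE RESTRICT - and only the identifier quoting changes per backend.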
@@ -294,6 +296,8 @@ def prepare_declare(
         DataJoint table definition string.
     context : dict
         Namespace for resolving foreign key references.
+    adapter : DatabaseAdapter
+        Database adapter for backend-specific SQL generation.
 
     Returns
     -------
@@ -337,12 +341,13 @@ def prepare_declare(
                 attribute_sql,
                 foreign_key_sql,
                 index_sql,
+                adapter,
                 fk_attribute_map,
             )
         elif re.match(r"^(unique\s+)?index\s*.*$", line, re.I):  # index
-            compile_index(line, index_sql)
+            compile_index(line, index_sql, adapter)
         else:
-            name, sql, store = compile_attribute(line, in_key, foreign_key_sql, context)
+            name, sql, store = compile_attribute(line, in_key, foreign_key_sql, context, adapter)
             if store:
                 external_stores.append(store)
             if in_key and name not in primary_key:
@@ -363,36 +368,47 @@ def prepare_declare(
 
 
 def declare(
-    full_table_name: str, definition: str, context: dict
-) -> tuple[str, list[str], list[str], dict[str, tuple[str, str]]]:
+    full_table_name: str, definition: str, context: dict, adapter
+) -> tuple[str, list[str], list[str], dict[str, tuple[str, str]], list[str]]:
     r"""
     Parse a definition and generate SQL CREATE TABLE statement.
 
     Parameters
     ----------
     full_table_name : str
-        Fully qualified table name (e.g., ```\`schema\`.\`table\```).
+        Fully qualified table name (e.g., ```\`schema\`.\`table\``` or ```"schema"."table"```).
     definition : str
         DataJoint table definition string.
     context : dict
         Namespace for resolving foreign key references.
+    adapter : DatabaseAdapter
+        Database adapter for backend-specific SQL generation.
 
     Returns
     -------
     tuple
-        Four-element tuple:
+        Five-element tuple:
 
         - sql : str - SQL CREATE TABLE statement
         - external_stores : list[str] - External store names used
         - primary_key : list[str] - Primary key attribute names
         - fk_attribute_map : dict - FK attribute lineage mapping
+        - additional_ddl : list[str] - Additional DDL statements (COMMENT ON, etc.)
 
     Raises
     ------
     DataJointError
         If table name exceeds max length or has no primary key.
     """
-    table_name = full_table_name.strip("`").split(".")[1]
+    # Parse table name without assuming quote character
+    # Extract schema.table from quoted name using adapter
+    quote_char = adapter.quote_identifier("x")[0]  # Get quote char from adapter
+    parts = full_table_name.split(".")
+    if len(parts) == 2:
+        table_name = parts[1].strip(quote_char)
+    else:
+        table_name = parts[0].strip(quote_char)
+
     if len(table_name) > MAX_TABLE_NAME_LENGTH:
         raise DataJointError(
             "Table name `{name}` exceeds the max length of {max_length}".format(
@@ -408,35 +424,42 @@ def declare(
         index_sql,
         external_stores,
         fk_attribute_map,
-    ) = prepare_declare(definition, context)
+    ) = prepare_declare(definition, context, adapter)
 
     # Add hidden job metadata for Computed/Imported tables (not parts)
-    # Note: table_name may still have backticks, strip them for prefix checking
-    clean_table_name = table_name.strip("`")
     if config.jobs.add_job_metadata:
         # Check if this is a Computed (__) or Imported (_) table, but not a Part (contains __ in middle)
-        is_computed = clean_table_name.startswith("__") and "__" not in clean_table_name[2:]
-        is_imported = clean_table_name.startswith("_") and not clean_table_name.startswith("__")
+        is_computed = table_name.startswith("__") and "__" not in table_name[2:]
+        is_imported = table_name.startswith("_") and not table_name.startswith("__")
         if is_computed or is_imported:
-            job_metadata_sql = [
-                "`_job_start_time` datetime(3) DEFAULT NULL",
-                "`_job_duration` float DEFAULT NULL",
-                "`_job_version` varchar(64) DEFAULT ''",
-            ]
+            job_metadata_sql = adapter.job_metadata_columns()
             attribute_sql.extend(job_metadata_sql)
 
     if not primary_key:
         raise DataJointError("Table must have a primary key")
 
+    additional_ddl = []  # Track additional DDL statements (e.g., COMMENT ON for PostgreSQL)
+
+    # Build PRIMARY KEY clause using adapter
+    pk_cols = ", ".join(adapter.quote_identifier(pk) for pk in primary_key)
+    pk_clause = f"PRIMARY KEY ({pk_cols})"
+
+    # Assemble CREATE TABLE
     sql = (
-        "CREATE TABLE IF NOT EXISTS %s (\n" % full_table_name
-        + ",\n".join(attribute_sql + ["PRIMARY KEY (`" + "`,`".join(primary_key) + "`)"] + foreign_key_sql + index_sql)
-        + '\n) ENGINE=InnoDB, COMMENT "%s"' % table_comment
+        f"CREATE TABLE IF NOT EXISTS {full_table_name} (\n"
+        + ",\n".join(attribute_sql + [pk_clause] + foreign_key_sql + index_sql)
+        + f"\n) {adapter.table_options_clause(table_comment)}"
     )
-    return sql, external_stores, primary_key, fk_attribute_map
 
+    # Add table-level comment DDL if needed (PostgreSQL)
+    table_comment_ddl = adapter.table_comment_ddl(full_table_name, table_comment)
+    if table_comment_ddl:
+        additional_ddl.append(table_comment_ddl)
+
+    return sql, external_stores, primary_key, fk_attribute_map, additional_ddl
 
 
-def _make_attribute_alter(new: list[str], old: list[str], primary_key: list[str]) -> list[str]:
+
+def _make_attribute_alter(new: list[str], old: list[str], primary_key: list[str], adapter) -> list[str]:
     """
     Generate SQL ALTER commands for attribute changes.
 
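Taken together, the call sites in this diff imply roughly the following adapter surface. This Protocol is inferred from usage and is only a sketch, not the project's actual interface definition:

    from typing import Protocol

    class DatabaseAdapter(Protocol):
        """Backend-specific SQL generation, as assumed by the call sites in this diff."""

        def quote_identifier(self, name: str) -> str: ...
        def core_type_to_sql(self, core_name: str) -> str | None: ...
        def job_metadata_columns(self) -> list[str]: ...
        def table_options_clause(self, table_comment: str) -> str: ...
        def table_comment_ddl(self, full_table_name: str, table_comment: str) -> str | None: ...
        def format_column_definition(
            self,
            name: str,
            sql_type: str,
            nullable: bool,
            default: str | None,
            comment: str | None,
        ) -> str: ...

For MySQL, table_options_clause would presumably return something like 'ENGINE=InnoDB, COMMENT "..."' (matching the removed line above) and table_comment_ddl would return None; for PostgreSQL the options clause carries no comment and table_comment_ddl returns a COMMENT ON TABLE statement, which is why declare() now also returns additional_ddl.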
@@ -448,6 +471,8 @@ def _make_attribute_alter(new: list[str], old: list[str], primary_key: list[str]
         Old attribute SQL declarations.
     primary_key : list[str]
         Primary key attribute names (cannot be altered).
+    adapter : DatabaseAdapter
+        Database adapter for backend-specific SQL generation.
 
     Returns
     -------
@@ -459,8 +484,9 @@ def _make_attribute_alter(new: list[str], old: list[str], primary_key: list[str]
     DataJointError
         If an attribute is renamed twice or renamed from non-existent attribute.
     """
-    # parse attribute names
-    name_regexp = re.compile(r"^`(?P<name>\w+)`")
+    # parse attribute names - use adapter's quote character
+    quote_char = re.escape(adapter.quote_identifier("x")[0])
+    name_regexp = re.compile(rf"^{quote_char}(?P<name>\w+){quote_char}")
     original_regexp = re.compile(r'COMMENT "{\s*(?P<name>\w+)\s*}')
     matched = ((name_regexp.match(d), original_regexp.search(d)) for d in new)
     new_names = dict((d.group("name"), n and n.group("name")) for d, n in matched)
@@ -486,7 +512,7 @@ def _make_attribute_alter(new: list[str], old: list[str], primary_key: list[str]
 
     # dropping attributes
     to_drop = [n for n in old_names if n not in renamed and n not in new_names]
-    sql = ["DROP `%s`" % n for n in to_drop]
+    sql = [f"DROP {adapter.quote_identifier(n)}" for n in to_drop]
     old_names = [name for name in old_names if name not in to_drop]
 
     # add or change attributes in order
@@ -503,25 +529,24 @@ def _make_attribute_alter(new: list[str], old: list[str], primary_key: list[str]
         if idx >= 1 and old_names[idx - 1] != (prev[1] or prev[0]):
             after = prev[0]
         if new_def not in old or after:
-            sql.append(
-                "{command} {new_def} {after}".format(
-                    command=(
-                        "ADD"
-                        if (old_name or new_name) not in old_names
-                        else "MODIFY"
-                        if not old_name
-                        else "CHANGE `%s`" % old_name
-                    ),
-                    new_def=new_def,
-                    after="" if after is None else "AFTER `%s`" % after,
-                )
-            )
+            # Determine command type
+            if (old_name or new_name) not in old_names:
+                command = "ADD"
+            elif not old_name:
+                command = "MODIFY"
+            else:
+                command = f"CHANGE {adapter.quote_identifier(old_name)}"
+
+            # Build after clause
+            after_clause = "" if after is None else f"AFTER {adapter.quote_identifier(after)}"
+
+            sql.append(f"{command} {new_def} {after_clause}")
         prev = new_name, old_name
 
     return sql
 
 
-def alter(definition: str, old_definition: str, context: dict) -> tuple[list[str], list[str]]:
+def alter(definition: str, old_definition: str, context: dict, adapter) -> tuple[list[str], list[str]]:
     """
     Generate SQL ALTER commands for table definition changes.
 
@@ -533,6 +558,8 @@ def alter(definition: str, old_definition: str, context: dict) -> tuple[list[str
         Current table definition.
     context : dict
         Namespace for resolving foreign key references.
+    adapter : DatabaseAdapter
+        Database adapter for backend-specific SQL generation.
 
     Returns
     -------
@@ -555,7 +582,7 @@ def alter(definition: str, old_definition: str, context: dict) -> tuple[list[str
         index_sql,
         external_stores,
         _fk_attribute_map,
-    ) = prepare_declare(definition, context)
+    ) = prepare_declare(definition, context, adapter)
     (
         table_comment_,
         primary_key_,
@@ -564,7 +591,7 @@ def alter(definition: str, old_definition: str, context: dict) -> tuple[list[str
         index_sql_,
         external_stores_,
         _fk_attribute_map_,
-    ) = prepare_declare(old_definition, context)
+    ) = prepare_declare(old_definition, context, adapter)
 
     # analyze differences between declarations
     sql = list()
@@ -575,13 +602,16 @@ def alter(definition: str, old_definition: str, context: dict) -> tuple[list[str
     if index_sql != index_sql_:
         raise NotImplementedError("table.alter cannot alter indexes (yet)")
     if attribute_sql != attribute_sql_:
-        sql.extend(_make_attribute_alter(attribute_sql, attribute_sql_, primary_key))
+        sql.extend(_make_attribute_alter(attribute_sql, attribute_sql_, primary_key, adapter))
     if table_comment != table_comment_:
-        sql.append('COMMENT="%s"' % table_comment)
+        # For MySQL: COMMENT="new comment"
+        # For PostgreSQL: would need COMMENT ON TABLE, but that's not an ALTER TABLE clause
+        # Keep MySQL syntax for now (ALTER TABLE ... COMMENT="...")
+        sql.append(f'COMMENT="{table_comment}"')
     return sql, [e for e in external_stores if e not in external_stores_]
 
 
-def compile_index(line: str, index_sql: list[str]) -> None:
+def compile_index(line: str, index_sql: list[str], adapter) -> None:
     """
     Parse an index declaration and append SQL to index_sql.
 
@@ -592,6 +622,8 @@ def compile_index(line: str, index_sql: list[str]) -> None:
592622 ``"unique index(attr)"``).
593623 index_sql : list[str]
594624 List of index SQL declarations. Updated in place.
625+ adapter : DatabaseAdapter
626+ Database adapter for backend-specific SQL generation.
595627
596628 Raises
597629 ------
@@ -604,7 +636,7 @@ def format_attribute(attr):
         if match is None:
             return attr
         if match["path"] is None:
-            return f"`{attr}`"
+            return adapter.quote_identifier(attr)
         return f"({attr})"
 
     match = re.match(r"(?P<unique>unique\s+)?index\s*\(\s*(?P<args>.*)\)", line, re.I)
@@ -621,7 +653,7 @@ def format_attribute(attr):
     )
 
 
-def substitute_special_type(match: dict, category: str, foreign_key_sql: list[str], context: dict) -> None:
+def substitute_special_type(match: dict, category: str, foreign_key_sql: list[str], context: dict, adapter) -> None:
     """
     Substitute special types with their native SQL equivalents.
 
@@ -640,6 +672,8 @@ def substitute_special_type(match: dict, category: str, foreign_key_sql: list[st
         Foreign key declarations (unused, kept for API compatibility).
     context : dict
         Namespace for codec lookup (unused, kept for API compatibility).
+    adapter : DatabaseAdapter
+        Database adapter for backend-specific type mapping.
     """
     if category == "CODEC":
         # Codec - resolve to underlying dtype
@@ -660,19 +694,21 @@ def substitute_special_type(match: dict, category: str, foreign_key_sql: list[st
         # Recursively resolve if dtype is also a special type
         category = match_type(match["type"])
         if category in SPECIAL_TYPES:
-            substitute_special_type(match, category, foreign_key_sql, context)
+            substitute_special_type(match, category, foreign_key_sql, context, adapter)
     elif category in CORE_TYPE_NAMES:
-        # Core DataJoint type - substitute with native SQL type if mapping exists
+        # Core DataJoint type - substitute with native SQL type using adapter
         core_name = category.lower()
-        sql_type = CORE_TYPE_SQL.get(core_name)
+        sql_type = adapter.core_type_to_sql(core_name)
         if sql_type is not None:
             match["type"] = sql_type
         # else: type passes through as-is (json, date, datetime, char, varchar, enum)
     else:
         raise DataJointError(f"Unknown special type: {category}")
 
 
-def compile_attribute(line: str, in_key: bool, foreign_key_sql: list[str], context: dict) -> tuple[str, str, str | None]:
+def compile_attribute(
+    line: str, in_key: bool, foreign_key_sql: list[str], context: dict, adapter
+) -> tuple[str, str, str | None]:
     """
     Convert an attribute definition from DataJoint format to SQL.
 
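The adapter lookup replaces the former CORE_TYPE_SQL module-level mapping with a per-backend one. A hedged sketch of what such a mapping might contain (the specific core-type names and SQL equivalents here are illustrative assumptions, not the project's actual table):

    # Illustrative only - real mappings live inside each adapter implementation.
    MYSQL_CORE_TYPES = {"bool": "tinyint(1)", "float64": "double", "int64": "bigint"}
    POSTGRES_CORE_TYPES = {"bool": "boolean", "float64": "double precision", "int64": "bigint"}

    def core_type_to_sql(core_name: str, mapping: dict[str, str]) -> str | None:
        # None signals "pass the declared type through unchanged" (json, date, varchar, ...).
        return mapping.get(core_name)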
@@ -686,6 +722,8 @@ def compile_attribute(line: str, in_key: bool, foreign_key_sql: list[str], conte
         Foreign key declarations (passed to type substitution).
     context : dict
         Namespace for codec lookup.
+    adapter : DatabaseAdapter
+        Database adapter for backend-specific SQL generation.
 
     Returns
     -------
@@ -736,7 +774,7 @@ def compile_attribute(line: str, in_key: bool, foreign_key_sql: list[str], conte
     if category in SPECIAL_TYPES:
         # Core types and Codecs are recorded in comment for reconstruction
         match["comment"] = ":{type}:{comment}".format(**match)
-        substitute_special_type(match, category, foreign_key_sql, context)
+        substitute_special_type(match, category, foreign_key_sql, context, adapter)
     elif category in NATIVE_TYPES:
         # Native type - warn user
         logger.warning(
@@ -750,5 +788,12 @@ def compile_attribute(line: str, in_key: bool, foreign_key_sql: list[str], conte
750788 if ("blob" in final_type ) and match ["default" ] not in {"DEFAULT NULL" , "NOT NULL" }:
751789 raise DataJointError ("The default value for blob attributes can only be NULL in:\n {line}" .format (line = line ))
752790
753- sql = ("`{name}` {type} {default}" + (' COMMENT "{comment}"' if match ["comment" ] else "" )).format (** match )
791+ # Use adapter to format column definition
792+ sql = adapter .format_column_definition (
793+ name = match ["name" ],
794+ sql_type = match ["type" ],
795+ nullable = match ["nullable" ],
796+ default = match ["default" ] if match ["default" ] else None ,
797+ comment = match ["comment" ] if match ["comment" ] else None ,
798+ )
754799 return match ["name" ], sql , match .get ("store" )
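With the extra return value, callers are expected to execute any backend-specific follow-up DDL after the CREATE TABLE itself. A hedged sketch of the calling pattern (the connection object and variable names are illustrative):

    sql, external_stores, primary_key, fk_attribute_map, additional_ddl = declare(
        full_table_name, definition, context, adapter
    )
    connection.query(sql)
    for ddl in additional_ddl:
        # e.g., COMMENT ON TABLE "schema"."table" IS '...' when the adapter targets PostgreSQL
        connection.query(ddl)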