
Commit b14e536

Fix integration issues after AutoPopulate 2.0 rebase
- Update datetime pattern to allow precision (datetime(6))
- Fix jobs.delete() and jobs.drop() to handle undeclared tables
- Fix jobs.ignore() to update existing pending jobs to ignore status
- Rename _make_tuples to make (deprecated in AutoPopulate 2.0)
- Update conftest.py jobs cleanup to handle list return type
- Fix test_jobs_table_primary_key to ensure table is declared first
- Broaden exception handling in fixture teardown

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent fb311ea commit b14e536

6 files changed (+27, -16 lines)

src/datajoint/declare.py

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@
     "blob": (r"blob$", "longblob"),
     # Temporal
     "date": (r"date$", None),
-    "datetime": (r"datetime$", None),
+    "datetime": (r"datetime(\s*\(\d+\))?$", None),
     # String types (with parameters)
     "char": (r"char\s*\(\d+\)$", None),
     "varchar": (r"varchar\s*\(\d+\)$", None),

src/datajoint/jobs.py

Lines changed: 8 additions & 3 deletions
@@ -211,10 +211,14 @@ def completed(self) -> QueryExpression:

     def delete(self) -> None:
         """Delete jobs without confirmation (inherits from delete_quick)."""
+        if not self.is_declared:
+            return  # Nothing to delete if table doesn't exist
         self.delete_quick()

     def drop(self) -> None:
         """Drop the jobs table without confirmation."""
+        if not self.is_declared:
+            return  # Nothing to drop if table doesn't exist
         self.drop_quick()

     def refresh(
@@ -420,8 +424,8 @@ def ignore(self, key: dict) -> None:
         """
         Mark a key to be ignored (skipped during populate).

-        Only inserts new records. Existing job entries cannot be converted to
-        ignore status - they must be cleared first.
+        If the job already exists, updates its status to "ignore".
+        If the job doesn't exist, creates a new job with "ignore" status.

         Args:
             key: Primary key dict for the job
@@ -434,7 +438,8 @@ def ignore(self, key: dict) -> None:
         try:
             self._insert_job_with_status(job_key, "ignore")
         except DuplicateError:
-            pass  # Already tracked
+            # Update existing job to ignore status
+            self.update1({**job_key, "status": "ignore"})

     def _insert_job_with_status(self, key: dict, status: str) -> None:
         """Insert a new job with the given status."""

tests/conftest.py

Lines changed: 14 additions & 9 deletions
@@ -349,7 +349,8 @@ def clean_autopopulate(experiment, trial, ephys):
 def clean_jobs(schema_any):
     """Cleanup fixture for jobs tests."""
     try:
-        schema_any.jobs.delete()
+        for jobs_table in schema_any.jobs:
+            jobs_table.delete()
     except DataJointError:
         pass
     yield
@@ -375,7 +376,8 @@ def schema_any(connection_test, prefix):
     schema_any = dj.Schema(prefix + "_test1", schema.LOCALS_ANY, connection=connection_test)
     assert schema.LOCALS_ANY, "LOCALS_ANY is empty"
     try:
-        schema_any.jobs.delete()
+        for jobs_table in schema_any.jobs:
+            jobs_table.delete()
     except DataJointError:
         pass
     schema_any(schema.TTest)
@@ -418,9 +420,10 @@ def schema_any(connection_test, prefix):
     schema_any(schema.Longblob)
     yield schema_any
     try:
-        schema_any.jobs.delete()
-    except DataJointError:
-        pass
+        for jobs_table in schema_any.jobs:
+            jobs_table.delete()
+    except Exception:
+        pass  # Ignore cleanup errors (connection may be closed)
     schema_any.drop()


@@ -430,7 +433,8 @@ def schema_any_fresh(connection_test, prefix):
     schema_any = dj.Schema(prefix + "_test1_fresh", schema.LOCALS_ANY, connection=connection_test)
     assert schema.LOCALS_ANY, "LOCALS_ANY is empty"
     try:
-        schema_any.jobs.delete()
+        for jobs_table in schema_any.jobs:
+            jobs_table.delete()
     except DataJointError:
         pass
     schema_any(schema.TTest)
@@ -473,9 +477,10 @@ def schema_any_fresh(connection_test, prefix):
     schema_any(schema.Longblob)
     yield schema_any
     try:
-        schema_any.jobs.delete()
-    except DataJointError:
-        pass
+        for jobs_table in schema_any.jobs:
+            jobs_table.delete()
+    except Exception:
+        pass  # Ignore cleanup errors (connection may be closed)
     schema_any.drop()

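Per the commit message, schema.jobs now returns a list (one jobs table per auto-populated table), so the fixtures iterate instead of calling delete() on a single object. A hedged sketch of the shared teardown pattern; the helper name is illustrative, not part of this commit:

def _cleanup_jobs(schema_any):
    """Best-effort jobs cleanup; teardown must not fail the test run."""
    try:
        for jobs_table in schema_any.jobs:  # one jobs table per auto-populated table
            jobs_table.delete()             # safe even if a table was never declared
    except Exception:
        pass                                # e.g. the connection may already be closed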

tests/schema.py

Lines changed: 2 additions & 2 deletions
@@ -204,7 +204,7 @@ class Channel(dj.Part):
         current = null : <djblob> # optional current to test null handling
         """

-    def _make_tuples(self, key):
+    def make(self, key):
         """
         populate with random data
         """
@@ -261,7 +261,7 @@ class SigIntTable(dj.Computed):
     -> SimpleSource
     """

-    def _make_tuples(self, key):
+    def make(self, key):
         raise KeyboardInterrupt


tests/schema_simple.py

Lines changed: 1 addition & 1 deletion
@@ -103,7 +103,7 @@ class D(dj.Computed):
     -> L
     """

-    def _make_tuples(self, key):
+    def make(self, key):
         # make reference to a random tuple from L
         random.seed(str(key))
         lookup = list(L().fetch("KEY"))
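For context, AutoPopulate 2.0 deprecates the _make_tuples hook in favor of make. A minimal computed-table sketch in the new style; the Squared table, its upstream Number table, and the fields are illustrative, not taken from this repository:

import datajoint as dj

class Squared(dj.Computed):              # hypothetical example table
    definition = """
    -> Number
    ---
    value_squared : int
    """

    def make(self, key):                 # formerly _make_tuples
        value = (Number & key).fetch1("value")
        self.insert1({**key, "value_squared": value ** 2})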

tests/test_jobs.py

Lines changed: 1 addition & 0 deletions
@@ -30,6 +30,7 @@ def test_jobs_table_name(self, schema_any):
     def test_jobs_table_primary_key(self, schema_any):
         """Test that jobs table has FK-derived primary key."""
         jobs = schema.SigIntTable().jobs
+        jobs._ensure_declared()
         # SigIntTable depends on SimpleSource with pk 'id'
         assert "id" in jobs.primary_key
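The added call suggests that, in this jobs implementation, primary_key is only available once the table actually exists in the database, since jobs tables are created lazily. Outside the test, the same guard can be written with the is_declared property used earlier in jobs.py (my_table is a placeholder):

jobs = my_table.jobs
if not jobs.is_declared:      # jobs tables are created lazily
    jobs._ensure_declared()   # same private helper the test calls
print(jobs.primary_key)       # FK-derived primary key is now available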
