diff --git a/README.md b/README.md
index 3e56de3..9bd8a5f 100644
--- a/README.md
+++ b/README.md
@@ -7,9 +7,18 @@
-PostgreSQL integration for Taskiq with support for asyncpg, psqlpy and aiopg drivers.
+PostgreSQL integration for Taskiq with support for asyncpg, psqlpy, psycopg and aiopg drivers.
-See more example of usage in [the documentation](https://danfimov.github.io/taskiq-postgres/) or [examples directory](https://github.com/danfimov/taskiq-postgres/examples).
+## Features
+
+- **PostgreSQL Broker** - high-performance message broker using PostgreSQL LISTEN/NOTIFY;
+- **Result Backend** - persistent task result storage with configurable retention;
+- **Scheduler Source** - cron-like task scheduling with PostgreSQL persistence;
+- **Multiple Drivers** - support for asyncpg, psycopg3, psqlpy and aiopg;
+- **Flexible Configuration** - customizable table names, field types, and connection options;
+- **Multiple Serializers** - support for different serialization methods (Pickle, JSON, etc.).
+
+See the usage guide in the [documentation](https://danfimov.github.io/taskiq-postgres/) or explore examples in the [examples directory](https://github.com/danfimov/taskiq-postgres/tree/main/examples).
## Installation
@@ -22,6 +31,9 @@ pip install taskiq-postgres[asyncpg]
# with psqlpy
pip install taskiq-postgres[psqlpy]
+# with psycopg3
+pip install taskiq-postgres[psycopg]
+
# with aiopg
pip install taskiq-postgres[aiopg]
```
@@ -101,7 +113,7 @@ Your experience with other drivers will be pretty similar. Just change the impor
schedule=[
{
"cron": "*/1 * * * *", # type: str, either cron or time should be specified.
- "cron_offset": None, # type: str | timedelta | None, can be omitted.
+ "cron_offset": None, # type: str | None, can be omitted. For example "Europe/Berlin".
"time": None, # type: datetime | None, either cron or time should be specified.
"args": [], # type list[Any] | None, can be omitted.
"kwargs": {}, # type: dict[str, Any] | None, can be omitted.
diff --git a/docs/index.md b/docs/index.md
index 497944b..a234421 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -11,7 +11,7 @@ title: Overview
-PostgreSQL integration for Taskiq with support for asyncpg, psqlpy and aiopg drivers.
+PostgreSQL integration for Taskiq with support for asyncpg, psqlpy, psycopg and aiopg drivers.
## Motivation
@@ -22,7 +22,6 @@ To address this issue I created this library with a common interface for most po
- brokers;
- schedule sources.
-
## Installation
Depending on your preferred PostgreSQL driver, you can install this library with the corresponding extra:
@@ -39,6 +38,12 @@ Depending on your preferred PostgreSQL driver, you can install this library with
pip install taskiq-postgres[psqlpy]
```
+=== "psycopg"
+
+ ```bash
+ pip install taskiq-postgres[psycopg]
+ ```
+
=== "aiopg"
```bash
@@ -93,6 +98,36 @@ Depending on your preferred PostgreSQL driver, you can install this library with
broker = PSQLPyBroker(dsn).with_result_backend(PSQLPyResultBackend(dsn))
+ @broker.task("solve_all_problems")
+ async def best_task_ever() -> None:
+ """Solve all problems in the world."""
+ await asyncio.sleep(2)
+ print("All problems are solved!")
+
+
+ async def main():
+ await broker.startup()
+ task = await best_task_ever.kiq()
+ print(await task.wait_result())
+ await broker.shutdown()
+
+
+ if __name__ == "__main__":
+ asyncio.run(main())
+ ```
+
+ === "psycopg"
+
+ ```python
+ # broker_example.py
+ import asyncio
+ from taskiq_pg.psycopg import PsycopgBroker, PsycopgResultBackend
+
+
+ dsn = "postgres://taskiq_postgres:look_in_vault@localhost:5432/taskiq_postgres"
+ broker = PsycopgBroker(dsn).with_result_backend(PsycopgResultBackend(dsn))
+
+
@broker.task("solve_all_problems")
async def best_task_ever() -> None:
"""Solve all problems in the world."""
@@ -184,7 +219,7 @@ Your experience with other drivers will be pretty similar. Just change the impor
schedule=[
{
"cron": "*/1 * * * *", # type: str, either cron or time should be specified.
- "cron_offset": None, # type: str | timedelta | None, can be omitted.
+ "cron_offset": None, # type: str | None, can be omitted. For example "Europe/Berlin".
"time": None, # type: datetime | None, either cron or time should be specified.
"args": [], # type list[Any] | None, can be omitted.
"kwargs": {}, # type: dict[str, Any] | None, can be omitted.
@@ -223,7 +258,47 @@ Your experience with other drivers will be pretty similar. Just change the impor
schedule=[
{
"cron": "*/1 * * * *", # type: str, either cron or time should be specified.
- "cron_offset": None, # type: str | timedelta | None, can be omitted.
+ "cron_offset": None, # type: str | None, can be omitted. For example "Europe/Berlin".
+ "time": None, # type: datetime | None, either cron or time should be specified.
+ "args": [], # type list[Any] | None, can be omitted.
+ "kwargs": {}, # type: dict[str, Any] | None, can be omitted.
+ "labels": {}, # type: dict[str, Any] | None, can be omitted.
+ },
+ ],
+ )
+ async def best_task_ever() -> None:
+ """Solve all problems in the world."""
+ await asyncio.sleep(2)
+ print("All problems are solved!")
+
+ ```
+
+ === "psycopg"
+
+ ```python
+ # scheduler_example.py
+ import asyncio
+ from taskiq import TaskiqScheduler
+ from taskiq_pg.psycopg import PsycopgBroker, PsycopgScheduleSource
+
+
+ dsn = "postgres://taskiq_postgres:look_in_vault@localhost:5432/taskiq_postgres"
+ broker = PsycopgBroker(dsn)
+ scheduler = TaskiqScheduler(
+ broker=broker,
+ sources=[PsycopgScheduleSource(
+ dsn=dsn,
+ broker=broker,
+ )],
+ )
+
+
+ @broker.task(
+ task_name="solve_all_problems",
+ schedule=[
+ {
+ "cron": "*/1 * * * *", # type: str, either cron or time should be specified.
+ "cron_offset": None, # type: str | None, can be omitted. For example "Europe/Berlin".
"time": None, # type: datetime | None, either cron or time should be specified.
"args": [], # type list[Any] | None, can be omitted.
"kwargs": {}, # type: dict[str, Any] | None, can be omitted.
@@ -263,7 +338,7 @@ Your experience with other drivers will be pretty similar. Just change the impor
schedule=[
{
"cron": "*/1 * * * *", # type: str, either cron or time should be specified.
- "cron_offset": None, # type: str | timedelta | None, can be omitted.
+ "cron_offset": None, # type: str | None, can be omitted. For example "Europe/Berlin".
"time": None, # type: datetime | None, either cron or time should be specified.
"args": [], # type list[Any] | None, can be omitted.
"kwargs": {}, # type: dict[str, Any] | None, can be omitted.
diff --git a/examples/example_with_broker.py b/examples/example_with_broker.py
index 9bb245e..c7ef119 100644
--- a/examples/example_with_broker.py
+++ b/examples/example_with_broker.py
@@ -2,7 +2,7 @@
How to run:
1) Run worker in one terminal:
- uv run taskiq worker examples.example_with_broker:broker
+ uv run taskiq worker examples.example_with_broker:broker --workers 1
2) Run this script in another terminal:
uv run python -m examples.example_with_broker
@@ -10,11 +10,11 @@
import asyncio
-from taskiq_pg.asyncpg import AsyncpgBroker, AsyncpgResultBackend
+from taskiq_pg.psycopg import PsycopgBroker, PsycopgResultBackend
dsn = "postgres://taskiq_postgres:look_in_vault@localhost:5432/taskiq_postgres"
-broker = AsyncpgBroker(dsn).with_result_backend(AsyncpgResultBackend(dsn))
+broker = PsycopgBroker(dsn).with_result_backend(PsycopgResultBackend(dsn))
@broker.task("solve_all_problems")
diff --git a/examples/example_with_schedule_source.py b/examples/example_with_schedule_source.py
index 8d3a15f..7342b6c 100644
--- a/examples/example_with_schedule_source.py
+++ b/examples/example_with_schedule_source.py
@@ -2,7 +2,7 @@
How to run:
1) Run worker in one terminal:
- uv run taskiq worker examples.example_with_schedule_source:broker
+ uv run taskiq worker examples.example_with_schedule_source:broker --workers 1
2) Run scheduler in another terminal:
uv run taskiq scheduler examples.example_with_schedule_source:scheduler
@@ -12,15 +12,15 @@
from taskiq import TaskiqScheduler
-from taskiq_pg.asyncpg import AsyncpgBroker, AsyncpgScheduleSource
+from taskiq_pg.psycopg import PsycopgBroker, PsycopgScheduleSource
dsn = "postgres://taskiq_postgres:look_in_vault@localhost:5432/taskiq_postgres"
-broker = AsyncpgBroker(dsn)
+broker = PsycopgBroker(dsn)
scheduler = TaskiqScheduler(
broker=broker,
sources=[
- AsyncpgScheduleSource(
+ PsycopgScheduleSource(
dsn=dsn,
broker=broker,
),
diff --git a/pyproject.toml b/pyproject.toml
index 358a242..9f6b342 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -48,6 +48,9 @@ asyncpg = [
psqlpy = [
"psqlpy>=0.11.6",
]
+psycopg = [
+ "psycopg[binary,pool]>=3.2.10",
+]
[dependency-groups]
dev = [
@@ -60,6 +63,8 @@ dev = [
"pytest>=8.4.2",
"pytest-asyncio>=1.1.0",
"pytest-cov>=7.0.0",
+ # for managing test databases
+ "sqlalchemy-utils>=0.42.0",
# pre-commit hooks
"prek>=0.2.4",
# docs
@@ -149,6 +154,7 @@ ignore = [
"RUF",
"PLR2004", # magic numbers in tests
+ "ANN", # missing type annotations in tests
]
"tests/test_linting.py" = [
"S603", # subprocess usage
diff --git a/src/taskiq_pg/asyncpg/broker.py b/src/taskiq_pg/asyncpg/broker.py
index 2315152..a330ff6 100644
--- a/src/taskiq_pg/asyncpg/broker.py
+++ b/src/taskiq_pg/asyncpg/broker.py
@@ -27,33 +27,34 @@
class AsyncpgBroker(BasePostgresBroker):
"""Broker that uses asyncpg as driver and PostgreSQL with LISTEN/NOTIFY mechanism."""
- read_conn: asyncpg.Connection[asyncpg.Record] | None = None
- write_pool: asyncpg.pool.Pool[asyncpg.Record] | None = None
+ _read_conn: asyncpg.Connection[asyncpg.Record] | None = None
+ _write_pool: asyncpg.pool.Pool[asyncpg.Record] | None = None
async def startup(self) -> None:
"""Initialize the broker."""
await super().startup()
- self.read_conn = await asyncpg.connect(self.dsn, **self.read_kwargs)
- self.write_pool = await asyncpg.create_pool(self.dsn, **self.write_kwargs)
+ self._read_conn = await asyncpg.connect(self.dsn, **self.read_kwargs)
+ self._write_pool = await asyncpg.create_pool(self.dsn, **self.write_kwargs)
- if self.read_conn is None:
- msg = "read_conn not initialized"
+ if self._read_conn is None:
+ msg = "_read_conn not initialized"
raise RuntimeError(msg)
- async with self.write_pool.acquire() as conn:
- _ = await conn.execute(CREATE_MESSAGE_TABLE_QUERY.format(self.table_name))
+ async with self._write_pool.acquire() as conn:
+ await conn.execute(CREATE_MESSAGE_TABLE_QUERY.format(self.table_name))
- await self.read_conn.add_listener(self.channel_name, self._notification_handler)
+ await self._read_conn.add_listener(self.channel_name, self._notification_handler)
self._queue = asyncio.Queue()
async def shutdown(self) -> None:
"""Close all connections on shutdown."""
await super().shutdown()
- if self.read_conn is not None:
- await self.read_conn.close()
- if self.write_pool is not None:
- await self.write_pool.close()
+ if self._read_conn is not None:
+ await self._read_conn.remove_listener(self.channel_name, self._notification_handler)
+ await self._read_conn.close()
+ if self._write_pool is not None:
+ await self._write_pool.close()
def _notification_handler(
self,
@@ -85,11 +86,11 @@ async def kick(self, message: BrokerMessage) -> None:
:param message: Message to send.
"""
- if self.write_pool is None:
+ if self._write_pool is None:
msg = "Please run startup before kicking."
raise ValueError(msg)
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
# Insert the message into the database
message_inserted_id = tp.cast(
"int",
@@ -117,9 +118,9 @@ async def kick(self, message: BrokerMessage) -> None:
async def _schedule_notification(self, message_id: int, delay_seconds: int) -> None:
"""Schedule a notification to be sent after a delay."""
await asyncio.sleep(delay_seconds)
- if self.write_pool is None:
+ if self._write_pool is None:
return
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
# Send NOTIFY
_ = await conn.execute(f"NOTIFY {self.channel_name}, '{message_id}'")
@@ -131,7 +132,7 @@ async def listen(self) -> AsyncGenerator[AckableMessage, None]:
:yields: AckableMessage instances.
"""
- if self.write_pool is None:
+ if self._write_pool is None:
msg = "Call startup before starting listening."
raise ValueError(msg)
if self._queue is None:
@@ -142,7 +143,7 @@ async def listen(self) -> AsyncGenerator[AckableMessage, None]:
try:
payload = await self._queue.get()
message_id = int(payload)
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
claimed = await conn.fetchrow(
CLAIM_MESSAGE_QUERY.format(self.table_name),
message_id,
@@ -156,11 +157,11 @@ async def listen(self) -> AsyncGenerator[AckableMessage, None]:
message_data = message_str.encode()
async def ack(*, _message_id: int = message_id) -> None:
- if self.write_pool is None:
+ if self._write_pool is None:
msg = "Call startup before starting listening."
raise ValueError(msg)
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
_ = await conn.execute(
DELETE_MESSAGE_QUERY.format(self.table_name),
_message_id,
diff --git a/src/taskiq_pg/psqlpy/broker.py b/src/taskiq_pg/psqlpy/broker.py
index 5f843a5..7103f71 100644
--- a/src/taskiq_pg/psqlpy/broker.py
+++ b/src/taskiq_pg/psqlpy/broker.py
@@ -24,7 +24,7 @@
from datetime import datetime
-logger = logging.getLogger("taskiq.asyncpg_broker")
+logger = logging.getLogger("taskiq.psqlpy_broker")
@dataclass
@@ -41,42 +41,47 @@ class MessageRow:
class PSQLPyBroker(BasePostgresBroker):
- """Broker that uses PostgreSQL and asyncpg with LISTEN/NOTIFY."""
+ """Broker that uses PostgreSQL and PSQLPy with LISTEN/NOTIFY."""
- read_conn: psqlpy.Connection | None = None
- write_pool: psqlpy.ConnectionPool | None = None
+ _read_conn: psqlpy.Connection
+ _write_pool: psqlpy.ConnectionPool
+ _listener: psqlpy.Listener
+ _queue: asyncio.Queue
async def startup(self) -> None:
"""Initialize the broker."""
await super().startup()
- self.read_conn = await psqlpy.connect(
+ self._read_conn = await psqlpy.connect(
dsn=self.dsn,
**self.read_kwargs,
)
- self.write_pool = psqlpy.ConnectionPool(
+ self._write_pool = psqlpy.ConnectionPool(
dsn=self.dsn,
**self.write_kwargs,
)
# create messages table if it doesn't exist
- async with self.write_pool.acquire() as conn:
- _ = await conn.execute(CREATE_MESSAGE_TABLE_QUERY.format(self.table_name))
+ async with self._write_pool.acquire() as conn:
+ await conn.execute(CREATE_MESSAGE_TABLE_QUERY.format(self.table_name))
# listen to notification channel
- listener = self.write_pool.listener()
- await listener.add_callback(self.channel_name, self._notification_handler)
- await listener.startup()
- listener.listen()
+ self._listener = self._write_pool.listener()
+ await self._listener.add_callback(self.channel_name, self._notification_handler)
+ await self._listener.startup()
+ self._listener.listen()
self._queue = asyncio.Queue()
async def shutdown(self) -> None:
"""Close all connections on shutdown."""
await super().shutdown()
- if self.read_conn is not None:
- self.read_conn.close()
- if self.write_pool is not None:
- self.write_pool.close()
+ if self._read_conn is not None:
+ self._read_conn.close()
+ if self._write_pool is not None:
+ self._write_pool.close()
+ if self._listener is not None:
+ self._listener.abort_listen()
+ await self._listener.shutdown()
async def _notification_handler(
self,
@@ -88,12 +93,7 @@ async def _notification_handler(
"""
Handle NOTIFY messages.
- From asyncpg.connection.add_listener docstring:
- A callable or a coroutine function receiving the following arguments:
- **connection**: a Connection the callback is registered with;
- **pid**: PID of the Postgres server that sent the notification;
- **channel**: name of the channel the notification was sent to;
- **payload**: the payload.
+ https://psqlpy-python.github.io/components/listener.html#usage
"""
logger.debug("Received notification on channel %s: %s", channel, payload)
if self._queue is not None:
@@ -107,11 +107,7 @@ async def kick(self, message: BrokerMessage) -> None:
:param message: Message to send.
"""
- if self.write_pool is None:
- msg = "Please run startup before kicking."
- raise ValueError(msg)
-
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
# insert message into db table
message_inserted_id = tp.cast(
"int",
@@ -129,7 +125,7 @@ async def kick(self, message: BrokerMessage) -> None:
delay_value = tp.cast("str | None", message.labels.get("delay"))
if delay_value is not None:
delay_seconds = int(delay_value)
- _ = asyncio.create_task( # noqa: RUF006
+ asyncio.create_task( # noqa: RUF006
self._schedule_notification(message_inserted_id, delay_seconds),
)
else:
@@ -141,10 +137,7 @@ async def kick(self, message: BrokerMessage) -> None:
async def _schedule_notification(self, message_id: int, delay_seconds: int) -> None:
"""Schedule a notification to be sent after a delay."""
await asyncio.sleep(delay_seconds)
- if self.write_pool is None:
- msg = "Call startup before starting listening."
- raise ValueError(msg)
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
# Send NOTIFY with message ID as payload
_ = await conn.execute(f"NOTIFY {self.channel_name}, '{message_id}'")
@@ -156,19 +149,12 @@ async def listen(self) -> AsyncGenerator[AckableMessage, None]:
:yields: AckableMessage instances.
"""
- if self.write_pool is None:
- msg = "Call startup before starting listening."
- raise ValueError(msg)
- if self._queue is None:
- msg = "Startup did not initialize the queue."
- raise ValueError(msg)
-
while True:
try:
payload = await self._queue.get()
message_id = int(payload) # payload is the message id
try:
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
claimed_message = await conn.fetch_row(
CLAIM_MESSAGE_QUERY.format(self.table_name),
[message_id],
@@ -182,11 +168,7 @@ async def listen(self) -> AsyncGenerator[AckableMessage, None]:
message_data = message_row_result.message.encode()
async def ack(*, _message_id: int = message_id) -> None:
- if self.write_pool is None:
- msg = "Call startup before starting listening"
- raise ValueError(msg)
-
- async with self.write_pool.acquire() as conn:
+ async with self._write_pool.acquire() as conn:
_ = await conn.execute(
DELETE_MESSAGE_QUERY.format(self.table_name),
[_message_id],
diff --git a/src/taskiq_pg/psqlpy/schedule_source.py b/src/taskiq_pg/psqlpy/schedule_source.py
index f521070..7fb9fdc 100644
--- a/src/taskiq_pg/psqlpy/schedule_source.py
+++ b/src/taskiq_pg/psqlpy/schedule_source.py
@@ -2,6 +2,7 @@
from logging import getLogger
from psqlpy import ConnectionPool
+from psqlpy.extra_types import JSONB
from pydantic import ValidationError
from taskiq import ScheduledTask
@@ -26,16 +27,20 @@ async def _update_schedules_on_startup(self, schedules: list[ScheduledTask]) ->
"""Update schedules in the database on startup: truncate table and insert new ones."""
async with self._database_pool.acquire() as connection, connection.transaction():
await connection.execute(DELETE_ALL_SCHEDULES_QUERY.format(self._table_name))
- data_to_insert: list = [
- [
- uuid.UUID(schedule.schedule_id),
- schedule.task_name,
- schedule.model_dump(
- exclude={"schedule_id", "task_name"},
- ),
- ]
- for schedule in schedules
- ]
+ data_to_insert: list = []
+ for schedule in schedules:
+ schedule_dict = schedule.model_dump(
+ mode="json",
+ exclude={"schedule_id", "task_name"},
+ )
+ data_to_insert.append(
+ [
+ uuid.UUID(schedule.schedule_id),
+ schedule.task_name,
+ JSONB(schedule_dict),
+ ]
+ )
+
await connection.execute_many(
INSERT_SCHEDULE_QUERY.format(self._table_name),
data_to_insert,
diff --git a/src/taskiq_pg/psycopg/__init__.py b/src/taskiq_pg/psycopg/__init__.py
new file mode 100644
index 0000000..18b7a92
--- /dev/null
+++ b/src/taskiq_pg/psycopg/__init__.py
@@ -0,0 +1,10 @@
+from taskiq_pg.psycopg.broker import PsycopgBroker
+from taskiq_pg.psycopg.result_backend import PsycopgResultBackend
+from taskiq_pg.psycopg.schedule_source import PsycopgScheduleSource
+
+
+__all__ = [
+ "PsycopgBroker",
+ "PsycopgResultBackend",
+ "PsycopgScheduleSource",
+]
diff --git a/src/taskiq_pg/psycopg/broker.py b/src/taskiq_pg/psycopg/broker.py
new file mode 100644
index 0000000..edb89ba
--- /dev/null
+++ b/src/taskiq_pg/psycopg/broker.py
@@ -0,0 +1,160 @@
+from __future__ import annotations
+
+import asyncio
+import json
+import logging
+import typing as tp
+from contextlib import suppress
+
+import psycopg
+from psycopg import AsyncConnection, AsyncRawCursor, sql
+from psycopg_pool import AsyncConnectionPool
+from taskiq import AckableMessage, BrokerMessage
+
+from taskiq_pg._internal.broker import BasePostgresBroker
+from taskiq_pg.psycopg.queries import (
+ CLAIM_MESSAGE_QUERY,
+ CREATE_MESSAGE_TABLE_QUERY,
+ DELETE_MESSAGE_QUERY,
+ INSERT_MESSAGE_QUERY,
+)
+
+
+if tp.TYPE_CHECKING:
+ from collections.abc import AsyncGenerator
+
+
+logger = logging.getLogger("taskiq.psycopg_broker")
+
+
+class PsycopgBroker(BasePostgresBroker):
+ """Broker that uses PostgreSQL and psycopg with LISTEN/NOTIFY."""
+
+ _read_conn: AsyncConnection
+ _write_pool: AsyncConnectionPool
+ _notifies_iter: tp.AsyncIterator[tp.Any]
+
+ async def startup(self) -> None:
+ """Initialize the broker."""
+ await super().startup()
+ self._read_conn = await AsyncConnection.connect(
+ conninfo=self.dsn,
+ **self.read_kwargs,
+ autocommit=True,
+ cursor_factory=AsyncRawCursor,
+ )
+ self._write_pool = AsyncConnectionPool(
+ conninfo=self.dsn if self.dsn is not None else "",
+ open=False,
+ **self.write_kwargs,
+ )
+ await self._write_pool.open()
+
+ async with self._write_pool.connection() as connection, connection.cursor() as cursor:
+ await cursor.execute(sql.SQL(CREATE_MESSAGE_TABLE_QUERY).format(sql.Identifier(self.table_name)))
+
+ await self._read_conn.execute(sql.SQL("LISTEN {}").format(sql.Identifier(self.channel_name)))
+ self._notifies_iter = self._read_conn.notifies()
+
+ async def shutdown(self) -> None:
+ """Close all connections on shutdown."""
+ await super().shutdown()
+ if self._notifies_iter is not None:
+ with suppress(RuntimeError): # RuntimeError: aclose(): asynchronous generator is already running
+ await self._notifies_iter.aclose() # type: ignore[attr-defined]
+ if self._read_conn is not None:
+ await self._read_conn.notifies().aclose()
+ await self._read_conn.close()
+ if self._write_pool is not None:
+ await self._write_pool.close()
+
+ async def kick(self, message: BrokerMessage) -> None:
+ """
+ Send message to the channel.
+
+ Inserts the message into the database and sends a NOTIFY.
+
+ :param message: Message to send.
+ """
+ async with self._write_pool.connection() as connection, connection.cursor() as cursor:
+ # insert message into db table
+ await cursor.execute(
+ sql.SQL(INSERT_MESSAGE_QUERY).format(sql.Identifier(self.table_name)),
+ [
+ message.task_id,
+ message.task_name,
+ message.message.decode(),
+ json.dumps(message.labels),
+ ],
+ )
+ row = await cursor.fetchone()
+ if row is None:
+ msg = "failed to insert message"
+ raise RuntimeError(msg)
+ message_inserted_id = int(row[0])
+
+ delay_value = tp.cast("str | None", message.labels.get("delay"))
+ if delay_value is not None:
+ delay_seconds = int(delay_value)
+ await self._schedule_notification(message_inserted_id, delay_seconds)
+ else:
+ # Send NOTIFY with message ID as payload
+ await cursor.execute(
+ sql.SQL("NOTIFY {}, {}").format(
+ sql.Identifier(self.channel_name),
+ sql.Literal(str(message_inserted_id)),
+ ),
+ )
+
+ async def _schedule_notification(self, message_id: int, delay_seconds: int) -> None:
+ """Schedule a notification to be sent after a delay."""
+ await asyncio.sleep(delay_seconds)
+ async with self._write_pool.connection() as connection, connection.cursor() as cursor:
+ # Send NOTIFY with message ID as payload
+ await cursor.execute(
+ sql.SQL("NOTIFY {}, {}").format(
+ sql.Identifier(self.channel_name),
+ sql.Literal(str(message_id)),
+ )
+ )
+
+ async def _listen_context(self) -> AsyncGenerator[str, None]:
+ async for notify in self._notifies_iter:
+ yield notify.payload
+
+ async def listen(self) -> AsyncGenerator[AckableMessage, None]:
+ """
+ Listen to the channel.
+
+ Yields messages as they are received.
+
+ :yields: AckableMessage instances.
+ """
+ while True:
+ async for message_id_str in self._listen_context():
+ message_id = int(message_id_str) # payload is the message id
+ try:
+ async with self._write_pool.connection() as connection, connection.cursor() as cursor:
+ await cursor.execute(
+ sql.SQL(CLAIM_MESSAGE_QUERY).format(sql.Identifier(self.table_name)),
+ [message_id],
+ )
+ claimed_message = await cursor.fetchone()
+ if claimed_message is None:
+ continue
+ except psycopg.OperationalError: # message was claimed by another worker
+ continue
+ message_str = claimed_message[3]
+ if not isinstance(message_str, str):
+ msg = "Message is not a string"
+ raise TypeError(msg)
+ message_data = message_str.encode()
+
+ async def ack(*, _message_id: int = message_id) -> None:
+ async with self._write_pool.connection() as connection, connection.cursor() as cursor:
+ await cursor.execute(
+ sql.SQL(DELETE_MESSAGE_QUERY).format(sql.Identifier(self.table_name)),
+ [_message_id],
+ )
+
+ yield AckableMessage(data=message_data, ack=ack)
diff --git a/src/taskiq_pg/psycopg/queries.py b/src/taskiq_pg/psycopg/queries.py
new file mode 100644
index 0000000..3aff0a9
--- /dev/null
+++ b/src/taskiq_pg/psycopg/queries.py
@@ -0,0 +1,81 @@
+CREATE_TABLE_QUERY = """
+CREATE TABLE IF NOT EXISTS {} (
+ task_id {} UNIQUE,
+ result BYTEA
+)
+"""
+
+CREATE_INDEX_QUERY = """
+CREATE INDEX IF NOT EXISTS {} ON {} USING HASH (task_id)
+"""
+
+INSERT_RESULT_QUERY = """
+INSERT INTO {} VALUES (%s, %s)
+ON CONFLICT (task_id)
+DO UPDATE
+SET result = EXCLUDED.result;
+"""
+
+IS_RESULT_EXISTS_QUERY = """
+SELECT EXISTS(
+ SELECT 1 FROM {} WHERE task_id = %s
+);
+"""
+
+SELECT_RESULT_QUERY = """
+SELECT result FROM {} WHERE task_id = %s;
+"""
+
+DELETE_RESULT_QUERY = """
+DELETE FROM {} WHERE task_id = %s;
+"""
+
+CREATE_MESSAGE_TABLE_QUERY = """
+CREATE TABLE IF NOT EXISTS {} (
+ id SERIAL PRIMARY KEY,
+ task_id VARCHAR NOT NULL,
+ task_name VARCHAR NOT NULL,
+ message TEXT NOT NULL,
+ labels JSONB NOT NULL,
+ status TEXT NOT NULL DEFAULT 'pending',
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+);
+"""
+
+INSERT_MESSAGE_QUERY = """
+INSERT INTO {} (task_id, task_name, message, labels)
+VALUES (%s, %s, %s, %s)
+RETURNING id
+"""
+
+CLAIM_MESSAGE_QUERY = "UPDATE {} SET status = 'processing' WHERE id = %s AND status = 'pending' RETURNING *"
+
+DELETE_MESSAGE_QUERY = "DELETE FROM {} WHERE id = %s"
+
+CREATE_SCHEDULES_TABLE_QUERY = """
+CREATE TABLE IF NOT EXISTS {} (
+ id UUID PRIMARY KEY,
+ task_name VARCHAR(100) NOT NULL,
+ schedule JSONB NOT NULL,
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+ updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+);
+"""
+
+INSERT_SCHEDULE_QUERY = """
+INSERT INTO {} (id, task_name, schedule)
+VALUES (%s, %s, %s)
+ON CONFLICT (id) DO UPDATE
+SET task_name = EXCLUDED.task_name,
+ schedule = EXCLUDED.schedule,
+ updated_at = NOW();
+"""
+
+SELECT_SCHEDULES_QUERY = """
+SELECT id, task_name, schedule
+FROM {};
+"""
+
+DELETE_ALL_SCHEDULES_QUERY = """
+DELETE FROM {};
+"""
diff --git a/src/taskiq_pg/psycopg/result_backend.py b/src/taskiq_pg/psycopg/result_backend.py
new file mode 100644
index 0000000..8b3ecc6
--- /dev/null
+++ b/src/taskiq_pg/psycopg/result_backend.py
@@ -0,0 +1,141 @@
+from __future__ import annotations
+
+import typing as tp
+
+from psycopg import sql
+from psycopg_pool import AsyncConnectionPool
+from taskiq import TaskiqResult
+from taskiq.compat import model_dump, model_validate
+
+from taskiq_pg._internal.result_backend import BasePostgresResultBackend, ReturnType
+from taskiq_pg.exceptions import ResultIsMissingError
+from taskiq_pg.psycopg.queries import (
+ CREATE_INDEX_QUERY,
+ CREATE_TABLE_QUERY,
+ DELETE_RESULT_QUERY,
+ INSERT_RESULT_QUERY,
+ IS_RESULT_EXISTS_QUERY,
+ SELECT_RESULT_QUERY,
+)
+
+
+class PsycopgResultBackend(BasePostgresResultBackend):
+ """Result backend for TaskIQ based on psycopg."""
+
+ _database_pool: AsyncConnectionPool
+
+ async def startup(self) -> None:
+ """
+ Initialize the result backend.
+
+ Construct new connection pool
+ and create new table for results if not exists.
+ """
+ self._database_pool = AsyncConnectionPool(
+ conninfo=self.dsn if self.dsn is not None else "",
+ open=False,
+ **self.connect_kwargs,
+ )
+ await self._database_pool.open()
+ async with self._database_pool.connection() as connection, connection.cursor() as cursor:
+ await cursor.execute(
+ query=sql.SQL(CREATE_TABLE_QUERY).format(
+ sql.Identifier(self.table_name),
+ sql.SQL(self.field_for_task_id),
+ ),
+ )
+ await cursor.execute(
+ query=sql.SQL(CREATE_INDEX_QUERY).format(
+ sql.Identifier(self.table_name + "_task_id_idx"),
+ sql.Identifier(self.table_name),
+ ),
+ )
+
+ async def shutdown(self) -> None:
+ """Close the connection pool."""
+ if getattr(self, "_database_pool", None) is not None:
+ await self._database_pool.close()
+
+ async def set_result(
+ self,
+ task_id: str,
+ result: TaskiqResult[ReturnType],
+ ) -> None:
+ """
+ Set result to the PostgreSQL table.
+
+ :param task_id: ID of the task.
+ :param result: result of the task.
+ """
+ async with self._database_pool.connection() as connection, connection.cursor() as cursor:
+ await cursor.execute(
+ query=sql.SQL(INSERT_RESULT_QUERY).format(
+ sql.Identifier(self.table_name),
+ ),
+ params=[
+ task_id,
+ self.serializer.dumpb(model_dump(result)),
+ ],
+ )
+
+ async def is_result_ready(self, task_id: str) -> bool:
+ """
+ Returns whether the result is ready.
+
+ :param task_id: ID of the task.
+
+ :returns: True if the result is ready else False.
+ """
+ async with self._database_pool.connection() as connection, connection.cursor() as cursor:
+ execute_result = await cursor.execute(
+ query=sql.SQL(IS_RESULT_EXISTS_QUERY).format(
+ sql.Identifier(self.table_name),
+ ),
+ params=[task_id],
+ )
+ row = await execute_result.fetchone()
+ return bool(row and row[0])
+
+ async def get_result(
+ self,
+ task_id: str,
+ with_logs: bool = False,
+ ) -> TaskiqResult[ReturnType]:
+ """
+ Retrieve result from the task.
+
+ :param task_id: task's id.
+ :param with_logs: if True it will download task's logs.
+ :raises ResultIsMissingError: if there is no result when trying to get it.
+ :return: TaskiqResult.
+ """
+ async with self._database_pool.connection() as connection, connection.cursor() as cursor:
+ execute_result = await cursor.execute(
+ query=sql.SQL(SELECT_RESULT_QUERY).format(
+ sql.Identifier(self.table_name),
+ ),
+ params=[task_id],
+ )
+ result = await execute_result.fetchone()
+ if result is None:
+ msg = f"Cannot find record with task_id = {task_id} in PostgreSQL"
+ raise ResultIsMissingError(msg)
+ result_in_bytes: tp.Final = result[0]
+
+ if not self.keep_results:
+ await cursor.execute(
+ query=sql.SQL(DELETE_RESULT_QUERY).format(
+ sql.Identifier(self.table_name),
+ ),
+ params=[task_id],
+ )
+
+ taskiq_result: tp.Final = model_validate(
+ TaskiqResult[ReturnType],
+ self.serializer.loadb(result_in_bytes),
+ )
+
+ if not with_logs:
+ taskiq_result.log = None
+
+ return taskiq_result
diff --git a/src/taskiq_pg/psycopg/schedule_source.py b/src/taskiq_pg/psycopg/schedule_source.py
new file mode 100644
index 0000000..4a35e90
--- /dev/null
+++ b/src/taskiq_pg/psycopg/schedule_source.py
@@ -0,0 +1,129 @@
+import uuid
+from logging import getLogger
+
+from psycopg_pool import AsyncConnectionPool
+from pydantic import ValidationError
+from taskiq import ScheduledTask
+
+from taskiq_pg._internal import BasePostgresScheduleSource
+from taskiq_pg.psycopg.queries import (
+ CREATE_SCHEDULES_TABLE_QUERY,
+ DELETE_ALL_SCHEDULES_QUERY,
+ INSERT_SCHEDULE_QUERY,
+ SELECT_SCHEDULES_QUERY,
+)
+
+
+logger = getLogger("taskiq_pg.psycopg_schedule_source")
+
+
+class PsycopgScheduleSource(BasePostgresScheduleSource):
+ """Schedule source that uses psycopg to store schedules in PostgreSQL."""
+
+ _database_pool: AsyncConnectionPool
+
+ async def _update_schedules_on_startup(self, schedules: list[ScheduledTask]) -> None:
+ """Update schedules in the database on startup: truncate table and insert new ones."""
+ async with self._database_pool.connection() as connection, connection.cursor() as cursor:
+ await cursor.execute(DELETE_ALL_SCHEDULES_QUERY.format(self._table_name))
+ data_to_insert: list = [
+ [
+ uuid.UUID(schedule.schedule_id),
+ schedule.task_name,
+ schedule.model_dump_json(
+ exclude={"schedule_id", "task_name"},
+ ),
+ ]
+ for schedule in schedules
+ ]
+ await cursor.executemany(
+ INSERT_SCHEDULE_QUERY.format(self._table_name),
+ data_to_insert,
+ )
+
+ def _get_schedules_from_broker_tasks(self) -> list[ScheduledTask]:
+ """Extract schedules from the broker's registered tasks."""
+ scheduled_tasks_for_creation: list[ScheduledTask] = []
+ for task_name, task in self._broker.get_all_tasks().items():
+ if "schedule" not in task.labels:
+ logger.debug("Task %s has no schedule, skipping", task_name)
+ continue
+ if not isinstance(task.labels["schedule"], list):
+ logger.warning(
+ "Schedule for task %s is not a list, skipping",
+ task_name,
+ )
+ continue
+ for schedule in task.labels["schedule"]:
+ try:
+ new_schedule = ScheduledTask.model_validate(
+ {
+ "task_name": task_name,
+ "labels": schedule.get("labels", {}),
+ "args": schedule.get("args", []),
+ "kwargs": schedule.get("kwargs", {}),
+ "schedule_id": str(uuid.uuid4()),
+ "cron": schedule.get("cron", None),
+ "cron_offset": schedule.get("cron_offset", None),
+ "time": schedule.get("time", None),
+ },
+ )
+ scheduled_tasks_for_creation.append(new_schedule)
+ except ValidationError:
+ logger.exception(
+ "Schedule for task %s is not valid, skipping",
+ task_name,
+ )
+ continue
+ return scheduled_tasks_for_creation
+
+ async def startup(self) -> None:
+ """
+ Initialize the schedule source.
+
+ Construct new connection pool, create new table for schedules if not exists
+ and fill table with schedules from task labels.
+ """
+ self._database_pool = AsyncConnectionPool(
+ conninfo=self.dsn if self.dsn is not None else "",
+ open=False,
+ **self._connect_kwargs,
+ )
+ await self._database_pool.open()
+
+ async with self._database_pool.connection() as connection, connection.cursor() as cursor:
+ await cursor.execute(
+ CREATE_SCHEDULES_TABLE_QUERY.format(self._table_name),
+ )
+ scheduled_tasks_for_creation = self._get_schedules_from_broker_tasks()
+ await self._update_schedules_on_startup(scheduled_tasks_for_creation)
+
+ async def shutdown(self) -> None:
+ """Close the connection pool."""
+ if getattr(self, "_database_pool", None) is not None:
+ await self._database_pool.close()
+
+ async def get_schedules(self) -> list["ScheduledTask"]:
+ """Fetch schedules from the database."""
+ schedules = []
+ async with self._database_pool.connection() as connection, connection.cursor() as cursor:
+ rows_with_schedules = await cursor.execute(
+ SELECT_SCHEDULES_QUERY.format(self._table_name),
+ )
+ rows = await rows_with_schedules.fetchall()
+ for schedule_id, task_name, schedule in rows:
+ schedules.append(
+ ScheduledTask.model_validate(
+ {
+ "schedule_id": str(schedule_id),
+ "task_name": task_name,
+ "labels": schedule["labels"],
+ "args": schedule["args"],
+ "kwargs": schedule["kwargs"],
+ "cron": schedule["cron"],
+ "cron_offset": schedule["cron_offset"],
+ "time": schedule["time"],
+ },
+ ),
+ )
+ return schedules
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 1d06bc8..98a6b8e 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -1,11 +1,59 @@
import os
+import typing as tp
+import uuid
+import asyncpg
import pytest
+from sqlalchemy import create_engine, text
+from sqlalchemy_utils import create_database, database_exists
-@pytest.fixture(scope="session")
-def pg_dsn() -> str:
- return os.getenv(
+@pytest.fixture(scope="session", autouse=True)
+def clean_database():
+ dsn = os.getenv(
"TASKIQ_PG_DSN",
"postgres://taskiq_postgres:look_in_vault@localhost:5432/taskiq_postgres",
)
+ temporary_prefix = uuid.uuid4().hex
+ dsn_for_sqlalchemy = dsn.replace("postgres://", "postgresql://") + f"{temporary_prefix}"
+ if not database_exists(dsn_for_sqlalchemy):
+ create_database(dsn_for_sqlalchemy)
+ try:
+ yield dsn + temporary_prefix
+ finally:
+ engine = create_engine(dsn.replace("postgres://", "postgresql://"))
+ dbname = "taskiq_postgres" + temporary_prefix
+ with engine.connect() as conn:
+ # DROP DATABASE can only be executed outside of a transaction
+ conn = conn.execution_options(isolation_level="AUTOCOMMIT") # noqa: PLW2901
+ conn.execute(
+ text(
+ "SELECT pg_terminate_backend(pid) "
+ "FROM pg_stat_activity "
+ "WHERE datname = :dbname AND pid <> pg_backend_pid()"
+ ),
+ {"dbname": dbname},
+ )
+ conn.execute(text(f'DROP DATABASE "{dbname}"'))
+ engine.dispose()
+
+
+@pytest.fixture
+async def pg_dsn(clean_database: str):
+ connection = await asyncpg.connect(dsn=clean_database)
+ transaction = connection.transaction()
+ try:
+ await transaction.start()
+ yield clean_database
+ finally:
+ await transaction.rollback()
+ await connection.close()
+
+
+@pytest.fixture
+async def connection(pg_dsn: str) -> tp.AsyncGenerator[asyncpg.Connection, None]:
+ connection = await asyncpg.connect(dsn=pg_dsn)
+ try:
+ yield connection
+ finally:
+ await connection.close()
diff --git a/tests/integration/test_broker.py b/tests/integration/test_broker.py
new file mode 100644
index 0000000..6ad488d
--- /dev/null
+++ b/tests/integration/test_broker.py
@@ -0,0 +1,146 @@
+from __future__ import annotations
+
+import asyncio
+import typing as tp
+import uuid
+
+import pytest
+from taskiq import BrokerMessage
+
+from taskiq_pg.asyncpg import AsyncpgBroker
+from taskiq_pg.psqlpy import PSQLPyBroker
+from taskiq_pg.psycopg import PsycopgBroker
+
+
+if tp.TYPE_CHECKING:
+ import asyncpg
+
+
+@pytest.fixture
+async def broker(
+ pg_dsn: str,
+ request: pytest.FixtureRequest,
+) -> tp.AsyncGenerator[AsyncpgBroker | PSQLPyBroker | PsycopgBroker, None]:
+ broker = request.param(dsn=pg_dsn)
+ await broker.startup()
+ try:
+ yield broker
+ finally:
+ await broker.shutdown()
+
+
+@pytest.fixture
+async def broker_2(
+ pg_dsn: str,
+ request: pytest.FixtureRequest,
+) -> tp.AsyncGenerator[AsyncpgBroker | PSQLPyBroker | PsycopgBroker, None]:
+ broker = request.param(dsn=pg_dsn)
+ await broker.startup()
+ try:
+ yield broker
+ finally:
+ await broker.shutdown()
+
+
+@pytest.mark.parametrize(
+ "broker",
+ [
+ AsyncpgBroker,
+ PSQLPyBroker,
+ PsycopgBroker,
+ ],
+ indirect=True,
+)
+async def test_when_message_was_published__then_worker_received_message(
+ broker,
+) -> None:
+ # given
+ message: BrokerMessage = BrokerMessage(
+ task_id=uuid.uuid4().hex,
+ task_name="example:best_task_ever",
+ message=b'{"hello":"world"}',
+ labels={},
+ )
+ await broker.startup()
+ listener = broker.listen()
+ task: asyncio.Task = asyncio.create_task(listener.__anext__())
+
+ # when
+ await broker.kick(message)
+ done, pending = await asyncio.wait(
+ {task},
+ timeout=5.0,
+ )
+
+ # then
+ assert len(done) == 1, "Worker should receive message"
+ assert len(pending) == 0, "Worker should receive message"
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize(
+ ("broker", "broker_2"),
+ [
+ (AsyncpgBroker, AsyncpgBroker),
+ (PSQLPyBroker, PSQLPyBroker),
+ (PsycopgBroker, PsycopgBroker),
+ ],
+ indirect=True,
+)
+async def test_when_two_workers_listen__then_single_message_processed_once(
+ connection: asyncpg.Connection,
+ broker,
+ broker_2,
+) -> None:
+ # given
+ table_name: str = "taskiq_messages"
+ task_id: str = uuid.uuid4().hex
+ message: BrokerMessage = BrokerMessage(
+ task_id=task_id,
+ task_name="example:best_task_ever",
+ message=b'{"hello":"world"}',
+ labels={},
+ )
+
+ # when
+ await broker.startup()
+ await broker_2.startup()
+
+    # Start waiting for the first message on both listeners before publishing,
+    # so that both are guaranteed to receive the NOTIFY.
+ task_1: asyncio.Task = asyncio.create_task(broker.listen().__anext__())
+ task_2: asyncio.Task = asyncio.create_task(broker_2.listen().__anext__())
+
+ await broker.kick(message)
+
+ done, _ = await asyncio.wait(
+ {task_1, task_2},
+ timeout=5.0,
+ return_when=asyncio.FIRST_COMPLETED,
+ )
+
+    # Then: only one listener receives the message
+ assert len(done) == 1, "Ровно один воркер должен получить сообщение"
+ winner_task: asyncio.Task = next(iter(done))
+ ack_message = tp.cast("tp.Any", winner_task.result())
+
+    # Before acknowledgement, verify the status in the table is 'processing'
+ row = await connection.fetchrow(
+ f"SELECT id, status FROM {table_name} WHERE task_id = $1",
+ task_id,
+ )
+ assert row is not None, "Сообщение должно существовать в таблице"
+ assert row["status"] == "processing", "Сообщение должно быть помечено как processing после claim"
+
+    # Acknowledge processing by the winning worker
+ await ack_message.ack()
+
+    # And verify the record has been deleted
+ cnt: int = tp.cast(
+ "int",
+ await connection.fetchval(
+ f"SELECT COUNT(*) FROM {table_name} WHERE task_id = $1",
+ task_id,
+ ),
+ )
+ assert cnt == 0, "Запись должна быть удалена после ack"
diff --git a/tests/integration/test_broker_single_delivery.py b/tests/integration/test_broker_single_delivery.py
deleted file mode 100644
index 960e390..0000000
--- a/tests/integration/test_broker_single_delivery.py
+++ /dev/null
@@ -1,101 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import typing as tp
-import uuid
-from contextlib import suppress
-
-import asyncpg
-import pytest
-from taskiq import BrokerMessage
-
-from taskiq_pg.asyncpg import AsyncpgBroker
-from taskiq_pg.psqlpy import PSQLPyBroker
-
-
-@pytest.mark.integration
-@pytest.mark.parametrize(
- "broker_class",
- [
- AsyncpgBroker,
- PSQLPyBroker,
- ],
-)
-async def test_when_two_workers_listen__then_single_message_processed_once(
- pg_dsn: str,
- broker_class: type[AsyncpgBroker | PSQLPyBroker],
-) -> None:
- # Given: уникальные имена таблицы и канала, два брокера, одна задача
- table_name: str = f"taskiq_messages_{uuid.uuid4().hex}"
- channel_name: str = f"taskiq_channel_{uuid.uuid4().hex}"
- task_id: str = uuid.uuid4().hex
-
- broker1 = broker_class(dsn=pg_dsn, table_name=table_name, channel_name=channel_name)
- broker2 = broker_class(dsn=pg_dsn, table_name=table_name, channel_name=channel_name)
-
- # Подключение для проверок состояния в таблице
- conn: asyncpg.Connection = await asyncpg.connect(dsn=pg_dsn)
-
- # Сообщение для публикации
- message: BrokerMessage = BrokerMessage(
- task_id=task_id,
- task_name="example:best_task_ever",
- message=b'{"hello":"world"}',
- labels={},
- )
-
- # When: стартуем брокеры и два слушателя, публикуем одно сообщение
- await broker1.startup()
- await broker2.startup()
-
- agen1 = broker1.listen()
- agen2 = broker2.listen()
-
- # Запускаем ожидание первого сообщения у обоих слушателей до публикации,
- # чтобы оба гарантированно получили NOTIFY.
- t1: asyncio.Task = asyncio.create_task(agen1.__anext__())
- t2: asyncio.Task = asyncio.create_task(agen2.__anext__())
-
- try:
- await broker1.kick(message)
-
- done, _ = await asyncio.wait(
- {t1, t2},
- timeout=5.0,
- return_when=asyncio.FIRST_COMPLETED,
- )
-
- # Then: только один слушатель получает сообщение
- assert len(done) == 1, "Ровно один воркер должен получить сообщение"
- winner_task: asyncio.Task = next(iter(done))
- ack_message = tp.cast("tp.Any", winner_task.result())
-
- # До подтверждения проверяем, что статус в таблице = 'processing'
- row = await conn.fetchrow(
- f"SELECT id, status FROM {table_name} WHERE task_id = $1",
- task_id,
- )
- assert row is not None, "Сообщение должно существовать в таблице"
- assert row["status"] == "processing", "Сообщение должно быть помечено как processing после claim"
-
- # Подтверждаем обработку победившим воркером
- await ack_message.ack()
-
- # И проверяем, что запись удалена
- cnt: int = tp.cast(
- "int",
- await conn.fetchval(
- f"SELECT COUNT(*) FROM {table_name} WHERE task_id = $1",
- task_id,
- ),
- )
- assert cnt == 0, "Запись должна быть удалена после ack"
- finally:
- with suppress(Exception):
- await broker1.shutdown()
- await broker2.shutdown()
-
- try:
- await conn.execute(f"DROP TABLE IF EXISTS {table_name}")
- finally:
- await conn.close()
diff --git a/tests/integration/test_result_backend.py b/tests/integration/test_result_backend.py
index 24763a5..3a6c943 100644
--- a/tests/integration/test_result_backend.py
+++ b/tests/integration/test_result_backend.py
@@ -6,9 +6,10 @@
import asyncpg
import pytest
-from taskiq_pg.aiopg.result_backend import AiopgResultBackend
-from taskiq_pg.asyncpg.result_backend import AsyncpgResultBackend
-from taskiq_pg.psqlpy.result_backend import PSQLPyResultBackend
+from taskiq_pg.aiopg import AiopgResultBackend
+from taskiq_pg.asyncpg import AsyncpgResultBackend
+from taskiq_pg.psqlpy import PSQLPyResultBackend
+from taskiq_pg.psycopg import PsycopgResultBackend
@pytest.mark.integration
@@ -18,6 +19,7 @@
AsyncpgResultBackend,
AiopgResultBackend,
PSQLPyResultBackend,
+ PsycopgResultBackend,
],
)
@pytest.mark.parametrize(
@@ -30,7 +32,7 @@
)
async def test_when_startup_called__then_table_is_created(
pg_dsn: str,
- result_backend_class: type[AsyncpgResultBackend | AiopgResultBackend | PSQLPyResultBackend],
+ result_backend_class: type[AsyncpgResultBackend | AiopgResultBackend | PSQLPyResultBackend | PsycopgResultBackend],
field_type: tp.Literal["VarChar", "Text", "Uuid"],
expected_pg_type: str,
) -> None:
diff --git a/tests/integration/test_schedule_source.py b/tests/integration/test_schedule_source.py
index 62bfb7a..0179d9f 100644
--- a/tests/integration/test_schedule_source.py
+++ b/tests/integration/test_schedule_source.py
@@ -1,52 +1,56 @@
from __future__ import annotations
-import json
import typing as tp
-import uuid
+from contextlib import asynccontextmanager
+from datetime import timedelta
-import asyncpg
import pytest
+from datetime import datetime
from taskiq_pg.aiopg import AiopgScheduleSource
from taskiq_pg.asyncpg import AsyncpgScheduleSource
from taskiq_pg.psqlpy import PSQLPyBroker, PSQLPyScheduleSource
+from taskiq_pg.psycopg import PsycopgScheduleSource
if tp.TYPE_CHECKING:
from taskiq import ScheduledTask
+ from taskiq_pg._internal import BasePostgresBroker, BasePostgresScheduleSource
-@pytest.mark.integration
-@pytest.mark.parametrize(
- "schedule_source_class",
- [
- PSQLPyScheduleSource,
- AiopgScheduleSource,
- AsyncpgScheduleSource,
- ],
-)
-async def test_when_broker_has_tasks_with_schedule_labels__then_startup_persists_schedules_in_db(
- pg_dsn: str,
- schedule_source_class: type[PSQLPyScheduleSource | AiopgScheduleSource | AsyncpgScheduleSource],
-) -> None:
- # Given: unique table, broker with a task having schedule labels, schedule source
- table_name: str = f"taskiq_schedules_{uuid.uuid4().hex}"
+
+@asynccontextmanager
+async def _get_schedule_source(
+ schedule_source_class: type[BasePostgresScheduleSource],
+ broker: BasePostgresBroker,
+ dsn: str,
+):
+ schedule_source = schedule_source_class(broker, dsn)
+ try:
+ yield schedule_source
+ finally:
+ await schedule_source.shutdown()
+
+
+@pytest.fixture
+def broker_with_scheduled_tasks(pg_dsn: str) -> PSQLPyBroker:
+ """Test broker with two tasks: one with one schedule and second with two schedules."""
broker = PSQLPyBroker(dsn=pg_dsn)
@broker.task(
- task_name="tests:scheduled_task",
+ task_name="tests:two_schedules",
schedule=[
{
- "cron": "*/5 * * * *",
- "cron_offset": None,
+ "cron": "*/10 * * * *",
+ "cron_offset": "Europe/Berlin",
"time": None,
- "args": [1, 2],
- "kwargs": {"a": "b"},
- "labels": {"source": "test"},
+ "args": [42],
+ "kwargs": {"x": 10},
+ "labels": {"foo": "bar"},
},
{
- "cron": "0 0 * * *",
- "cron_offset": None,
+ "cron": "0 1 * * *",
+ "cron_offset": timedelta(hours=1),
"time": None,
"args": [],
"kwargs": {},
@@ -54,43 +58,46 @@ async def test_when_broker_has_tasks_with_schedule_labels__then_startup_persists
},
],
)
- async def scheduled_task() -> None: # noqa: ARG001
+ async def _two_schedules() -> None:
return None
- source = schedule_source_class(dsn=pg_dsn, broker=broker, table_name=table_name)
- conn: asyncpg.Connection = await asyncpg.connect(dsn=pg_dsn)
+ @broker.task(
+ task_name="tests:one_schedule",
+ schedule=[
+ {
+ "cron_offset": None,
+ "time": datetime(2024, 1, 1, 12, 0, 0),
+ "args": [],
+ "kwargs": {},
+ "labels": {},
+ },
+ ],
+ )
+ async def _one_schedule() -> None:
+ return None
- try:
- # When: starting schedule source should truncate table and insert schedules from broker labels
- await source.startup()
-
- # Then: schedules are persisted in DB
- cnt: int = tp.cast("int", await conn.fetchval(f"SELECT COUNT(*) FROM {table_name}"))
- assert cnt == 2
-
- rows = await conn.fetch(
- f"SELECT task_name, schedule FROM {table_name} ORDER BY task_name, schedule->>'cron'",
- )
- assert len(rows) == 2 # noqa: PLR2004
- assert all(r["task_name"] == "tests:scheduled_task" for r in rows)
-
- first_schedule = json.loads(tp.cast("str", rows[0]["schedule"]))
- assert first_schedule["cron"] == "0 0 * * *"
- assert first_schedule["args"] == []
- assert first_schedule["kwargs"] == {}
- assert first_schedule["labels"] == {}
-
- second_schedule = json.loads(tp.cast("str", rows[1]["schedule"]))
- assert second_schedule["cron"] == "*/5 * * * *"
- assert second_schedule["args"] == [1, 2]
- assert second_schedule["kwargs"] == {"a": "b"}
- assert second_schedule["labels"] == {"source": "test"}
- finally:
- await source.shutdown()
- try:
- await conn.execute(f"DROP TABLE IF EXISTS {table_name}")
- finally:
- await conn.close()
+ @broker.task(
+ task_name="tests:without_schedule",
+ )
+ async def _without_schedule() -> None:
+ return None
+
+ @broker.task(task_name="tests:invalid_schedule", schedule={})
+ async def _invalid_schedule() -> None:
+ return None
+
+ @broker.task(
+ task_name="tests:invalid_schedule_2",
+ schedule=[
+ {
+ "invalid": "data",
+ }
+ ],
+ )
+ async def _invalid_schedule_2() -> None:
+ return None
+
+ return broker
@pytest.mark.integration
@@ -100,81 +107,23 @@ async def scheduled_task() -> None: # noqa: ARG001
PSQLPyScheduleSource,
AiopgScheduleSource,
AsyncpgScheduleSource,
+ PsycopgScheduleSource,
],
)
-async def test_when_schedule_rows_exist_in_db__then_get_schedules_returns_scheduled_tasks(
+async def test_when_labels_contain_schedules__then_get_schedules_returns_scheduled_tasks(
pg_dsn: str,
+ broker_with_scheduled_tasks: PSQLPyBroker,
schedule_source_class: type[PSQLPyScheduleSource | AiopgScheduleSource | AsyncpgScheduleSource],
) -> None:
- # Given: unique table, started schedule source with empty broker schedules, and rows inserted directly into DB
- table_name: str = f"taskiq_schedules_{uuid.uuid4().hex}"
- broker = PSQLPyBroker(dsn=pg_dsn)
- source = schedule_source_class(dsn=pg_dsn, broker=broker, table_name=table_name)
- conn: asyncpg.Connection = await asyncpg.connect(dsn=pg_dsn)
-
- try:
- # Create table and ensure it's empty by starting up the source
- await source.startup()
-
- schedule_id1 = uuid.uuid4()
- schedule_id2 = uuid.uuid4()
-
- schedule_payload1 = {
- "cron": "*/10 * * * *",
- "cron_offset": None,
- "time": None,
- "args": [42],
- "kwargs": {"x": 10},
- "labels": {"foo": "bar"},
- }
- schedule_payload2 = {
- "cron": "0 1 * * *",
- "cron_offset": None,
- "time": None,
- "args": [],
- "kwargs": {},
- "labels": {},
- }
-
- await conn.execute(
- f"INSERT INTO {table_name} (id, task_name, schedule) VALUES ($1, $2, $3)",
- str(schedule_id1),
- "tests:other_task",
- json.dumps(schedule_payload1),
- )
- await conn.execute(
- f"INSERT INTO {table_name} (id, task_name, schedule) VALUES ($1, $2, $3)",
- str(schedule_id2),
- "tests:other_task",
- json.dumps(schedule_payload2),
- )
-
- # When: fetching schedules via schedule source
- schedules: list[ScheduledTask] = await source.get_schedules()
-
- # Then: schedules are correctly deserialized and returned
- assert len(schedules) == 2 # noqa: PLR2004
-
- ids = {s.schedule_id for s in schedules}
- assert str(schedule_id1) in ids
- assert str(schedule_id2) in ids
-
- s1 = next(s for s in schedules if s.schedule_id == str(schedule_id1))
- assert s1.task_name == "tests:other_task"
- assert s1.args == [42]
- assert s1.kwargs == {"x": 10}
- assert s1.labels == {"foo": "bar"}
- assert s1.cron == "*/10 * * * *"
-
- s2 = next(s for s in schedules if s.schedule_id == str(schedule_id2))
- assert s2.task_name == "tests:other_task"
- assert s2.args == []
- assert s2.kwargs == {}
- assert s2.labels == {}
- assert s2.cron == "0 1 * * *"
- finally:
- await source.shutdown()
- try:
- await conn.execute(f"DROP TABLE IF EXISTS {table_name}")
- finally:
- await conn.close()
+ # When
+ async with _get_schedule_source(schedule_source_class, broker_with_scheduled_tasks, pg_dsn) as schedule_source:
+ await schedule_source.startup()
+ schedules: list[ScheduledTask] = await schedule_source.get_schedules()
+
+ # Then
+ assert len(schedules) == 3
+ assert {item.cron for item in schedules} == {"*/10 * * * *", "0 1 * * *", None}
+ assert {item.cron_offset for item in schedules} == {None, "Europe/Berlin", "PT1H"}
+ assert {item.task_name for item in schedules} == {"tests:one_schedule", "tests:two_schedules"}
+ assert {item.time for item in schedules} == {datetime(2024, 1, 1, 12, 0, 0), None}
+ assert all(item.schedule_id is not None for item in schedules)
diff --git a/uv.lock b/uv.lock
index 81df24f..21e73c9 100644
--- a/uv.lock
+++ b/uv.lock
@@ -320,6 +320,50 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" },
]
+[[package]]
+name = "greenlet"
+version = "3.2.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7d/ed/6bfa4109fcb23a58819600392564fea69cdc6551ffd5e69ccf1d52a40cbc/greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c", size = 271061, upload-time = "2025-08-07T13:17:15.373Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/fc/102ec1a2fc015b3a7652abab7acf3541d58c04d3d17a8d3d6a44adae1eb1/greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590", size = 629475, upload-time = "2025-08-07T13:42:54.009Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/26/80383131d55a4ac0fb08d71660fd77e7660b9db6bdb4e8884f46d9f2cc04/greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c", size = 640802, upload-time = "2025-08-07T13:45:25.52Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/7c/e7833dbcd8f376f3326bd728c845d31dcde4c84268d3921afcae77d90d08/greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b", size = 636703, upload-time = "2025-08-07T13:53:12.622Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/49/547b93b7c0428ede7b3f309bc965986874759f7d89e4e04aeddbc9699acb/greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31", size = 635417, upload-time = "2025-08-07T13:18:25.189Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d", size = 584358, upload-time = "2025-08-07T13:18:23.708Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5", size = 1113550, upload-time = "2025-08-07T13:42:37.467Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f", size = 1137126, upload-time = "2025-08-07T13:18:20.239Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" },
+ { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" },
+ { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" },
+ { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" },
+ { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" },
+ { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" },
+ { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" },
+ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" },
+ { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" },
+ { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" },
+]
+
[[package]]
name = "griffe"
version = "1.14.0"
@@ -774,6 +818,82 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b9/bc/2f6e5ba5ca7890c56bfe59b95536557f15ddc17171234edda342e53415c7/psqlpy-0.11.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:820fbeac25919fb463de3a50594ee12cd434fa539e483adfda87404dfa602b73", size = 5073593, upload-time = "2025-08-14T17:22:52.906Z" },
]
+[[package]]
+name = "psycopg"
+version = "3.2.10"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+ { name = "tzdata", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a9/f1/0258a123c045afaf3c3b60c22ccff077bceeb24b8dc2c593270899353bd0/psycopg-3.2.10.tar.gz", hash = "sha256:0bce99269d16ed18401683a8569b2c5abd94f72f8364856d56c0389bcd50972a", size = 160380, upload-time = "2025-09-08T09:13:37.775Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4a/90/422ffbbeeb9418c795dae2a768db860401446af0c6768bc061ce22325f58/psycopg-3.2.10-py3-none-any.whl", hash = "sha256:ab5caf09a9ec42e314a21f5216dbcceac528e0e05142e42eea83a3b28b320ac3", size = 206586, upload-time = "2025-09-08T09:07:50.121Z" },
+]
+
+[package.optional-dependencies]
+binary = [
+ { name = "psycopg-binary", marker = "implementation_name != 'pypy'" },
+]
+pool = [
+ { name = "psycopg-pool" },
+]
+
+[[package]]
+name = "psycopg-binary"
+version = "3.2.10"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/0c/24c3717da5fbbf32c7a01efc4fd2013c29d89bba53c1760c5eb144029341/psycopg_binary-3.2.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:037dc92fc7d3f2adae7680e17216934c15b919d6528b908ac2eb52aecc0addcf", size = 3995298, upload-time = "2025-09-08T09:07:55.239Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/77/b75012e582f7d75213f2fe13c93ad52634c852bf9d7117a2a1d79be389a1/psycopg_binary-3.2.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84f7e8c5e5031db342ae697c2e8fb48cd708ba56990573b33e53ce626445371d", size = 4066585, upload-time = "2025-09-08T09:08:00.813Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/0c/bf1d016d2a957b522c3f2fa09aef04e18f652cdfce40c48459c116737933/psycopg_binary-3.2.10-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a5a81104d88780018005fe17c37fa55b4afbb6dd3c205963cc56c025d5f1cc32", size = 4625245, upload-time = "2025-09-08T09:08:05.295Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/89/42bd027fcd1da82d4828d203dfee4c0aba9412c4685d4b47ef098061f0df/psycopg_binary-3.2.10-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:0c23e88e048bbc33f32f5a35981707c9418723d469552dd5ac4e956366e58492", size = 4721755, upload-time = "2025-09-08T09:08:11.246Z" },
+ { url = "https://files.pythonhosted.org/packages/86/3e/6359d3d57a13a3a556635f76fb26f45d3377a6d4be23d45824525c2a67a6/psycopg_binary-3.2.10-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9c9f2728488ac5848acdbf14bb4fde50f8ba783cbf3c19e9abd506741389fa7f", size = 4406209, upload-time = "2025-09-08T09:08:18.172Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bf/0b25d8d5b2b67ea558e133c2ab7f22c0b4602956dd23b0d34485e44e8311/psycopg_binary-3.2.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab1c6d761c4ee581016823dcc02f29b16ad69177fcbba88a9074c924fc31813e", size = 3881122, upload-time = "2025-09-08T09:08:25.116Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/6e/ee6bf664b16a759d22c4fc3c3d89eb15ff98d0feb3f487de5f4acde3014e/psycopg_binary-3.2.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a024b3ee539a475cbc59df877c8ecdd6f8552a1b522b69196935bc26dc6152fb", size = 3562815, upload-time = "2025-09-08T09:08:31.046Z" },
+ { url = "https://files.pythonhosted.org/packages/79/33/1cc4266b5d1c04f873a7fee8b92fa25ad690d2fcdfb5aecdfc2ea42c81a7/psycopg_binary-3.2.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:50130c0d1a2a01ec3d41631df86b6c1646c76718be000600a399dc1aad80b813", size = 3604842, upload-time = "2025-09-08T09:08:36.771Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/f8/7db03368fc36daa5f3ae609696b5a91976878b62bf95310ba1e6c93d81df/psycopg_binary-3.2.10-cp310-cp310-win_amd64.whl", hash = "sha256:7fa1626225a162924d2da0ff4ef77869f7a8501d320355d2732be5bf2dda6138", size = 2886848, upload-time = "2025-09-08T09:08:42.906Z" },
+ { url = "https://files.pythonhosted.org/packages/df/8c/f15bd09a0cc09f010c1462f1cb846d7d2706f0f6226ef8e953328243edcc/psycopg_binary-3.2.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db0eb06a19e4c64a08db0db80875ede44939af6a2afc281762c338fad5d6e547", size = 4002654, upload-time = "2025-09-08T09:08:49.779Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/df/9b7c9db70b624b96544560d062c27030a817e932f1fa803b58e25b26dcdd/psycopg_binary-3.2.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d922fdd49ed17c558b6b2f9ae2054c3d0cced2a34e079ce5a41c86904d0203f7", size = 4074650, upload-time = "2025-09-08T09:08:57.53Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/32/7aba5874e1dfd90bc3dcd26dd9200ae65e1e6e169230759dad60139f1b99/psycopg_binary-3.2.10-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d557a94cd6d2e775b3af6cc0bd0ff0d9d641820b5cc3060ccf1f5ca2bf971217", size = 4630536, upload-time = "2025-09-08T09:09:03.492Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/b1/a430d08b4eb28dc534181eb68a9c2a9e90b77c0e2933e338790534e7dce0/psycopg_binary-3.2.10-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:29b6bb87959515bc8b6abef10d8d23a9a681f03e48e9f0c8adb4b9fb7fa73f11", size = 4728387, upload-time = "2025-09-08T09:09:08.909Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/d4/26d0fa9e8e7c05f0338024d2822a3740fac6093999443ad54e164f154bcc/psycopg_binary-3.2.10-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1b29285474e3339d0840e1b5079fdb0481914108f92ec62de0c87ae333c60b24", size = 4413805, upload-time = "2025-09-08T09:09:13.704Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/f2/d05c037c02e2ac4cb1c5b895c6c82428b3eaa0c48d08767b771bc2ea155a/psycopg_binary-3.2.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:62590dd113d10cd9c08251cb80b32e2e8aaf01ece04a700322e776b1d216959f", size = 3886830, upload-time = "2025-09-08T09:09:18.102Z" },
+ { url = "https://files.pythonhosted.org/packages/8f/84/db3dee4335cd80c56e173a5ffbda6d17a7a10eeed030378d9adf3ab19ea7/psycopg_binary-3.2.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:764a5b9b40ad371c55dfdf95374d89e44a82fd62272d4fceebea0adb8930e2fb", size = 3568543, upload-time = "2025-09-08T09:09:22.765Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/45/4117274f24b8d49b8a9c1cb60488bb172ac9e57b8f804726115c332d16f8/psycopg_binary-3.2.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bd3676a04970cf825d2c771b0c147f91182c5a3653e0dbe958e12383668d0f79", size = 3610614, upload-time = "2025-09-08T09:09:27.534Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/22/f1b294dfc8af32a96a363aa99c0ebb530fc1c372a424c54a862dcf77ef47/psycopg_binary-3.2.10-cp311-cp311-win_amd64.whl", hash = "sha256:646048f46192c8d23786cc6ef19f35b7488d4110396391e407eca695fdfe9dcd", size = 2888340, upload-time = "2025-09-08T09:09:32.696Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/34/91c127fdedf8b270b1e3acc9f849d07ee8b80194379590c6f48dcc842924/psycopg_binary-3.2.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1dee2f4d2adc9adacbfecf8254bd82f6ac95cff707e1b9b99aa721cd1ef16b47", size = 3983963, upload-time = "2025-09-08T09:09:38.454Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/03/1d10ce2bf70cf549a8019639dc0c49be03e41092901d4324371a968b8c01/psycopg_binary-3.2.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8b45e65383da9c4a42a56f817973e521e893f4faae897fe9f1a971f9fe799742", size = 4069171, upload-time = "2025-09-08T09:09:44.395Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/5e/39cb924d6e119145aa5fc5532f48e79c67e13a76675e9366c327098db7b5/psycopg_binary-3.2.10-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:484d2b1659afe0f8f1cef5ea960bb640e96fa864faf917086f9f833f5c7a8034", size = 4610780, upload-time = "2025-09-08T09:09:53.073Z" },
+ { url = "https://files.pythonhosted.org/packages/20/05/5a1282ebc4e39f5890abdd4bb7edfe9d19e4667497a1793ad288a8b81826/psycopg_binary-3.2.10-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:3bb4046973264ebc8cb7e20a83882d68577c1f26a6f8ad4fe52e4468cd9a8eee", size = 4700479, upload-time = "2025-09-08T09:09:58.183Z" },
+ { url = "https://files.pythonhosted.org/packages/af/7a/e1c06e558ca3f37b7e6b002e555ebcfce0bf4dee6f3ae589a7444e16ce17/psycopg_binary-3.2.10-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:14bcbcac0cab465d88b2581e43ec01af4b01c9833e663f1352e05cb41be19e44", size = 4391772, upload-time = "2025-09-08T09:10:04.406Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/d6/56f449c86988c9a97dc6c5f31d3689cfe8aedb37f2a02bd3e3882465d385/psycopg_binary-3.2.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:70bb7f665587dfd79e69f48b34efe226149454d7aab138ed22d5431d703de2f6", size = 3858214, upload-time = "2025-09-08T09:10:09.693Z" },
+ { url = "https://files.pythonhosted.org/packages/93/56/f9eed67c9a1701b1e315f3687ff85f2f22a0a7d0eae4505cff65ef2f2679/psycopg_binary-3.2.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d2fe9eaa367f6171ab1a21a7dcb335eb2398be7f8bb7e04a20e2260aedc6f782", size = 3528051, upload-time = "2025-09-08T09:10:13.423Z" },
+ { url = "https://files.pythonhosted.org/packages/25/cc/636709c72540cb859566537c0a03e46c3d2c4c4c2e13f78df46b6c4082b3/psycopg_binary-3.2.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:299834cce3eec0c48aae5a5207fc8f0c558fd65f2ceab1a36693329847da956b", size = 3580117, upload-time = "2025-09-08T09:10:17.81Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/a8/a2c822fa06b0dbbb8ad4b0221da2534f77bac54332d2971dbf930f64be5a/psycopg_binary-3.2.10-cp312-cp312-win_amd64.whl", hash = "sha256:e037aac8dc894d147ef33056fc826ee5072977107a3fdf06122224353a057598", size = 2878872, upload-time = "2025-09-08T09:10:22.162Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/80/db840f7ebf948ab05b4793ad34d4da6ad251829d6c02714445ae8b5f1403/psycopg_binary-3.2.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:55b14f2402be027fe1568bc6c4d75ac34628ff5442a70f74137dadf99f738e3b", size = 3982057, upload-time = "2025-09-08T09:10:28.725Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/53/39308328bb8388b1ec3501a16128c5ada405f217c6d91b3d921b9f3c5604/psycopg_binary-3.2.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:43d803fb4e108a67c78ba58f3e6855437ca25d56504cae7ebbfbd8fce9b59247", size = 4066830, upload-time = "2025-09-08T09:10:34.083Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/5a/18e6f41b40c71197479468cb18703b2999c6e4ab06f9c05df3bf416a55d7/psycopg_binary-3.2.10-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:470594d303928ab72a1ffd179c9c7bde9d00f76711d6b0c28f8a46ddf56d9807", size = 4610747, upload-time = "2025-09-08T09:10:39.697Z" },
+ { url = "https://files.pythonhosted.org/packages/be/ab/9198fed279aca238c245553ec16504179d21aad049958a2865d0aa797db4/psycopg_binary-3.2.10-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a1d4e4d309049e3cb61269652a3ca56cb598da30ecd7eb8cea561e0d18bc1a43", size = 4700301, upload-time = "2025-09-08T09:10:44.715Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/0d/59024313b5e6c5da3e2a016103494c609d73a95157a86317e0f600c8acb3/psycopg_binary-3.2.10-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a92ff1c2cd79b3966d6a87e26ceb222ecd5581b5ae4b58961f126af806a861ed", size = 4392679, upload-time = "2025-09-08T09:10:49.106Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/47/21ef15d8a66e3a7a76a177f885173d27f0c5cbe39f5dd6eda9832d6b4e19/psycopg_binary-3.2.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac0365398947879c9827b319217096be727da16c94422e0eb3cf98c930643162", size = 3857881, upload-time = "2025-09-08T09:10:56.75Z" },
+ { url = "https://files.pythonhosted.org/packages/af/35/c5e5402ccd40016f15d708bbf343b8cf107a58f8ae34d14dc178fdea4fd4/psycopg_binary-3.2.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:42ee399c2613b470a87084ed79b06d9d277f19b0457c10e03a4aef7059097abc", size = 3531135, upload-time = "2025-09-08T09:11:03.346Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/e2/9b82946859001fe5e546c8749991b8b3b283f40d51bdc897d7a8e13e0a5e/psycopg_binary-3.2.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2028073fc12cd70ba003309d1439c0c4afab4a7eee7653b8c91213064fffe12b", size = 3581813, upload-time = "2025-09-08T09:11:08.76Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/91/c10cfccb75464adb4781486e0014ecd7c2ad6decf6cbe0afd8db65ac2bc9/psycopg_binary-3.2.10-cp313-cp313-win_amd64.whl", hash = "sha256:8390db6d2010ffcaf7f2b42339a2da620a7125d37029c1f9b72dfb04a8e7be6f", size = 2881466, upload-time = "2025-09-08T09:11:14.078Z" },
+]
+
+[[package]]
+name = "psycopg-pool"
+version = "3.2.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cf/13/1e7850bb2c69a63267c3dbf37387d3f71a00fd0e2fa55c5db14d64ba1af4/psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5", size = 29770, upload-time = "2025-02-26T12:03:47.129Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252, upload-time = "2025-02-26T12:03:45.073Z" },
+]
+
[[package]]
name = "psycopg2-binary"
version = "2.9.10"
@@ -1146,6 +1266,63 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
+[[package]]
+name = "sqlalchemy"
+version = "2.0.44"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a2/a7/e9ccfa7eecaf34c6f57d8cb0bb7cbdeeff27017cc0f5d0ca90fdde7a7c0d/sqlalchemy-2.0.44-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c77f3080674fc529b1bd99489378c7f63fcb4ba7f8322b79732e0258f0ea3ce", size = 2137282, upload-time = "2025-10-10T15:36:10.965Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/e1/50bc121885bdf10833a4f65ecbe9fe229a3215f4d65a58da8a181734cae3/sqlalchemy-2.0.44-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26ef74ba842d61635b0152763d057c8d48215d5be9bb8b7604116a059e9985", size = 2127322, upload-time = "2025-10-10T15:36:12.428Z" },
+ { url = "https://files.pythonhosted.org/packages/46/f2/a8573b7230a3ce5ee4b961a2d510d71b43872513647398e595b744344664/sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a172b31785e2f00780eccab00bc240ccdbfdb8345f1e6063175b3ff12ad1b0", size = 3214772, upload-time = "2025-10-10T15:34:15.09Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/d8/c63d8adb6a7edaf8dcb6f75a2b1e9f8577960a1e489606859c4d73e7d32b/sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9480c0740aabd8cb29c329b422fb65358049840b34aba0adf63162371d2a96e", size = 3214434, upload-time = "2025-10-10T15:47:00.473Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/a6/243d277a4b54fae74d4797957a7320a5c210c293487f931cbe036debb697/sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17835885016b9e4d0135720160db3095dc78c583e7b902b6be799fb21035e749", size = 3155365, upload-time = "2025-10-10T15:34:17.932Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/f8/6a39516ddd75429fd4ee5a0d72e4c80639fab329b2467c75f363c2ed9751/sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cbe4f85f50c656d753890f39468fcd8190c5f08282caf19219f684225bfd5fd2", size = 3178910, upload-time = "2025-10-10T15:47:02.346Z" },
+ { url = "https://files.pythonhosted.org/packages/43/f0/118355d4ad3c39d9a2f5ee4c7304a9665b3571482777357fa9920cd7a6b4/sqlalchemy-2.0.44-cp310-cp310-win32.whl", hash = "sha256:2fcc4901a86ed81dc76703f3b93ff881e08761c63263c46991081fd7f034b165", size = 2105624, upload-time = "2025-10-10T15:38:15.552Z" },
+ { url = "https://files.pythonhosted.org/packages/61/83/6ae5f9466f8aa5d0dcebfff8c9c33b98b27ce23292df3b990454b3d434fd/sqlalchemy-2.0.44-cp310-cp310-win_amd64.whl", hash = "sha256:9919e77403a483ab81e3423151e8ffc9dd992c20d2603bf17e4a8161111e55f5", size = 2129240, upload-time = "2025-10-10T15:38:17.175Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517, upload-time = "2025-10-10T15:36:15.64Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738, upload-time = "2025-10-10T15:36:16.91Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145, upload-time = "2025-10-10T15:34:19.569Z" },
+ { url = "https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511, upload-time = "2025-10-10T15:47:05.088Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161, upload-time = "2025-10-10T15:34:21.193Z" },
+ { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426, upload-time = "2025-10-10T15:47:07.196Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/3b/55c09b285cb2d55bdfa711e778bdffdd0dc3ffa052b0af41f1c5d6e582fa/sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3", size = 2105392, upload-time = "2025-10-10T15:38:20.051Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/23/907193c2f4d680aedbfbdf7bf24c13925e3c7c292e813326c1b84a0b878e/sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5", size = 2130293, upload-time = "2025-10-10T15:38:21.601Z" },
+ { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675, upload-time = "2025-10-10T16:03:31.064Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726, upload-time = "2025-10-10T16:03:35.934Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603, upload-time = "2025-10-10T15:35:28.322Z" },
+ { url = "https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842, upload-time = "2025-10-10T15:43:45.431Z" },
+ { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558, upload-time = "2025-10-10T15:35:29.93Z" },
+ { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570, upload-time = "2025-10-10T15:43:48.407Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447, upload-time = "2025-10-10T15:03:21.678Z" },
+ { url = "https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912, upload-time = "2025-10-10T15:03:24.656Z" },
+ { url = "https://files.pythonhosted.org/packages/45/d3/c67077a2249fdb455246e6853166360054c331db4613cda3e31ab1cadbef/sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1", size = 2135479, upload-time = "2025-10-10T16:03:37.671Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/91/eabd0688330d6fd114f5f12c4f89b0d02929f525e6bf7ff80aa17ca802af/sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45", size = 2123212, upload-time = "2025-10-10T16:03:41.755Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/bb/43e246cfe0e81c018076a16036d9b548c4cc649de241fa27d8d9ca6f85ab/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976", size = 3255353, upload-time = "2025-10-10T15:35:31.221Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/96/c6105ed9a880abe346b64d3b6ddef269ddfcab04f7f3d90a0bf3c5a88e82/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c", size = 3260222, upload-time = "2025-10-10T15:43:50.124Z" },
+ { url = "https://files.pythonhosted.org/packages/44/16/1857e35a47155b5ad927272fee81ae49d398959cb749edca6eaa399b582f/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d", size = 3189614, upload-time = "2025-10-10T15:35:32.578Z" },
+ { url = "https://files.pythonhosted.org/packages/88/ee/4afb39a8ee4fc786e2d716c20ab87b5b1fb33d4ac4129a1aaa574ae8a585/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40", size = 3226248, upload-time = "2025-10-10T15:43:51.862Z" },
+ { url = "https://files.pythonhosted.org/packages/32/d5/0e66097fc64fa266f29a7963296b40a80d6a997b7ac13806183700676f86/sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73", size = 2101275, upload-time = "2025-10-10T15:03:26.096Z" },
+ { url = "https://files.pythonhosted.org/packages/03/51/665617fe4f8c6450f42a6d8d69243f9420f5677395572c2fe9d21b493b7b/sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e", size = 2127901, upload-time = "2025-10-10T15:03:27.548Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" },
+]
+
+[[package]]
+name = "sqlalchemy-utils"
+version = "0.42.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "sqlalchemy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/63/80/4e15fdcfc25a2226122bf316f0ebac86d840ab3fb38b38ca4cabc395865e/sqlalchemy_utils-0.42.0.tar.gz", hash = "sha256:6d1ecd3eed8b941f0faf8a531f5d5cee7cffa2598fcf8163de8c31c7a417a5e0", size = 130531, upload-time = "2025-08-30T18:43:41.904Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/52/86/21e97809b017a4ebc88971eea335130782421851b0ed8dc3ab6126b479f1/sqlalchemy_utils-0.42.0-py3-none-any.whl", hash = "sha256:c8c0b7f00f4734f6f20e9a4d06b39d79d58c8629cba50924fcaeb20e28eb4f48", size = 91744, upload-time = "2025-08-30T18:43:40.199Z" },
+]
+
[[package]]
name = "taskiq"
version = "0.11.18"
@@ -1193,6 +1370,9 @@ asyncpg = [
psqlpy = [
{ name = "psqlpy" },
]
+psycopg = [
+ { name = "psycopg", extra = ["binary", "pool"] },
+]
[package.dev-dependencies]
dev = [
@@ -1205,6 +1385,7 @@ dev = [
{ name = "pytest-asyncio" },
{ name = "pytest-cov" },
{ name = "ruff" },
+ { name = "sqlalchemy-utils" },
]
[package.metadata]
@@ -1212,9 +1393,10 @@ requires-dist = [
{ name = "aiopg", marker = "extra == 'aiopg'", specifier = ">=1.4.0" },
{ name = "asyncpg", marker = "extra == 'asyncpg'", specifier = ">=0.30.0" },
{ name = "psqlpy", marker = "extra == 'psqlpy'", specifier = ">=0.11.6" },
+ { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'psycopg'", specifier = ">=3.2.10" },
{ name = "taskiq", specifier = ">=0.11.18" },
]
-provides-extras = ["aiopg", "asyncpg", "psqlpy"]
+provides-extras = ["aiopg", "asyncpg", "psqlpy", "psycopg"]
[package.metadata.requires-dev]
dev = [
@@ -1227,6 +1409,7 @@ dev = [
{ name = "pytest-asyncio", specifier = ">=1.1.0" },
{ name = "pytest-cov", specifier = ">=7.0.0" },
{ name = "ruff", specifier = ">=0.14.0" },
+ { name = "sqlalchemy-utils", specifier = ">=0.42.0" },
]
[[package]]
@@ -1289,6 +1472,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" },
]
+[[package]]
+name = "tzdata"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
+]
+
[[package]]
name = "urllib3"
version = "2.5.0"