diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..8adba64 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,36 @@ +name: CI + +on: + push: + branches: ["**"] + pull_request: + branches: ["**"] + +jobs: + lint-and-test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install uv + uses: astral-sh/setup-uv@v3 + + - name: Install dependencies + run: uv sync --dev + + - name: Ruff lint + run: uv run ruff check . + + - name: Ruff format check + run: uv run ruff format --check . + + - name: Type check with mypy + run: uv run mypy src/open_data_agent --ignore-missing-imports + + - name: Run unit tests (no integration) + run: uv run pytest -m "not integration" --tb=short diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5ecc269 --- /dev/null +++ b/.gitignore @@ -0,0 +1,48 @@ +# Python +__pycache__/ +*.py[cod] +*.pyo +*.pyd +.Python +*.egg-info/ +dist/ +build/ +.venv/ +venv/ +*.egg + +# uv +.uv/ + +# Test / coverage +.pytest_cache/ +.coverage +htmlcov/ +coverage.xml + +# Auto-generated content (never commit) +docs/data-catalog/ +history/ +.opencode/rules/data-agent.md +evals/results/ + +# BMAD framework installation (tool infrastructure, not project code) +_bmad/ +# BMAD generated outputs (all) +_bmad-output/ +# BMAD IDE agent configs (generated from _bmad installation) +.opencode/agents/ +.opencode/commands/ + +# Environment +.env +*.env.local + +# OS +.DS_Store +Thumbs.db + +# IDE +.idea/ +.vscode/ +*.swp diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..e4fba21 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..790295c --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,31 @@ +version: "3.8" + +services: + postgres: + image: postgres:16-alpine + 
environment: + POSTGRES_USER: oda_test + POSTGRES_PASSWORD: oda_test_pass + POSTGRES_DB: oda_test_db + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U oda_test"] + interval: 5s + timeout: 5s + retries: 5 + + mysql: + image: mysql:8.0 + environment: + MYSQL_ROOT_PASSWORD: oda_root_pass + MYSQL_USER: oda_test + MYSQL_PASSWORD: oda_test_pass + MYSQL_DATABASE: oda_test_db + ports: + - "3306:3306" + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "oda_test", "-poda_test_pass"] + interval: 5s + timeout: 5s + retries: 10 diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 0000000..1ba17ed --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,774 @@ +# Open Data Agent — Architecture + +> Last updated: 2026-03-09 | Python 3.12 | `uv` project + +--- + +## Table of Contents + +1. [Overview](#overview) +2. [Functional Requirements](#functional-requirements) +3. [Non-Functional Requirements](#non-functional-requirements) +4. [Technology Stack](#technology-stack) +5. [Project Structure](#project-structure) +6. [Component Architecture](#component-architecture) +7. [Data Models](#data-models) +8. [Exception Hierarchy](#exception-hierarchy) +9. [Security Model](#security-model) +10. [Query Safety](#query-safety) +11. [CLI Command Surface](#cli-command-surface) +12. [Data Flows](#data-flows) +13. [Configuration System](#configuration-system) +14. [Dialect Adapter System](#dialect-adapter-system) +15. [Testing Strategy](#testing-strategy) +16. [Development Workflow](#development-workflow) +17. [Coding Conventions](#coding-conventions) + +--- + +## Overview + +Open Data Agent (`oda`) is a local Python CLI tool that bridges natural language and SQL databases. It is designed as a tool layer for AI coding agents (primarily OpenCode) to query databases without memorising schema or writing SQL from scratch. 
+ +**Core loop:** + +``` +User question → AI agent reads schema docs + memory + → agent calls `oda query "SELECT ..."` + → result printed + logged to history + → if zero rows: diagnostic context emitted to stderr for self-correction +``` + +**Key design principles:** + +- **Read-only by default** — hard enforcement; no bypass mechanism +- **Schema docs as context** — agent navigates a generated markdown catalog, not live introspection +- **Self-healing** — zero-row results trigger structured diagnostics the agent can act on +- **Local-first** — no server, no network service; all data stays on disk +- **Single active connection** — one DB at a time, switchable via config + +--- + +## Functional Requirements + +| ID | Requirement | +|----|-------------| +| FR1 | Multi-database connectivity — PostgreSQL, MySQL, SQLite via unified `DialectAdapter` | +| FR2 | Knowledge Layer — auto-generated hierarchical markdown schema catalog | +| FR3 | Memory Layer — curated markdown knowledge files (query patterns, corrections, etc.) 
| +| FR4 | Tools/CLI Layer — full `oda` command surface for all operations | +| FR5 | History Layer — JSONL auto-log of every executed query | +| FR6 | Evals Layer — dialect-aware golden SQL regression framework | +| FR7 | Agent Configuration — OpenCode rules file rendered from template on `oda connect` | +| FR8 | SQL Dialect Adaptation — `DialectAdapter` ABC with per-DB subclasses | +| FR9 | Query Safety — read-only whitelist, dangerous pattern detection, row limits, timeouts | +| FR10 | Secure Credential Management — `~/.config/open-data-agent/connections.yaml` (`chmod 600`) | +| FR11 | First-Run Onboarding — `oda init` creates config directory and default files | +| FR12 | Self-Healing Diagnostics — structured diagnostic output on zero-row or error results | + +--- + +## Non-Functional Requirements + +| ID | Requirement | +|----|-------------| +| NFR1 | **Security** — no credentials in project directory; atomic `0o600` write; hidden password input | +| NFR2 | **Portability** — macOS, Linux, Windows; no server deployment | +| NFR3 | **Extensibility** — `DialectAdapter` ABC plugin pattern; new adapters without modifying core | +| NFR4 | **Consistency** — schema docs visually identical across all DB types via `NormalizedColumn` | +| NFR5 | **Safety** — `SafetyChecker` runs before every query; no exceptions | +| NFR6 | **IDE-agnosticism** — OpenCode primary for v1; template committed, generated file gitignored | +| NFR7 | **Testability** — in-memory SQLite for unit tests; `@pytest.mark.integration` gates live-DB tests | +| NFR8 | **Code Quality** — type annotations on all signatures; `ruff`; `mypy`; `py.typed`; `__all__` everywhere | + +--- + +## Technology Stack + +| Package | Version | Purpose | +|---------|---------|---------| +| Python | 3.12 | Runtime (pinned via `.python-version`) | +| `click` | 8.3.1 | CLI framework | +| `rich` | 14.3.3 | Terminal formatting (tables, colours) | +| `pyyaml` | latest | YAML parsing (memory files, connections config) | +| 
`psycopg` | v3 (`[binary]`) | PostgreSQL driver | +| `pymysql` | latest | MySQL driver (pure Python) | +| `sqlite3` | stdlib | SQLite (no install required) | +| `pytest` | 9.0.2 | Test framework | +| `pytest-cov` | latest | Coverage reporting | +| `ruff` | latest | Linting + formatting | +| `mypy` | latest | Static type checking | +| `uv` | latest | Package manager and script runner | + +--- + +## Project Structure + +``` +open-data-agent/ +├── pyproject.toml # Metadata, deps, scripts, ruff + pytest config +├── uv.lock # Locked dependency versions (committed) +├── .python-version # Python 3.12 pin +├── .gitignore +├── docker-compose.yml # PostgreSQL + MySQL for local integration tests +│ +├── .github/ +│ └── workflows/ +│ └── ci.yml # ruff + mypy + pytest (unit only, SQLite in-memory) +│ +├── .opencode/ +│ └── rules/ +│ ├── data-agent.md.template # 6-block rules template (committed) +│ └── data-agent.md # GENERATED by `oda connect` (gitignored) +│ +├── src/ +│ └── open_data_agent/ +│ ├── __init__.py +│ ├── py.typed # PEP 561 typed marker +│ │ +│ ├── cli.py # REGISTRY ONLY — imports + add_command() calls; zero logic +│ ├── cli_init.py # oda init +│ ├── cli_connections.py # oda connect / connections list|add|remove +│ ├── cli_schema.py # oda schemas / tables / describe / sample +│ ├── cli_query.py # oda query +│ ├── cli_docs.py # oda docs generate|status +│ ├── cli_memory.py # oda memory add|list|search +│ ├── cli_history.py # oda history list|search|stats +│ ├── cli_eval.py # oda eval run|results|add +│ │ +│ ├── config.py # Config dataclass + get_config() + get_config_dir() +│ ├── exceptions.py # All custom exceptions and warnings +│ ├── memory.py # MemoryManager +│ ├── history.py # HistoryTracker +│ ├── docs_generator.py # DocGenerator +│ ├── eval_runner.py # EvalRunner +│ │ +│ └── db/ +│ ├── dialect.py # DialectAdapter ABC + SQLite/PostgreSQL/MySQL subclasses +│ ├── connection.py # ConnectionManager +│ ├── safety.py # SafetyChecker +│ ├── query.py # QueryEngine + 
_strip_literals_and_comments +│ ├── diagnostics.py # DiagnosticEngine +│ └── schema.py # SchemaInspector + NormalizedColumn +│ +├── tests/ +│ ├── conftest.py # Shared fixtures (in-memory SQLite, standard schema) +│ ├── unit/ +│ │ ├── db/ # test_query.py, test_safety.py, test_schema.py +│ │ ├── cli/ # test_cli_*.py (CliRunner-based) +│ │ ├── test_config.py +│ │ ├── test_docs_generator.py +│ │ ├── test_eval_runner.py +│ │ ├── test_history.py +│ │ └── test_memory.py +│ └── integration/ # Requires: docker compose up -d +│ +├── docs/ +│ ├── architecture.md # This document +│ └── data-catalog/ # AUTO-GENERATED by `oda docs generate` (gitignored) +│ +├── memory/ # Curated knowledge files (committed, human-maintained) +│ └── .gitkeep +│ +├── evals/ +│ └── golden_queries.yaml +│ +└── history/ # AUTO-LOGGED JSONL (gitignored) +``` + +**Hard rules on structure:** +- `cli.py` is a registry only — zero business logic +- All tests in `tests/unit/` or `tests/integration/` — never co-located with source +- `docs/data-catalog/` and `history/` are gitignored — never commit generated content +- Every module must define `__all__` + +--- + +## Component Architecture + +### Dependency Graph + +``` +CLI Layer (cli*.py) + │ thin orchestration — calls service layer, formats output with rich + ▼ +Service Layer + ├── config.py ─────────────────────→ ~/.config/open-data-agent/ + ├── db/connection.py ───────────────→ live DB via dialect driver + ├── db/safety.py ───────────────────→ (pure logic, no DB calls) + ├── db/query.py ────────────────────→ db/connection.py + db/safety.py + db/diagnostics.py + ├── db/schema.py ───────────────────→ db/connection.py + db/dialect.py + ├── db/diagnostics.py ──────────────→ db/connection.py (sample queries on zero-row results) + ├── docs_generator.py ──────────────→ db/schema.py → docs/data-catalog/ + ├── memory.py ──────────────────────→ memory/ (flat directory, frontmatter-indexed) + ├── history.py ─────────────────────→ 
~/.config/open-data-agent/history.jsonl + └── eval_runner.py ─────────────────→ db/query.py + evals/golden_queries.yaml + +Dialect Layer (db/dialect.py) + ├── PostgreSQLAdapter ──────────────→ psycopg v3 + ├── MySQLAdapter ───────────────────→ pymysql + └── SQLiteAdapter ──────────────────→ sqlite3 (stdlib) + +Agent Runtime (OpenCode IDE) + └── .opencode/rules/data-agent.md → reads docs/data-catalog/ + memory/ + (rendered by oda connect) → invokes oda CLI commands +``` + +### Component Responsibilities + +| Component | Module | Responsibility | +|-----------|--------|----------------| +| `ConnectionManager` | `db/connection.py` | Reads `connections.yaml`, manages active connection state, constructs driver connections | +| `DialectAdapter` | `db/dialect.py` | ABC + per-DB subclasses; all dialect-specific SQL isolated here | +| `SchemaInspector` | `db/schema.py` | Uniform introspection (schemas, tables, columns, sample rows, profile) via adapter | +| `QueryEngine` | `db/query.py` | Safety check → LIMIT injection → timeout-protected execution → history logging | +| `SafetyChecker` | `db/safety.py` | Whitelist + blacklist enforcement; dialect-aware dangerous pattern detection | +| `DiagnosticEngine` | `db/diagnostics.py` | Zero-row analysis: table row counts, column value samples, NULL counts → stderr | +| `DocGenerator` | `docs_generator.py` | Hierarchical markdown catalog from `SchemaInspector`; staleness detection | +| `MemoryManager` | `memory.py` | Add/list/search flat markdown knowledge files with YAML frontmatter | +| `HistoryTracker` | `history.py` | Append/iterate JSONL query log; secret pattern warnings | +| `EvalRunner` | `eval_runner.py` | Load/filter/run/record dialect-aware golden SQL queries | +| `Config` | `config.py` | Layered config resolution (defaults → config.yaml → connection options → CLI flags) | + +--- + +## Data Models + +### `Config` (in `config.py`) + +```python +@dataclass +class Config: + row_limit: int = 1000 # default LIMIT auto-injected 
on queries + max_row_limit: int = 10000 # hard ceiling; user cannot exceed this + query_timeout_seconds: int = 30 # query execution timeout + docs_staleness_days: int = 7 # warn if schema docs older than this + log_level: str = "INFO" + strict_mode: bool = False # if True: block queries when docs are stale +``` + +### `NormalizedColumn` (in `db/schema.py`) + +```python +@dataclass +class NormalizedColumn: + name: str + data_type: str + is_nullable: bool + default_value: str | None + is_primary_key: bool + ordinal_position: int + comment: str | None = None # PG: pg_description, MySQL: COLUMN_COMMENT, SQLite: None +``` + +All three adapters normalise their introspection results to this schema. `DocGenerator` renders it to uniform markdown. + +### `QueryResult` (in `db/query.py`) + +```python +@dataclass +class QueryResult: + columns: list[str] + rows: list[tuple[object, ...]] + row_count: int + duration_ms: float + sql: str # final SQL after LIMIT injection + truncated: bool # True when a LIMIT was auto-applied + limit_applied: int # the effective LIMIT value + error: str | None # populated on failure; None on success +``` + +### JSONL History Record + +Every query is appended to `history.jsonl` in this exact schema: + +```json +{ + "id": "uuid4-string", + "timestamp": "2026-03-09T14:30:00+00:00", + "connection": "my-postgres", + "db_type": "postgresql", + "sql": "SELECT * FROM orders LIMIT 1000", + "tables": ["orders"], + "row_count": 42, + "duration_ms": 12.5, + "question": "Show me recent orders", + "error": null +} +``` + +### Memory File Frontmatter + +```yaml +--- +title: "Revenue uses net_item_price not item_price" +category: data_quality # one of: data_quality | query_pattern | business_context | correction +created_at: "2026-03-09T14:00:00+00:00" +tags: [revenue, columns] +--- +``` + +### Schema Doc Frontmatter + +```yaml +--- +table: orders +schema: public +full_path: "public.orders" +column_count: 12 +generated_at: "2026-03-09T14:00:00+00:00" +db_type: 
postgresql +--- +``` + +### `golden_queries.yaml` Entry + +```yaml +- id: sales-001 + question: "Total sales last 30 days" + sql: "SELECT SUM(amount) FROM orders WHERE created_at >= ..." + dialects: [postgresql, sqlite] + active: true +``` + +ID format: `{domain}-{NNN}` (e.g. `sales-001`, `order-items-042`). IDs are permanent; inactive entries use `active: false`. + +--- + +## Exception Hierarchy + +All exceptions are defined in `exceptions.py`. No other module defines its own exceptions. + +```python +OdaError(Exception) # base class for all oda errors +├── ConnectionError # DB connection failure +├── SafetyError # SQL blocked by safety checker +├── ConfigError # missing or invalid configuration +├── DialectError # unsupported adapter operation +└── EvalFailure # golden query produced unexpected results + +StaleDocsWarning(UserWarning) # schema docs older than staleness threshold +``` + +--- + +## Security Model + +### Credential Storage + +- Stored at `~/.config/open-data-agent/connections.yaml` +- Written atomically with `os.open(path, O_CREAT | O_WRONLY | O_TRUNC, 0o600)` — permissions set at creation, before any data is written +- Never in the project directory +- Password input via `click.prompt('Password', hide_input=True)` — never echoed + +### Active Connection State + +- Persisted in `~/.config/open-data-agent/active-connection` (single-line, connection name only) +- Survives shell restarts; updated by `oda connect ` + +### DSN Sanitisation + +DSNs are sanitised before logging: +```python +re.sub(r'(:)[^:@]+(@)', r'\1***\2', dsn) +``` + +### Secret Pattern Detection + +`HistoryTracker.append()` logs a `WARNING` if SQL matches any of: +- `password\s*=` +- `secret\s*=` +- `token\s*=` +- `api_key\s*=` + +--- + +## Query Safety + +Every SQL query passes through `SafetyChecker.validate()` before execution. There is no bypass. 
+ +### Whitelist (per-adapter `safe_prefixes`) + +| Adapter | Allowed prefixes | +|---------|-----------------| +| SQLite | `SELECT`, `WITH`, `EXPLAIN`, `PRAGMA` | +| PostgreSQL | `SELECT`, `WITH`, `EXPLAIN`, `SHOW`, `TABLE` | +| MySQL | `SELECT`, `WITH`, `EXPLAIN`, `SHOW`, `DESCRIBE` | + +### Universal Blocked Prefixes + +`INSERT`, `UPDATE`, `DELETE`, `DROP`, `CREATE`, `ALTER`, `TRUNCATE`, `REPLACE`, `MERGE`, `GRANT`, `REVOKE`, `CALL`, `EXEC`, `EXECUTE` + +### Dialect-Specific Dangerous Patterns (regex) + +| Dialect | Patterns | +|---------|----------| +| All | `;\s*DROP`, `;\s*DELETE`, `;\s*UPDATE`, `;\s*INSERT`, `;\s*CREATE`, `;\s*ALTER` | +| PostgreSQL | `COPY\s+\w`, `CREATE\s+EXTENSION`, `ALTER\s+SYSTEM` | +| MySQL | `LOAD\s+DATA`, `INTO\s+OUTFILE`, `\bGRANT\b` | +| SQLite | `ATTACH\s+DATABASE`, `DETACH\s+(DATABASE\s+)?` | + +### LIMIT Injection + +`QueryEngine._inject_limit()` behaviour: +- No LIMIT in SQL → append `\nLIMIT {row_limit}` (default: 1000), set `truncated=True` +- LIMIT present and ≤ `max_row_limit` → pass through unchanged +- LIMIT present and > `max_row_limit` → clamp to `max_row_limit`, set `truncated=True` + +LIMIT detection uses `_strip_literals_and_comments()` which blanks out single-quoted strings, `$$`-dollar-quoted strings (PostgreSQL), `/* */` block comments, and `--` line comments before pattern matching — preventing false positives from `LIMIT` appearing in string literals or comments. + +### Timeout + +Queries execute in a daemon thread. 
On timeout (`query_timeout_seconds`, default 30s): +- `conn.interrupt()` called (SQLite) +- `conn.cancel()` called (psycopg v3) +- No async cancel available for pymysql (daemon thread reaped at process exit) +- `result.error` set to `"Query timed out after {N}s"` +- `DiagnosticEngine` is **not** invoked on timeout (connection may be mid-query) + +--- + +## CLI Command Surface + +``` +oda init # first-run: create ~/.config dirs + defaults + +oda connect # activate connection + render OpenCode rules file +oda connections list # list all connections (no passwords shown) +oda connections add # interactive prompt +oda connections remove # remove a connection + +oda schemas # list schemas in active DB +oda tables [--schema X] # list tables +oda describe # columns + types +oda sample
[--limit N] # sample rows + +oda query "" [--format table|json|csv] [--strict] + # execute SQL; auto-logged to history + +oda docs generate [--enrich] [--schema X] [--skip-existing] +oda docs status # freshness report + +oda memory add [--title T] [--category C] [--content X] [--tags t1,t2] +oda memory list +oda memory search + +oda history list [--n N] # most recent N entries (default 20) +oda history search +oda history stats + +oda eval run [--domain X] +oda eval results [--last N] +oda eval add --id X --question "..." --sql "..." --dialects postgresql,sqlite +``` + +**Output conventions:** +- Results → `stdout` +- Errors and warnings → `stderr` (`Console(stderr=True)`) +- Success prefix: `[green]✓[/green]` +- Error prefix: `[red]✗[/red]` +- Warning prefix: `[yellow]⚠[/yellow]` +- All tabular output via `rich.table.Table` + +--- + +## Data Flows + +### User Query (end-to-end) + +``` +User natural language question + → OpenCode reads .opencode/rules/data-agent.md + → Agent reads docs/data-catalog/_index.md + → Agent navigates to relevant table docs + → Agent checks memory/ for known corrections + → Agent calls: oda query "SELECT ..." 
+ → cli_query.py resolves active connection + builds adapter + → QueryEngine.execute(sql) + → SafetyChecker.validate(sql) raises SafetyError if blocked + → _inject_limit(sql) adds/clamps LIMIT + → daemon thread executes sql timeout-protected + → HistoryTracker.append_result() writes to history.jsonl + → if error: DiagnosticEngine.diagnose() → stderr (unless timeout) + → if 0 rows: DiagnosticEngine.diagnose() → stderr + → rich.table to stdout + → Agent self-heals using diagnostic context if needed +``` + +### Schema Doc Generation + +``` +oda docs generate + → ConnectionManager → active connection + adapter + → SchemaInspector.get_schemas() adapter.introspect_schemas_sql() + → for each schema: + SchemaInspector.get_tables(schema) adapter.introspect_tables_sql(schema) + for each table: + SchemaInspector.get_columns() adapter.introspect_columns_sql() + adapter.normalize_column_row() → NormalizedColumn + SchemaInspector.get_sample() SELECT n+1 rows; truncated=True if >n exist + [if --enrich] SchemaInspector.get_profile() + DocGenerator._render_table_doc() → docs/data-catalog/{schema}/{table}.md + DocGenerator._write_schema_index() → docs/data-catalog/{schema}/_index.md + → DocGenerator._write_catalog_index() → docs/data-catalog/_index.md (with generated_at) +``` + +### `oda connect` (renders rules file) + +``` +oda connect + → ConnectionManager.verify_connection(name) test live connection + → write ~/.config/open-data-agent/active-connection + → render .opencode/rules/data-agent.md.template + substituting {connection_name} and {db_type} + → write .opencode/rules/data-agent.md (gitignored) + → check docs/data-catalog/_index.md staleness warn if stale or missing + → rich success output +``` + +--- + +## Configuration System + +Configuration resolves in four layers (later overrides earlier): + +| Layer | Source | Typical use | +|-------|--------|-------------| +| 1 | Hardcoded defaults | `row_limit=1000`, `timeout=30s` | +| 2 | `~/.config/open-data-agent/config.yaml` | 
User global preferences | +| 3 | Per-connection `options:` block in `connections.yaml` | Connection-specific overrides | +| 4 | CLI flags | Runtime one-offs | + +Always access via: +```python +from open_data_agent.config import get_config +config = get_config() +``` + +Never read YAML config files directly from other modules. + +Config directory layout: + +| Path | Purpose | Permissions | +|------|---------|-------------| +| `~/.config/open-data-agent/connections.yaml` | Named DB connections | `0o600` | +| `~/.config/open-data-agent/active-connection` | Current active connection name | standard | +| `~/.config/open-data-agent/config.yaml` | Global preferences | standard | +| `~/.config/open-data-agent/history.jsonl` | Query history log | standard | + +--- + +## Dialect Adapter System + +`DialectAdapter` (ABC in `db/dialect.py`) is the highest-complexity component. All dialect-specific SQL must live inside adapter subclasses — never inline in other modules. + +### Abstract Interface + +```python +class DialectAdapter(ABC): + # Quoting + def quote_identifier(self, name: str) -> str: ... + + # Date/Time SQL fragments + def date_trunc(self, field: str, column: str) -> str: ... + def current_timestamp(self) -> str: ... + def interval(self, n: int, unit: str) -> str: ... + + # Safety + @property + def safe_prefixes(self) -> list[str]: ... + @property + def dangerous_patterns(self) -> list[str]: ... + + # Introspection + def introspect_schemas_sql(self) -> str: ... + def introspect_tables_sql(self, schema: str) -> str: ... + def introspect_columns_sql(self, schema: str, table: str) -> str: ... + def normalize_column_row(self, row: dict[str, Any]) -> NormalizedColumn: ... 
+``` + +### Adapter Implementations + +| Adapter | Driver | Quote style | Schema source | Special notes | +|---------|--------|-------------|---------------|---------------| +| `SQLiteAdapter` | `sqlite3` (stdlib) | `"name"` | `sqlite_master` | Uses `PRAGMA table_info()`; no named schemas (returns `main`) | +| `PostgreSQLAdapter` | `psycopg` v3 | `"name"` | `information_schema` + `pg_catalog` | `autocommit=True` required; column comments from `pg_description` | +| `MySQLAdapter` | `pymysql` | `` `name` `` | `information_schema` | `COLUMN_COMMENT` mapped to `None` when empty string | + +### Introspection SQL Pattern + +`SchemaInspector` calls adapter methods to get SQL, then passes bind parameters separately to prevent SQL injection: + +```python +# PostgreSQL / MySQL: bind params prevent schema/table name injection +cursor = conn.execute(adapter.introspect_tables_sql(schema), (schema,)) + +# SQLite: PRAGMA executed directly via adapter helper (no bind param API) +columns = adapter.execute_introspect_columns(conn, schema, table) +``` + +### Extending with a New Adapter + +1. Subclass `DialectAdapter` in `db/dialect.py` +2. Implement all abstract methods +3. Register in `db/connection.py` driver construction block +4. Add to `__all__` in `dialect.py` +5. Add unit tests in `tests/unit/db/` + +--- + +## Testing Strategy + +### Unit Tests (no external DB required) + +All unit tests use an in-memory SQLite database with the standard fixture schema: + +```sql +CREATE TABLE customers (id INTEGER PRIMARY KEY, name TEXT NOT NULL, + email TEXT, created_at TEXT); +CREATE TABLE orders (id INTEGER PRIMARY KEY, customer_id INTEGER, + amount REAL, status TEXT, created_at TEXT); +CREATE TABLE products (id INTEGER PRIMARY KEY, name TEXT, + price REAL, category TEXT); +``` + +CLI tests use `click.testing.CliRunner`. External DB calls are mocked. + +### Integration Tests (require live DB) + +Gated behind `@pytest.mark.integration`. 
Run locally only: + +```bash +docker compose up -d # start PostgreSQL + MySQL +uv run pytest -m integration +docker compose down +``` + +### Test Naming Convention + +``` +test_{unit_under_test}_{scenario}_{expected_outcome} +``` + +Examples: +- `test_safety_checker_drop_table_raises_safety_error` +- `test_inject_limit_existing_over_max_clamps_to_max_row_limit` +- `test_memory_search_reads_each_file_once` + +### CI + +GitHub Actions runs on every push: +1. `ruff check .` +2. `mypy src/` +3. `pytest tests/unit/` (SQLite in-memory only — no external services) + +--- + +## Development Workflow + +```bash +# First-time setup +uv sync # install all deps from uv.lock +uv run oda init # create ~/.config/open-data-agent/ + defaults +uv run oda connections add # interactive: add first DB connection +uv run oda connect my-db # activate connection + render rules file +uv run oda docs generate # populate docs/data-catalog/ + +# Daily development +uv run pytest tests/unit/ -q # unit tests +uv run ruff check . # lint +uv run mypy src/ # type check + +# Integration testing +docker compose up -d +uv run pytest -m integration +docker compose down +``` + +--- + +## Coding Conventions + +### Mandatory Rules + +1. **`cli.py` is a registry only** — zero business logic; only `add_command()` calls +2. **`adapter.quote_identifier()`** for all DB object references — never raw string interpolation +3. **All dialect-specific SQL inside adapter** — never inline in other modules +4. **Safety check before every execution** — `SafetyChecker.validate(sql)` is not optional +5. **Rich for all terminal output** — never `print()` for results; use `console.print()` +6. **Errors to stderr** — `err_console = Console(stderr=True)`; never stdout for errors +7. **Logger namespaced** — `logging.getLogger("open_data_agent.{module}")` +8. **Config via `get_config()`** — never open YAML files directly in other modules +9. **Raise only from `exceptions.py`** — never invent new exception classes elsewhere +10. 
**Type annotations on all function signatures** +11. **`__all__` in every module** + +### Patterns + +**Connection handling — always guard with try/finally:** + +```python +conn: Any = None +try: + conn = driver.connect(...) + # ... use conn ... +finally: + if conn is not None: + conn.close() +``` + +**Dialect SQL — never inline:** + +```python +# Correct +sql = f"WHERE {adapter.current_timestamp()} > {adapter.interval(7, 'day')}" + +# Wrong +sql = "WHERE NOW() > INTERVAL '7 days'" +``` + +**CLI output:** + +```python +console = Console() +err_console = Console(stderr=True) + +console.print("[green]✓[/green] Done") # success → stdout +err_console.print("[red]✗[/red] Failed: ...") # error → stderr +err_console.print("[yellow]⚠[/yellow] Warning") # warning → stderr +``` + +### OpenCode Rules File Structure + +`.opencode/rules/data-agent.md` must follow this 6-block template: + +```markdown +# Data Agent — {connection_name} ({db_type}) + +## 1. Identity +You are a data agent connected to {db_type} database '{connection_name}'. + +## 2. Available Tools +[CLI commands the agent can invoke, with examples] + +## 3. Knowledge Layer +Schema docs are at docs/data-catalog/. +Always read _index.md first, then navigate to specific table docs. +Warn the user if generated_at is older than 7 days. + +## 4. Memory Layer +Curated knowledge is in memory/. +Check for corrections before writing SQL for any column with known issues. + +## 5. Self-Healing +When queries return 0 rows or unexpected results: +1. Check diagnostic output for table row counts and column value samples +2. Verify date filter ranges against the column range shown +3. Check memory/ for known issues with these columns +4. Try a broader query first, then narrow down + +## 6. 
Safety Rules +- Never execute INSERT, UPDATE, DELETE, DROP, or other write operations +- Never expose connection credentials +- Always confirm before running queries that affect >1000 rows +``` diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..2cef72b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,56 @@ +[project] +name = "open-data-agent" +version = "0.1.0" +description = "AI-augmented data agent CLI for natural language database querying" +requires-python = ">=3.12" +dependencies = [ + "click==8.3.1", + "rich==14.3.3", + "pyyaml>=6.0", + "psycopg[binary]>=3.1", + "pymysql>=1.1", + "keyring>=25.0", +] + +[project.scripts] +oda = "open_data_agent.cli:cli" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/open_data_agent"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +markers = [ + "integration: marks tests as integration tests requiring live DB (deselect with '-m \"not integration\"')", +] +addopts = "-m 'not integration'" + +[tool.ruff] +line-length = 100 +target-version = "py312" + +[tool.ruff.lint] +select = ["E", "F", "I", "UP", "B", "SIM"] +ignore = ["E501"] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" + +[tool.mypy] +python_version = "3.12" +strict = true + +[dependency-groups] +dev = [ + "pytest==9.0.2", + "pytest-cov>=7.0.0", + "ruff>=0.15.5", + "mypy>=1.10", + "types-pyyaml>=6.0.12.20250915", + "types-pymysql>=1.1.0.20251220", +] diff --git a/src/open_data_agent/__init__.py b/src/open_data_agent/__init__.py new file mode 100644 index 0000000..659f3e7 --- /dev/null +++ b/src/open_data_agent/__init__.py @@ -0,0 +1,4 @@ +"""open-data-agent — AI-augmented CLI for natural language database querying.""" + +__version__ = "0.1.0" +__all__ = ["__version__"] diff --git a/src/open_data_agent/cli.py b/src/open_data_agent/cli.py new file mode 100644 index 0000000..2056553 --- /dev/null +++ b/src/open_data_agent/cli.py @@ -0,0 
@connections.command(name="add")
def connections_add() -> None:
    """Prompt the user for connection details and persist them.

    SQLite connections only need a file path; network databases are asked
    for host/port/database/credentials. Saving is delegated to
    ConnectionManager; a ConfigError aborts with exit code 1.
    """
    manager = ConnectionManager()

    conn_name = click.prompt("Connection name", type=str)
    kind = click.prompt(
        "Database type",
        type=click.Choice(["postgresql", "mysql", "sqlite"]),
    )

    if kind == "sqlite":
        # File-backed database: host/port/credentials are placeholders.
        conn_params = {
            "db_type": kind,
            "host": "localhost",
            "port": 0,
            "database": click.prompt("Database file path (or :memory:)"),
            "username": "",
            "password": "",
        }
    else:
        # Dict-literal evaluation order keeps the original prompt sequence:
        # Host, Port, Database name, Username, Password.
        conn_params = {
            "db_type": kind,
            "host": click.prompt("Host", default="localhost"),
            "port": click.prompt(
                "Port",
                default=5432 if kind == "postgresql" else 3306,
                type=int,
            ),
            "database": click.prompt("Database name"),
            "username": click.prompt("Username"),
            "password": click.prompt("Password", hide_input=True),
        }

    try:
        manager.save_connection(conn_name, conn_params)
    except ConfigError as exc:
        err_console.print(f"[red]✗[/red] {exc}")
        raise SystemExit(1) from exc
    console.print(f"[green]✓[/green] Connection '{conn_name}' saved.")
@connections.command(name="test")
@click.argument("name")
def connections_test(name: str) -> None:
    """Test connectivity for the named connection.

    Looks up the stored parameters, opens a short-lived connection for the
    configured db_type, runs ``SELECT 1``, and reports the outcome.
    Exits with code 1 on an unknown connection name, an unsupported
    db_type, or any connection/query error.
    """
    mgr = ConnectionManager()
    try:
        params = mgr.get_connection(name)
    except ConfigError as exc:
        err_console.print(f"[red]✗[/red] {exc}")
        raise SystemExit(1) from exc

    db_type = params["db_type"]
    try:
        if db_type == "sqlite":
            import sqlite3

            _conn = sqlite3.connect(params["database"], check_same_thread=False)
            try:
                _conn.execute("SELECT 1")
            finally:
                # Ensure the handle is released even when SELECT 1 fails.
                _conn.close()

        elif db_type == "postgresql":
            import psycopg

            with psycopg.connect(
                host=params["host"],
                port=int(params["port"]),
                dbname=params["database"],
                user=params["username"],
                password=params["password"],
                autocommit=True,
            ) as _pg:
                _pg.execute("SELECT 1")

        elif db_type == "mysql":
            import pymysql

            conn_m = pymysql.connect(
                host=params["host"],
                port=int(params["port"]),
                database=params["database"],
                user=params["username"],
                password=params["password"],
            )
            try:
                # pymysql cursors support the context-manager protocol.
                with conn_m.cursor() as cursor:
                    cursor.execute("SELECT 1")
            finally:
                conn_m.close()

        else:
            # BUG FIX: previously an unrecognised db_type matched no branch
            # and fell through to the success message without testing
            # anything. SystemExit is not an Exception, so it propagates
            # past the handler below.
            err_console.print(f"[red]✗[/red] Unsupported db_type: {db_type}")
            raise SystemExit(1)

        console.print(f"[green]✓[/green] Connection successful: {name} ({db_type})")

    except Exception as exc:
        err_console.print(f"[red]✗[/red] Connection failed: {exc}")
        raise SystemExit(1) from exc
No-op if template missing.""" + import importlib.resources + from datetime import datetime + from pathlib import Path + + template_content: str | None = None + + if hasattr(importlib.resources, "files"): + try: + ref = importlib.resources.files("open_data_agent") / "templates" / "data-agent.md.tmpl" + template_content = ref.read_text(encoding="utf-8") + except (FileNotFoundError, OSError, TypeError): + pass + + if template_content is None: + fallback_paths = [ + Path(".opencode") / "rules" / "data-agent.md.template", + ] + for tpath in fallback_paths: + if tpath.exists(): + template_content = tpath.read_text() + break + + if template_content is None: + return # Template not found — skip silently + + rendered = template_content.replace("{{connection_name}}", str(name)) + rendered = rendered.replace("{{db_type}}", str(params.get("db_type", ""))) + rendered = rendered.replace("{{db_host}}", str(params.get("host", ""))) + rendered = rendered.replace("{{db_name}}", str(params.get("database", ""))) + rendered = rendered.replace("{{generated_at}}", datetime.now(UTC).isoformat(timespec="seconds")) + + rules_dir = Path(".opencode") / "rules" + rules_dir.mkdir(parents=True, exist_ok=True) + rules_file = rules_dir / "data-agent.md" + rules_file.write_text(rendered) + + console.print(f"[green]✓[/green] OpenCode rules written to {rules_file}") diff --git a/src/open_data_agent/cli_docs.py b/src/open_data_agent/cli_docs.py new file mode 100644 index 0000000..de26bd5 --- /dev/null +++ b/src/open_data_agent/cli_docs.py @@ -0,0 +1,188 @@ +"""oda docs commands — generate and check schema documentation.""" + +from __future__ import annotations + +import logging +from pathlib import Path +from typing import Any + +import click +from rich.console import Console +from rich.progress import Progress, SpinnerColumn, TextColumn +from rich.table import Table + +from open_data_agent.config import get_config +from open_data_agent.db.connection import ConnectionManager +from 
def _get_generator() -> tuple[DocGenerator, Any]:
    """Resolve the active connection and wrap it in a DocGenerator.

    Returns a (generator, raw_connection) pair; the caller must close the
    connection when done. Exits with code 1 when no connection is active
    or the stored db_type is unsupported.
    """
    mgr = ConnectionManager()
    active = mgr.get_active_connection()
    if active is None:
        err_console.print(
            "[red]✗[/red] No active connection. Run [bold]oda connect [/bold] first."
        )
        raise SystemExit(1)

    params = mgr.get_connection(active)
    db_type = params["db_type"]

    conn: Any = None
    adapter: DialectAdapter
    try:
        if db_type == "sqlite":
            import sqlite3

            conn = sqlite3.connect(params["database"], check_same_thread=False)
            conn.row_factory = sqlite3.Row
            adapter = SQLiteAdapter()
        elif db_type == "postgresql":
            import psycopg

            conn = psycopg.connect(
                host=params["host"],
                port=int(params["port"]),
                dbname=params["database"],
                user=params["username"],
                password=params["password"],
                autocommit=True,
            )
            adapter = PostgreSQLAdapter()
        elif db_type == "mysql":
            import pymysql

            conn = pymysql.connect(
                host=params["host"],
                port=int(params["port"]),
                database=params["database"],
                user=params["username"],
                password=params["password"],
            )
            adapter = MySQLAdapter()
        else:
            err_console.print(f"[red]✗[/red] Unsupported db_type: {db_type}")
            raise SystemExit(1)
    except (Exception, SystemExit):
        # The two original handlers had identical cleanup bodies; one
        # tuple-except covers both. Close whatever was opened, re-raise.
        if conn is not None:
            conn.close()
        raise

    try:
        inspector = SchemaInspector(adapter, conn)
        config = get_config()
        generator = DocGenerator(
            inspector, db_type=db_type, profile_timeout_seconds=config.query_timeout_seconds
        )
        return generator, conn
    except Exception:
        conn.close()
        raise
+ """ + config = get_config() + out = Path(output_dir) if output_dir else _DEFAULT_DOCS_DIR + + conn = None + try: + gen, conn = _get_generator() + except SystemExit: + raise + + try: + status = gen.get_status(out, staleness_days=config.docs_staleness_days) + finally: + if conn is not None: + conn.close() + + t = Table(title=f"Docs Status: {out}") + t.add_column("Metric") + t.add_column("Count", justify="right") + t.add_row("Total tables", str(status["total"])) + t.add_row("[green]Up to date[/green]", str(status["up_to_date"])) + t.add_row("[yellow]Stale[/yellow]", str(status["stale"])) + t.add_row("[red]Missing[/red]", str(status["missing"])) + console.print(t) + + if status["stale"] > 0 or status["missing"] > 0: + err_console.print( + "[yellow]⚠[/yellow] Docs are stale or missing. " + "Run [bold]oda docs generate[/bold] to refresh." + ) + raise SystemExit(1) diff --git a/src/open_data_agent/cli_eval.py b/src/open_data_agent/cli_eval.py new file mode 100644 index 0000000..e78ba3a --- /dev/null +++ b/src/open_data_agent/cli_eval.py @@ -0,0 +1,300 @@ +"""oda eval commands — run, results, add.""" + +from __future__ import annotations + +import json +import logging +from pathlib import Path +from typing import Any + +import click +import yaml +from rich.console import Console +from rich.table import Table + +from open_data_agent.eval_runner import EvalRunner +from open_data_agent.exceptions import ConfigError + +__all__ = ["eval_group"] + +logger = logging.getLogger("open_data_agent.cli_eval") +console = Console() +err_console = Console(stderr=True) + +_GOLDEN_QUERIES_PATH = Path("evals") / "golden_queries.yaml" +_RESULTS_PATH = Path("evals") / "results" / "latest.json" +_VALID_DIALECTS = ["postgresql", "mysql", "sqlite"] + + +def _get_active_connection() -> tuple[str, str] | None: + """Return (connection_name, db_type) from active connection state file, or None.""" + from open_data_agent.config import get_config_dir + + active_file = get_config_dir() / 
"active-connection" + if not active_file.exists(): + return None + name = active_file.read_text().strip() + if not name: + return None + conn_file = get_config_dir() / "connections.yaml" + if conn_file.exists(): + try: + data = yaml.safe_load(conn_file.read_text()) + if isinstance(data, dict) and name in data: + db_type = data[name].get("db_type", "sqlite") + return name, db_type + except Exception: + pass + return name, "sqlite" + + +def _print_results_table(results: list[dict[str, Any]], title: str = "Eval Results") -> None: + t = Table(title=title) + t.add_column("ID", style="dim", width=12) + t.add_column("Question", max_width=40) + t.add_column("Passed", justify="center", width=8) + t.add_column("Rows", justify="right", width=6) + t.add_column("ms", justify="right", width=8) + t.add_column("Error", max_width=30) + + for r in results: + passed_str = "✓" if r.get("passed") else "✗" + passed_style = "green" if r.get("passed") else "red" + t.add_row( + str(r.get("id", ""))[:12], + str(r.get("question", ""))[:40], + f"[{passed_style}]{passed_str}[/{passed_style}]", + str(r.get("row_count", "")), + str(round(float(r.get("duration_ms", 0)), 1)), + str(r.get("error") or "")[:30], + ) + console.print(t) + + +@click.group(name="eval") +def eval_group() -> None: + """Golden SQL evaluation suite.""" + + +@eval_group.command(name="run") +@click.option( + "--path", + "golden_path", + default=str(_GOLDEN_QUERIES_PATH), + help="Path to golden_queries.yaml", +) +@click.option("--min-rows", default=1, type=int, help="Minimum rows for a passing query") +def eval_run(golden_path: str, min_rows: int) -> None: + """Run the golden SQL regression suite for the active connection.""" + active = _get_active_connection() + if active is None: + err_console.print( + "[red]✗[/red] No active connection. Run [bold]oda connect [/bold] first." 
+ ) + raise SystemExit(1) + + conn_name, db_type = active + path = Path(golden_path) + + runner = EvalRunner() + try: + all_entries = runner.load(path) + except ConfigError as exc: + err_console.print(f"[red]✗[/red] {exc}") + raise SystemExit(1) from exc + + entries = runner.filter_for_dialect(all_entries, db_type) + if not entries: + console.print( + f"No golden queries found for dialect '{db_type}'. Add some with 'oda eval add'." + ) + return + + from open_data_agent.config import get_config + from open_data_agent.db.connection import ConnectionManager + from open_data_agent.db.dialect import ( + DialectAdapter, + MySQLAdapter, + PostgreSQLAdapter, + SQLiteAdapter, + ) + from open_data_agent.db.query import QueryEngine + + mgr = ConnectionManager() + conn_params = mgr.get_connection(conn_name) + + raw_conn: Any = None + adapter: DialectAdapter + try: + if db_type == "sqlite": + adapter = SQLiteAdapter() + import sqlite3 + + raw_conn = sqlite3.connect( + conn_params.get("database", ":memory:"), check_same_thread=False + ) + elif db_type == "postgresql": + adapter = PostgreSQLAdapter() + import psycopg + + raw_conn = psycopg.connect( + host=conn_params["host"], + port=int(conn_params["port"]), + dbname=conn_params["database"], + user=conn_params["username"], + password=conn_params["password"], + autocommit=True, + ) + elif db_type == "mysql": + adapter = MySQLAdapter() + import pymysql + + raw_conn = pymysql.connect( + host=conn_params["host"], + port=int(conn_params["port"]), + database=conn_params["database"], + user=conn_params["username"], + password=conn_params["password"], + ) + else: + err_console.print(f"[red]✗[/red] Unsupported db_type: {db_type}") + raise SystemExit(1) + except SystemExit: + if raw_conn is not None: + raw_conn.close() + raise + except Exception: + if raw_conn is not None: + raw_conn.close() + raise + + config = get_config() + engine = QueryEngine( + adapter, raw_conn, config=config, connection_name=conn_name, db_type=db_type + ) + 
results: list[dict[str, Any]] = [] + try: + results = runner.run(entries, adapter, engine, min_rows=min_rows) + finally: + if raw_conn is not None: + raw_conn.close() + + _print_results_table(results) + + passed = sum(1 for r in results if r.get("passed")) + total = len(results) + console.print(f"\nPassed: {passed} / {total}") + + try: + _RESULTS_PATH.parent.mkdir(parents=True, exist_ok=True) + _RESULTS_PATH.write_text(json.dumps(results, indent=2), encoding="utf-8") + console.print(f"Results written to {_RESULTS_PATH}") + except OSError as exc: + err_console.print(f"[yellow]⚠[/yellow] Could not write results file: {exc}") + + if passed < total: + raise SystemExit(1) + + +@eval_group.command(name="results") +@click.option( + "--path", + "results_path", + default=str(_RESULTS_PATH), + help="Path to results JSON file", +) +def eval_results(results_path: str) -> None: + """Print the latest eval run results.""" + path = Path(results_path) + if not path.exists(): + console.print("No eval results found. Run 'oda eval run' first.") + return + + try: + results = json.loads(path.read_text(encoding="utf-8")) + except Exception as exc: + err_console.print(f"[red]✗[/red] Failed to read results: {exc}") + raise SystemExit(1) from exc + + if not results: + console.print("No eval results found. Run 'oda eval run' first.") + return + + _print_results_table(results, title=f"Eval Results — {path}") + + passed = sum(1 for r in results if r.get("passed")) + total = len(results) + console.print(f"\nPassed: {passed} / {total}") + + +@eval_group.command(name="add") +@click.option("--id", "entry_id", default=None, help="Explicit entry ID (e.g. 
sales-005)") +@click.option("--question", default=None, help="Natural language question") +@click.option("--sql", "sql_str", default=None, help="Golden SQL query") +@click.option( + "--dialects", + default=None, + help="Comma-separated dialects: postgresql,mysql,sqlite", +) +@click.option( + "--path", + "golden_path", + default=str(_GOLDEN_QUERIES_PATH), + help="Path to golden_queries.yaml", +) +def eval_add( + entry_id: str | None, + question: str | None, + sql_str: str | None, + dialects: str | None, + golden_path: str, +) -> None: + """Add a new golden query to the eval suite.""" + path = Path(golden_path) + runner = EvalRunner() + + try: + existing = runner.load(path) + except ConfigError as exc: + err_console.print(f"[red]✗[/red] {exc}") + raise SystemExit(1) from exc + + if question is None: + question = click.prompt("Question (natural language)") + + if sql_str is None: + sql_str = click.prompt("SQL") + + if dialects is None: + dialects_input = click.prompt( + f"Dialects (comma-separated, choices: {', '.join(_VALID_DIALECTS)})", + default="sqlite", + ) + dialects = dialects_input + + dialect_list = [d.strip() for d in dialects.split(",") if d.strip()] + for d in dialect_list: + if d not in _VALID_DIALECTS: + err_console.print( + f"[red]✗[/red] Invalid dialect '{d}'. Must be one of: {', '.join(_VALID_DIALECTS)}" + ) + raise SystemExit(1) + + if entry_id is None: + domain = click.prompt("Domain (e.g. 
sales, orders)") + entry_id = runner.next_id_for_domain(existing, domain) + + entry: dict[str, Any] = { + "id": entry_id, + "question": question, + "sql": sql_str, + "dialects": dialect_list, + "active": True, + } + + try: + runner.append_entry(path, entry, existing) + console.print(f"✓ Added eval entry '{entry_id}' to {path}") + except ConfigError as exc: + err_console.print(f"[red]✗[/red] {exc}") + raise SystemExit(1) from exc diff --git a/src/open_data_agent/cli_history.py b/src/open_data_agent/cli_history.py new file mode 100644 index 0000000..85d3972 --- /dev/null +++ b/src/open_data_agent/cli_history.py @@ -0,0 +1,129 @@ +"""oda history commands — list, search, stats.""" + +from __future__ import annotations + +import logging +from collections import Counter + +import click +from rich.console import Console +from rich.table import Table + +from open_data_agent.history import HistoryTracker + +__all__ = ["history"] + +logger = logging.getLogger("open_data_agent.cli_history") +console = Console() + + +@click.group(name="history") +def history() -> None: + """Browse and search query execution history.""" + + +@history.command(name="list") +@click.option("--n", default=20, type=int, help="Number of recent entries to show") +def history_list(n: int) -> None: + """Show the most recent query history entries.""" + tracker = HistoryTracker() + entries = list(tracker.iter_entries()) + + if not entries: + console.print("No history found.") + return + + entries = entries[-n:][::-1] + + t = Table(title=f"Recent Queries (last {len(entries)})") + t.add_column("ID", style="dim", width=8) + t.add_column("Timestamp", width=20) + t.add_column("Connection") + t.add_column("SQL", max_width=60) + t.add_column("Rows", justify="right") + t.add_column("ms", justify="right") + + for entry in entries: + t.add_row( + str(entry.get("id", ""))[:8], + str(entry.get("timestamp", ""))[:19], + str(entry.get("connection", "")), + str(entry.get("sql", ""))[:60], + str(entry.get("row_count", 
"")), + str(round(float(entry.get("duration_ms", 0)), 1)), + ) + console.print(t) + + +@history.command(name="search") +@click.argument("term") +def history_search(term: str) -> None: + """Search query history for TERM (case-insensitive).""" + tracker = HistoryTracker() + term_lower = term.lower() + matches = [ + e + for e in tracker.iter_entries() + if term_lower in str(e.get("sql", "")).lower() + or term_lower in str(e.get("question", "")).lower() + ] + + if not matches: + console.print(f"No history found matching '{term}'.") + return + + t = Table(title=f"History matching '{term}'") + t.add_column("ID", style="dim", width=8) + t.add_column("Timestamp", width=20) + t.add_column("Connection") + t.add_column("SQL", max_width=60) + t.add_column("Rows", justify="right") + + for entry in matches: + t.add_row( + str(entry.get("id", ""))[:8], + str(entry.get("timestamp", ""))[:19], + str(entry.get("connection", "")), + str(entry.get("sql", ""))[:60], + str(entry.get("row_count", "")), + ) + console.print(t) + + +@history.command(name="stats") +def history_stats() -> None: + """Show summary statistics for query history.""" + tracker = HistoryTracker() + entries = list(tracker.iter_entries()) + + if not entries: + console.print("No history found.") + return + + total = len(entries) + errors = sum(1 for e in entries if e.get("error")) + error_rate = (errors / total * 100) if total else 0 + durations = [ + float(e.get("duration_ms", 0)) for e in entries if e.get("duration_ms") is not None + ] + avg_duration = sum(durations) / len(durations) if durations else 0 + + connection_counts = Counter(str(e.get("connection", "")) for e in entries) + + all_tables: list[str] = [] + for e in entries: + tables = e.get("tables") or [] + all_tables.extend(tables) + top_tables = Counter(all_tables).most_common(5) + + t = Table(title="Query History Stats") + t.add_column("Metric") + t.add_column("Value", justify="right") + t.add_row("Total queries", str(total)) + t.add_row("Error rate", 
f"{error_rate:.1f}%") + t.add_row("Avg duration (ms)", f"{avg_duration:.1f}") + for conn, count in connection_counts.most_common(3): + t.add_row(f" Connection: {conn}", str(count)) + for tbl, count in top_tables: + t.add_row(f" Table: {tbl}", str(count)) + console.print(t) diff --git a/src/open_data_agent/cli_init.py b/src/open_data_agent/cli_init.py new file mode 100644 index 0000000..ec7c21d --- /dev/null +++ b/src/open_data_agent/cli_init.py @@ -0,0 +1,69 @@ +"""oda init — first-run onboarding command. + +Creates ~/.config/open-data-agent/ with default config files and prints +a structured next-steps guide. +""" + +from __future__ import annotations + +import os + +import click +import yaml +from rich.console import Console + +from open_data_agent.config import get_config_dir + +__all__ = ["init"] + +console = Console() + +_DEFAULT_CONFIG_YAML: dict[str, object] = { + "row_limit": 1000, + "max_row_limit": 10000, + "query_timeout_seconds": 30, + "docs_staleness_days": 7, + "log_level": "INFO", + "strict_mode": False, +} + + +@click.command(name="init") +def init() -> None: + """Initialise the oda config directory with default settings. + + Creates ~/.config/open-data-agent/ and required config files if they + do not already exist. Safe to run multiple times — existing files are + never overwritten. 
+ """ + config_dir = get_config_dir() + + if config_dir.exists(): + console.print("Already initialised.") + return + + # Create directory + config_dir.mkdir(parents=True, exist_ok=True) + console.print(f"[green]✓[/green] Created {config_dir}/") + + # Create connections.yaml with secure permissions (owner read/write only) + connections_path = config_dir / "connections.yaml" + fd = os.open(str(connections_path), os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o600) + with os.fdopen(fd, "w") as f: + yaml.dump({}, f) + console.print("[green]✓[/green] Created connections.yaml (chmod 600)") + + # Create config.yaml with defaults + config_path = config_dir / "config.yaml" + config_path.write_text(yaml.dump(_DEFAULT_CONFIG_YAML, default_flow_style=False)) + console.print("[green]✓[/green] Created config.yaml with defaults") + + console.print( + "\nNext steps:\n" + " 1. Add a connection: oda connections add\n" + r" 2. Activate it: oda connect " + "\n" + " 3. Generate schema docs: oda docs generate\n" + ' 4. Start querying! 
oda query "SELECT ..."\n' + "\nOpenCode users: rules file will be at .opencode/rules/data-agent.md\n" + r"after you run 'oda connect '" + ) diff --git a/src/open_data_agent/cli_memory.py b/src/open_data_agent/cli_memory.py new file mode 100644 index 0000000..cd4c440 --- /dev/null +++ b/src/open_data_agent/cli_memory.py @@ -0,0 +1,134 @@ +"""oda memory commands — add, list, search.""" + +from __future__ import annotations + +import logging + +import click +from rich.console import Console +from rich.table import Table + +from open_data_agent.exceptions import ConfigError +from open_data_agent.memory import MemoryManager + +__all__ = ["memory"] + +logger = logging.getLogger("open_data_agent.cli_memory") +console = Console() +err_console = Console(stderr=True) + +_CATEGORIES = ["query_pattern", "business_context", "correction", "data_quality"] + + +@click.group(name="memory") +def memory() -> None: + """Manage curated knowledge files for AI agent context.""" + + +@memory.command(name="add") +@click.option("--title", default=None, help="Memory entry title") +@click.option( + "--category", + default=None, + type=click.Choice(_CATEGORIES), + help="Knowledge category", +) +@click.option("--content", default=None, help="Markdown body content") +@click.option("--tags", default=None, help="Comma-separated tags") +def memory_add( + title: str | None, + category: str | None, + content: str | None, + tags: str | None, +) -> None: + """Add a new knowledge entry to the memory directory. + + If flags are not provided, prompts interactively. 
+ """ + if title is None: + title = click.prompt("Title") + if category is None: + category = click.prompt( + "Category", + type=click.Choice(_CATEGORIES), + show_choices=True, + ) + if content is None: + console.print("Content (enter text, finish with a line containing only '.'):") + lines: list[str] = [] + while True: + try: + line = input() + except EOFError: + break + if line == ".": + break + lines.append(line) + content = "\n".join(lines) + + tag_list: list[str] = [] + if tags: + tag_list = [t.strip() for t in tags.split(",") if t.strip()] + + mgr = MemoryManager() + try: + path = mgr.add(title, content, category, tags=tag_list) + console.print(f"✓ Memory saved to {path}") + except ConfigError as exc: + err_console.print(f"✗ {exc}", style="red") + raise SystemExit(1) from exc + + +@memory.command(name="list") +def memory_list() -> None: + """List all memory entries.""" + mgr = MemoryManager() + entries = mgr.list_entries() + + if not entries: + console.print("No memory entries found.") + return + + t = Table(title="Memory Entries") + t.add_column("Title") + t.add_column("Category") + t.add_column("Created At", width=20) + t.add_column("Tags") + + for entry in entries: + tags = entry.get("tags") or [] + t.add_row( + str(entry.get("title", "")), + str(entry.get("category", "")), + str(entry.get("created_at", ""))[:19], + ", ".join(str(t_) for t_ in tags), + ) + console.print(t) + + +@memory.command(name="search") +@click.argument("term") +def memory_search(term: str) -> None: + """Search memory entries by TERM (case-insensitive, matches title/content/tags).""" + mgr = MemoryManager() + results = mgr.search(term) + + if not results: + console.print(f"No memory entries found matching '{term}'.") + return + + t = Table(title=f"Memory matching '{term}'") + t.add_column("Title") + t.add_column("Category") + t.add_column("Created At", width=20) + t.add_column("Tags") + + for entry in results: + tags = entry.get("tags") or [] + t.add_row( + str(entry.get("title", 
"")), + str(entry.get("category", "")), + str(entry.get("created_at", ""))[:19], + ", ".join(str(t_) for t_ in tags), + ) + console.print(t) diff --git a/src/open_data_agent/cli_query.py b/src/open_data_agent/cli_query.py new file mode 100644 index 0000000..f729878 --- /dev/null +++ b/src/open_data_agent/cli_query.py @@ -0,0 +1,218 @@ +"""oda query — execute SQL safely with auto-logging.""" + +from __future__ import annotations + +import logging +import warnings +from pathlib import Path +from typing import Any + +import click +from rich.console import Console +from rich.table import Table + +from open_data_agent.config import get_config +from open_data_agent.db.connection import ConnectionManager +from open_data_agent.db.diagnostics import DiagnosticEngine +from open_data_agent.db.dialect import ( + DialectAdapter, + MySQLAdapter, + PostgreSQLAdapter, + SQLiteAdapter, +) +from open_data_agent.db.query import QueryEngine +from open_data_agent.exceptions import SafetyError, StaleDocsWarning + +__all__ = ["query"] + +logger = logging.getLogger("open_data_agent.cli_query") +console = Console() +err_console = Console(stderr=True) + +_DEFAULT_DOCS_DIR = Path("docs") / "data-catalog" + + +def _is_docs_stale(staleness_days: int) -> bool: + """Return True if the catalog docs are stale or missing (reads _index.md frontmatter only).""" + from datetime import UTC, datetime + + catalog_index = _DEFAULT_DOCS_DIR / "_index.md" + + if not catalog_index.exists(): + return True + + try: + content = catalog_index.read_text() + lines = content.splitlines() + + if not lines or lines[0].strip() != "---": + return True # No frontmatter — treat as stale + for line in lines[1:]: + stripped = line.strip() + if stripped == "---": + break + if stripped.startswith("generated_at:"): + ts_str = stripped[len("generated_at:") :].strip().strip('"') + ts = datetime.fromisoformat(ts_str) + if ts.tzinfo is None: + ts = ts.replace(tzinfo=UTC) + age_days = (datetime.now(UTC) - ts).days + return 
age_days > staleness_days + + except Exception: + return True + + return True # no generated_at found in frontmatter + + +@click.command(name="query") +@click.argument("sql") +@click.option("--strict", is_flag=True, help="Block query if schema docs are stale") +@click.option("--format", "fmt", type=click.Choice(["table", "json", "csv"]), default="table") +def query(sql: str, strict: bool, fmt: str) -> None: + """Execute a read-only SQL query against the active database. + + All queries are safety-checked and auto-limited. + Results are auto-logged to query history. + + Example: + oda query "SELECT * FROM customers" + """ + config = get_config() + effective_strict = strict or config.strict_mode + + mgr = ConnectionManager() + active = mgr.get_active_connection() + if active is None: + err_console.print( + "[red]✗[/red] No active connection. Run [bold]oda connect [/bold] first." + ) + raise SystemExit(1) + + params = mgr.get_connection(active) + db_type = params["db_type"] + + if effective_strict and _is_docs_stale(config.docs_staleness_days): + err_console.print("[red]✗[/red] Docs are stale. Run 'oda docs generate' to refresh.") + raise SystemExit(1) + elif not effective_strict and _is_docs_stale(config.docs_staleness_days): + warnings.warn( + "Schema docs are stale. Run 'oda docs generate' to refresh.", + StaleDocsWarning, + stacklevel=2, + ) + err_console.print( + "[yellow]⚠[/yellow] Schema docs are stale. " + "Run [bold]oda docs generate[/bold] to refresh." 
+ ) + + conn: Any = None + adapter: DialectAdapter + try: + if db_type == "sqlite": + import sqlite3 + + conn = sqlite3.connect(params["database"], check_same_thread=False) + conn.row_factory = sqlite3.Row + adapter = SQLiteAdapter() + elif db_type == "postgresql": + import psycopg + + conn = psycopg.connect( + host=params["host"], + port=int(params["port"]), + dbname=params["database"], + user=params["username"], + password=params["password"], + autocommit=True, + ) + adapter = PostgreSQLAdapter() + elif db_type == "mysql": + import pymysql + + conn = pymysql.connect( + host=params["host"], + port=int(params["port"]), + database=params["database"], + user=params["username"], + password=params["password"], + ) + adapter = MySQLAdapter() + else: + err_console.print(f"[red]✗[/red] Unsupported db_type: {db_type}") + raise SystemExit(1) + except SystemExit: + if conn is not None: + conn.close() + raise + except Exception: + if conn is not None: + conn.close() + raise + + from open_data_agent.history import HistoryTracker + + tracker = HistoryTracker() + + engine = QueryEngine( + adapter, + conn, + config=config, + history_tracker=tracker, + connection_name=active, + db_type=db_type, + ) + + try: + try: + result = engine.execute(sql, question=sql) + except SafetyError as exc: + err_console.print(f"[red]✗[/red] Safety check failed: {exc}") + raise SystemExit(1) from exc + except Exception as exc: + err_console.print(f"[red]✗[/red] Query failed: {exc}") + raise SystemExit(1) from exc + + if result.error: + err_console.print(f"[red]✗[/red] Query error: {result.error}") + # Skip diagnostics on timeout: connection may have a daemon thread still running. 
+ if not result.error.startswith("Query timed out"): + diag = DiagnosticEngine(adapter, conn) + diag.diagnose(sql, result) + raise SystemExit(1) + + if fmt == "table": + t = Table(title=f"Results ({result.row_count} rows, {result.duration_ms:.1f}ms)") + for col in result.columns: + t.add_column(col) + for row in result.rows: + t.add_row(*[str(v) if v is not None else "NULL" for v in row]) + console.print(t) + elif fmt == "json": + import json + + records = [dict(zip(result.columns, row, strict=False)) for row in result.rows] + click.echo(json.dumps(records, default=str, indent=2)) + elif fmt == "csv": + import csv + import io + + buf = io.StringIO() + writer = csv.writer(buf) + writer.writerow(result.columns) + for row in result.rows: + writer.writerow(row) + click.echo(buf.getvalue()) + + if result.row_count == 0: + diag = DiagnosticEngine(adapter, conn) + diag.diagnose(sql, result) + + if result.truncated: + err_console.print( + f"[yellow]⚠[/yellow] Result limited to {result.limit_applied} rows. " + "Use --limit or adjust config to see more." 
+ ) + finally: + if conn is not None: + conn.close() diff --git a/src/open_data_agent/cli_schema.py b/src/open_data_agent/cli_schema.py new file mode 100644 index 0000000..7bf4705 --- /dev/null +++ b/src/open_data_agent/cli_schema.py @@ -0,0 +1,258 @@ +"""oda schema commands — schemas, tables, describe, sample, profile.""" + +from __future__ import annotations + +import logging +from typing import Any + +import click +from rich.console import Console +from rich.table import Table + +from open_data_agent.db.connection import ConnectionManager +from open_data_agent.db.dialect import ( + DialectAdapter, + MySQLAdapter, + PostgreSQLAdapter, + SQLiteAdapter, +) +from open_data_agent.db.schema import SchemaInspector + +__all__ = ["schemas", "tables", "describe", "sample", "profile"] + +logger = logging.getLogger("open_data_agent.cli_schema") +console = Console() +err_console = Console(stderr=True) + + +def _get_inspector() -> tuple[SchemaInspector, str, Any]: + """Return (inspector, active_name, conn) or raise SystemExit.""" + mgr = ConnectionManager() + active = mgr.get_active_connection() + if active is None: + err_console.print( + "[red]✗[/red] No active connection. Run [bold]oda connect [/bold] first." 
+ ) + raise SystemExit(1) + + params = mgr.get_connection(active) + db_type = params["db_type"] + + conn: Any = None + adapter: DialectAdapter + try: + if db_type == "sqlite": + import sqlite3 + + conn = sqlite3.connect(params["database"], check_same_thread=False) + conn.row_factory = sqlite3.Row + adapter = SQLiteAdapter() + elif db_type == "postgresql": + import psycopg + + conn = psycopg.connect( + host=params["host"], + port=int(params["port"]), + dbname=params["database"], + user=params["username"], + password=params["password"], + autocommit=True, + ) + adapter = PostgreSQLAdapter() + elif db_type == "mysql": + import pymysql + + conn = pymysql.connect( + host=params["host"], + port=int(params["port"]), + database=params["database"], + user=params["username"], + password=params["password"], + ) + adapter = MySQLAdapter() + else: + err_console.print(f"[red]✗[/red] Unsupported db_type: {db_type}") + raise SystemExit(1) + + return SchemaInspector(adapter, conn), active, conn + except SystemExit: + if conn is not None: + conn.close() + raise + except Exception: + if conn is not None: + conn.close() + raise + + +def _parse_table_arg(table_arg: str, default_schema: str = "main") -> tuple[str, str]: + """Parse 'schema.table' or 'table' into (schema, table).""" + if "." 
in table_arg: + parts = table_arg.split(".", 1) + return parts[0], parts[1] + return default_schema, table_arg + + +@click.command(name="schemas") +def schemas() -> None: + """List all schemas in the active database.""" + conn = None + try: + inspector, active, conn = _get_inspector() + except SystemExit: + raise + + try: + schema_list = inspector.get_schemas() + t = Table(title=f"Schemas in '{active}'") + t.add_column("Schema Name", style="bold cyan") + for s in schema_list: + t.add_row(s) + console.print(t) + except Exception as exc: + err_console.print(f"[red]✗[/red] Failed to list schemas: {exc}") + raise SystemExit(1) from exc + finally: + if conn is not None: + conn.close() + + +@click.command(name="tables") +@click.argument("schema", default="main") +def tables(schema: str) -> None: + """List all tables in the given schema of the active database.""" + conn = None + try: + inspector, active, conn = _get_inspector() + except SystemExit: + raise + + try: + table_list = inspector.get_tables(schema) + t = Table(title=f"Tables in '{active}'.'{schema}'") + t.add_column("Table Name", style="bold cyan") + for tbl in table_list: + t.add_row(tbl) + console.print(t) + except SystemExit: + raise + except Exception as exc: + err_console.print(f"[red]✗[/red] Schema not found or error: {exc}") + raise SystemExit(1) from exc + finally: + if conn is not None: + conn.close() + + +@click.command(name="describe") +@click.argument("table") +def describe(table: str) -> None: + """Show column definitions for TABLE (schema.table or table).""" + conn = None + try: + inspector, _, conn = _get_inspector() + except SystemExit: + raise + + try: + schema_name, table_name = _parse_table_arg(table) + cols = inspector.get_columns(schema_name, table_name) + t = Table(title=f"Columns: {table}") + t.add_column("Name", style="bold cyan") + t.add_column("Type") + t.add_column("Nullable") + t.add_column("PK") + t.add_column("Default") + t.add_column("Comment") + for col in cols: + t.add_row( + 
col.name, + col.data_type, + "YES" if col.is_nullable else "NO", + "✓" if col.is_primary_key else "", + col.default_value or "", + col.comment or "", + ) + console.print(t) + except Exception as exc: + err_console.print(f"[red]✗[/red] Failed to describe '{table}': {exc}") + raise SystemExit(1) from exc + finally: + if conn is not None: + conn.close() + + +@click.command(name="sample") +@click.argument("table") +@click.option("--n", default=5, type=int, help="Number of rows to sample") +def sample(table: str, n: int) -> None: + """Show sample rows from TABLE (schema.table or table).""" + from open_data_agent.config import get_config + + config = get_config() + n = min(n, config.max_row_limit) + + conn = None + try: + inspector, _, conn = _get_inspector() + except SystemExit: + raise + + try: + schema_name, table_name = _parse_table_arg(table) + result = inspector.get_sample(schema_name, table_name, n=n) + t = Table(title=f"Sample: {table} (up to {n} rows)") + for col in result.columns: + t.add_column(col) + for row in result.rows: + t.add_row(*[str(v) if v is not None else "NULL" for v in row]) + console.print(t) + except Exception as exc: + err_console.print(f"[red]✗[/red] Failed to sample '{table}': {exc}") + raise SystemExit(1) from exc + finally: + if conn is not None: + conn.close() + + +@click.command(name="profile") +@click.argument("table") +def profile(table: str) -> None: + """Show column statistics for TABLE (schema.table or table).""" + conn = None + try: + inspector, _, conn = _get_inspector() + except SystemExit: + raise + + try: + schema_name, table_name = _parse_table_arg(table) + from open_data_agent.config import get_config + + prof = inspector.get_profile( + schema_name, table_name, timeout_seconds=get_config().query_timeout_seconds + ) + t = Table(title=f"Profile: {table}") + t.add_column("Column", style="bold cyan") + t.add_column("Null Count") + t.add_column("Distinct") + t.add_column("Min") + t.add_column("Max") + t.add_column("Sample Values") 
+ for col_name, stats in prof.items(): + samples = ", ".join(str(v) for v in (stats.get("sample_values") or [])[:3]) + t.add_row( + col_name, + str(stats.get("null_count", "")), + str(stats.get("distinct_count", "")), + str(stats.get("min", "")), + str(stats.get("max", "")), + samples, + ) + console.print(t) + except Exception as exc: + err_console.print(f"[red]✗[/red] Failed to profile '{table}': {exc}") + raise SystemExit(1) from exc + finally: + if conn is not None: + conn.close() diff --git a/src/open_data_agent/config.py b/src/open_data_agent/config.py new file mode 100644 index 0000000..9bfd03a --- /dev/null +++ b/src/open_data_agent/config.py @@ -0,0 +1,86 @@ +"""Configuration loading and path resolution (defaults → global yaml → connection options → CLI).""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from pathlib import Path +from typing import Any + +import yaml + +from open_data_agent.exceptions import ConfigError + +__all__ = ["Config", "get_config", "get_config_dir"] + +logger = logging.getLogger("open_data_agent.config") + +_CONFIG_DIR_NAME = "open-data-agent" +_CONFIG_FILE_NAME = "config.yaml" + + +@dataclass +class Config: + """Resolved configuration object. + + Field precedence: CLI flags > connection options > global config.yaml > defaults. 
+ """ + + row_limit: int = 1000 + max_row_limit: int = 10000 + query_timeout_seconds: int = 30 + docs_staleness_days: int = 7 + log_level: str = "INFO" + strict_mode: bool = False + + +def get_config_dir() -> Path: + """Return the config directory path (~/.config/open-data-agent/).""" + return Path.home() / ".config" / _CONFIG_DIR_NAME + + +def _load_yaml_file(path: Path) -> dict[str, Any]: + """Load a YAML file, returning an empty dict if the file does not exist.""" + if not path.exists(): + logger.debug("Config file not found at %s — using defaults", path) + return {} + try: + with open(path) as f: + data = yaml.safe_load(f) or {} + if not isinstance(data, dict): + raise ConfigError(f"Expected a YAML mapping in {path}, got {type(data).__name__}") + return data + except yaml.YAMLError as exc: + raise ConfigError(f"Failed to parse YAML config at {path}: {exc}") from exc + + +def get_config( + connection_options: dict[str, Any] | None = None, + **cli_overrides: Any, +) -> Config: + """Build and return a resolved Config, applying connection options and CLI overrides.""" + resolved: dict[str, Any] = { + "row_limit": 1000, + "max_row_limit": 10000, + "query_timeout_seconds": 30, + "docs_staleness_days": 7, + "log_level": "INFO", + "strict_mode": False, + } + + global_config_path = get_config_dir() / _CONFIG_FILE_NAME + global_yaml = _load_yaml_file(global_config_path) + for key in resolved: + if key in global_yaml: + resolved[key] = global_yaml[key] + + if connection_options: + for key in resolved: + if key in connection_options: + resolved[key] = connection_options[key] + + for key, value in cli_overrides.items(): + if value is not None and key in resolved: + resolved[key] = value + + return Config(**resolved) diff --git a/src/open_data_agent/db/__init__.py b/src/open_data_agent/db/__init__.py new file mode 100644 index 0000000..d854141 --- /dev/null +++ b/src/open_data_agent/db/__init__.py @@ -0,0 +1,5 @@ +"""Database layer — dialect adapters, connection management, 
query engine, schema inspection.""" + +from __future__ import annotations + +__all__: list[str] = [] diff --git a/src/open_data_agent/db/connection.py b/src/open_data_agent/db/connection.py new file mode 100644 index 0000000..4f2948f --- /dev/null +++ b/src/open_data_agent/db/connection.py @@ -0,0 +1,175 @@ +"""ConnectionManager — named DB connections stored in ~/.config/open-data-agent/connections.yaml. + +Passwords are stored in the OS keychain via the `keyring` library (macOS Keychain, GNOME Keyring, +Windows Credential Manager). On headless environments where no keyring backend is available, +passwords fall back to plaintext in connections.yaml with a warning. +""" + +from __future__ import annotations + +import contextlib +import logging +import os +import re +from pathlib import Path +from typing import Any + +import keyring +import yaml + +from open_data_agent.config import get_config_dir +from open_data_agent.exceptions import ConfigError + +__all__ = ["ConnectionManager", "sanitise_dsn"] + +logger = logging.getLogger("open_data_agent.db.connection") + +_KEYRING_SERVICE = "open-data-agent" + +_CONNECTIONS_FILE = "connections.yaml" +_ACTIVE_CONNECTION_FILE = "active-connection" + +_REQUIRED_FIELDS = frozenset({"db_type", "host", "port", "database", "username", "password"}) +_VALID_DB_TYPES = frozenset({"postgresql", "mysql", "sqlite"}) + + +def sanitise_dsn(dsn: str) -> str: + """Mask password in a DSN string before logging.""" + dsn = re.sub(r"(:)[^:@]+(@)", r"\1***\2", dsn) + dsn = re.sub(r"(password\s*=\s*)\S+", r"\1***", dsn, flags=re.IGNORECASE) + return dsn + + +class ConnectionManager: + """Manages named database connections with secure storage.""" + + def __init__(self, config_dir: os.PathLike[str] | None = None) -> None: + self._config_dir = get_config_dir() if config_dir is None else Path(config_dir) + self._connections_path = self._config_dir / _CONNECTIONS_FILE + self._active_connection_path = self._config_dir / _ACTIVE_CONNECTION_FILE + + def 
_load_connections(self) -> dict[str, dict[str, Any]]: + """Load connections.yaml; returns {} if missing.""" + if not self._connections_path.exists(): + return {} + try: + with open(self._connections_path) as f: + data = yaml.safe_load(f) or {} + if not isinstance(data, dict): + raise ConfigError("Invalid connections.yaml: expected a mapping") + return data + except yaml.YAMLError as exc: + raise ConfigError(f"Failed to parse connections.yaml: {exc}") from exc + + def _save_connections(self, connections: dict[str, dict[str, Any]]) -> None: + """Write connections.yaml with 0o600 permissions (owner read/write only).""" + self._config_dir.mkdir(parents=True, exist_ok=True) + path = str(self._connections_path) + fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o600) + with os.fdopen(fd, "w") as f: + yaml.dump(connections, f, default_flow_style=False) + + def _keyring_set(self, name: str, password: str) -> bool: + """Store password in OS keychain. Returns True on success, False on failure.""" + try: + keyring.set_password(_KEYRING_SERVICE, name, password) + return True + except Exception as exc: # noqa: BLE001 + logger.warning( + "keyring write failed for '%s' — password will be stored in plaintext: %s", + name, + exc, + ) + return False + + def _keyring_get(self, name: str) -> str | None: + """Retrieve password from OS keychain, or None if unavailable/not found.""" + try: + return keyring.get_password(_KEYRING_SERVICE, name) + except Exception: # noqa: BLE001 + return None + + def _keyring_delete(self, name: str) -> None: + """Delete keychain entry; silently no-ops if not found or unavailable.""" + with contextlib.suppress(Exception): + keyring.delete_password(_KEYRING_SERVICE, name) + + def save_connection(self, name: str, params: dict[str, Any]) -> None: + """Save a named connection. 
Raises ConfigError on missing/invalid fields.""" + missing = _REQUIRED_FIELDS - set(params.keys()) + if missing: + raise ConfigError(f"Missing connection fields: {', '.join(sorted(missing))}") + + if params["db_type"] not in _VALID_DB_TYPES: + raise ConfigError( + f"Invalid db_type '{params['db_type']}'. " + f"Must be one of: {', '.join(sorted(_VALID_DB_TYPES))}" + ) + + stored = dict(params) + password = str(stored.pop("password", "")) + + self._keyring_delete(name) + if self._keyring_set(name, password): + stored["_keyring"] = True + else: + stored["password"] = password + + connections = self._load_connections() + connections[name] = stored + self._save_connections(connections) + logger.info("Saved connection '%s' (db_type=%s)", name, params["db_type"]) + + def list_connections(self) -> list[dict[str, Any]]: + """Return all connections without passwords.""" + connections = self._load_connections() + result = [] + for name, params in connections.items(): + entry = {k: v for k, v in params.items() if k != "password"} + entry["name"] = name + result.append(entry) + return result + + def get_connection(self, name: str) -> dict[str, Any]: + """Return full connection dict including password. Raises ConfigError if not found.""" + connections = self._load_connections() + if name not in connections: + raise ConfigError(f"Connection '{name}' not found in connections.yaml") + entry = dict(connections[name]) + if entry.pop("_keyring", False): + password = self._keyring_get(name) + if password is None: + raise ConfigError( + f"Password for '{name}' not found in keychain. " + "Re-add the connection with 'oda connections add'." + ) + entry["password"] = password + return entry + + def remove_connection(self, name: str) -> None: + """Remove a named connection and its keychain entry. 
Raises ConfigError if not found.""" + connections = self._load_connections() + if name not in connections: + raise ConfigError(f"Connection '{name}' not found") + del connections[name] + self._save_connections(connections) + self._keyring_delete(name) + logger.info("Removed connection '%s'", name) + + def set_active_connection(self, name: str) -> None: + """Activate a connection by name. Raises ConfigError if not found.""" + self.get_connection(name) + self._config_dir.mkdir(parents=True, exist_ok=True) + self._active_connection_path.write_text(name) + logger.info("Active connection set to '%s'", name) + + def get_active_connection(self) -> str | None: + """Return the active connection name, or None.""" + if not self._active_connection_path.exists(): + return None + name = self._active_connection_path.read_text().strip() + return name if name else None + + def connection_exists(self, name: str) -> bool: + """Return True if *name* exists in connections.yaml.""" + return name in self._load_connections() diff --git a/src/open_data_agent/db/diagnostics.py b/src/open_data_agent/db/diagnostics.py new file mode 100644 index 0000000..5c9e7b9 --- /dev/null +++ b/src/open_data_agent/db/diagnostics.py @@ -0,0 +1,132 @@ +"""DiagnosticEngine — structured diagnostic output for zero-row or error query results.""" + +from __future__ import annotations + +import logging +import re +from typing import TYPE_CHECKING, Any + +import click + +if TYPE_CHECKING: + from open_data_agent.db.dialect import DialectAdapter + from open_data_agent.db.query import QueryResult + +__all__ = ["DiagnosticEngine"] + +# Each (col, table) pair fires 2 live queries; cap combinations to bound diagnostic overhead. 
_MAX_DIAG_COLUMNS = 4
_MAX_DIAG_TABLES = 2
_MAX_DIAG_SAMPLE_VALUES = 5  # DISTINCT sample values fetched per (col, table) pair

logger = logging.getLogger("open_data_agent.db.diagnostics")

# Heuristic SQL parsing: good enough for diagnostics, not a full SQL grammar.
_TABLE_PATTERN = re.compile(
    r"\bFROM\s+([\w.\"` ]+?)(?:\s+(?:WHERE|JOIN|LEFT|RIGHT|INNER|OUTER|GROUP|ORDER|LIMIT|$))",
    re.IGNORECASE,
)
_JOIN_PATTERN = re.compile(
    r"\bJOIN\s+([\w.\"` ]+?)(?:\s+(?:ON|WHERE|LEFT|RIGHT|INNER|OUTER|GROUP|ORDER|LIMIT|$))",
    re.IGNORECASE,
)


def _extract_table_names(sql: str) -> list[str]:
    """Extract referenced table names from SQL (FROM and JOIN clauses only)."""
    names: list[str] = []
    for match in _TABLE_PATTERN.finditer(sql):
        raw = match.group(1).strip().strip('"').strip("`").strip("'")
        if raw:
            # Keep only the last dotted segment (strip schema qualifier).
            names.append(raw.split(".")[-1])
    for match in _JOIN_PATTERN.finditer(sql):
        raw = match.group(1).strip().strip('"').strip("`").strip("'")
        if raw:
            names.append(raw.split(".")[-1])
    return list(dict.fromkeys(names))  # deduplicate, preserve order


def _extract_filter_columns(sql: str) -> list[str]:
    """Extract column names from WHERE clause (simple heuristic)."""
    where_match = re.search(
        r"\bWHERE\b(.+?)(?:\bGROUP\b|\bORDER\b|\bLIMIT\b|$)", sql, re.IGNORECASE | re.DOTALL
    )
    if not where_match:
        return []
    where_clause = where_match.group(1)
    # Identifiers immediately followed by a comparison/predicate operator.
    col_matches = re.findall(
        r"\b([\w]+)\s*(?:=|!=|<>|>|<|>=|<=|LIKE|IN|IS)", where_clause, re.IGNORECASE
    )
    keywords = {"AND", "OR", "NOT", "NULL", "TRUE", "FALSE", "SELECT", "WHERE", "FROM"}
    return [c for c in col_matches if c.upper() not in keywords]


class DiagnosticEngine:
    """Generates self-healing diagnostic output for zero-row or error query results."""

    def __init__(self, adapter: DialectAdapter, conn: Any) -> None:
        self._adapter = adapter
        self._conn = conn

    def _execute(self, sql: str) -> list[Any]:
        """Run *sql* and return all result rows.

        BUG FIX: the previous code called ``self._conn.execute(...)`` directly,
        which works for sqlite3 and psycopg connections but raises
        AttributeError for pymysql (its Connection exposes no ``execute()``),
        silently disabling all diagnostics for MySQL. Fall back to an explicit
        cursor when the connection-level shortcut is unavailable.
        """
        execute = getattr(self._conn, "execute", None)
        if execute is not None:
            return list(execute(sql).fetchall())
        cursor = self._conn.cursor()
        try:
            cursor.execute(sql)
            return list(cursor.fetchall())
        finally:
            cursor.close()

    def diagnose(self, sql: str, result: QueryResult) -> str:
        """Generate diagnostic text for a zero-row or error result; also echoes to stderr."""
        lines: list[str] = ["", "[diagnostic] " + "─" * 50]

        if result.error:
            # Error path: no live probing; just print hints and return.
            lines.append(f"[diagnostic] ✗ Query error: {result.error}")
            lines.append("[diagnostic] Hints:")
            lines.append("[diagnostic] • Check column names and quoting style")
            lines.append("[diagnostic] • Verify table exists in active schema")
            lines.append("[diagnostic] " + "─" * 50)
            output = "\n".join(lines)
            click.echo(output, err=True)
            return output

        if result.row_count == 0:
            lines.append("[diagnostic] ⚠ Zero rows returned.")
            lines.append("[diagnostic] Diagnostic context:")

            table_names = _extract_table_names(sql)
            for table in table_names:
                try:
                    quoted = self._adapter.quote_identifier(table)
                    rows = self._execute(f"SELECT COUNT(*) FROM {quoted}")
                    count = rows[0][0]
                    lines.append(f"[diagnostic] Table '{table}' has {count:,} total rows.")
                except Exception:
                    # Best-effort probe; the diagnosed query may reference a
                    # table the heuristic extractor got wrong.
                    lines.append(f"[diagnostic] Table '{table}': could not count rows.")

            filter_cols = _extract_filter_columns(sql)
            for col in filter_cols[:_MAX_DIAG_COLUMNS]:
                for table in table_names[:_MAX_DIAG_TABLES]:
                    try:
                        quoted_table = self._adapter.quote_identifier(table)
                        quoted_col = self._adapter.quote_identifier(col)
                        sample_rows = self._execute(
                            f"SELECT DISTINCT {quoted_col} FROM {quoted_table} "
                            f"WHERE {quoted_col} IS NOT NULL LIMIT {_MAX_DIAG_SAMPLE_VALUES}"
                        )
                        sample_vals = [str(r[0]) for r in sample_rows]
                        null_rows = self._execute(
                            f"SELECT COUNT(*) FROM {quoted_table} WHERE {quoted_col} IS NULL"
                        )
                        null_count = null_rows[0][0]
                        if sample_vals:
                            lines.append(
                                f"[diagnostic] Column '{col}' sample values: {sample_vals}"
                            )
                        if null_count:
                            lines.append(f"[diagnostic] Column '{col}' NULL count: {null_count}")
                    except Exception:
                        # Column may not belong to this table; skip silently.
                        pass

            lines.append("[diagnostic] Suggested checks:")
            lines.append("[diagnostic] • Date filter may be too restrictive")
            lines.append("[diagnostic] • Check column value casing (case-sensitive DB)")
            lines.append("[diagnostic] • Verify JOIN conditions are not eliminating rows")
lines.append("[diagnostic] • Try a broader query first, then narrow down") + lines.append("[diagnostic] " + "─" * 50) + + output = "\n".join(lines) + if lines and result.row_count == 0: + click.echo(output, err=True) + return output diff --git a/src/open_data_agent/db/dialect.py b/src/open_data_agent/db/dialect.py new file mode 100644 index 0000000..c16c305 --- /dev/null +++ b/src/open_data_agent/db/dialect.py @@ -0,0 +1,393 @@ +"""DialectAdapter ABC and SQLite / PostgreSQL / MySQL implementations.""" + +from __future__ import annotations + +import sqlite3 +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +from open_data_agent.db.schema import NormalizedColumn +from open_data_agent.exceptions import DialectError + +if TYPE_CHECKING: + pass + +__all__ = [ + "DialectAdapter", + "SQLiteAdapter", + "PostgreSQLAdapter", + "MySQLAdapter", +] + + +class DialectAdapter(ABC): + """ABC for all DB adapters. All dialect-specific SQL must live in subclasses.""" + + @abstractmethod + def quote_identifier(self, name: str) -> str: + """Wrap an identifier in the dialect's quote chars.""" + + @abstractmethod + def date_trunc(self, field: str, column: str) -> str: + """SQL fragment truncating *column* to *field* granularity (year/month/day/…).""" + + @abstractmethod + def current_timestamp(self) -> str: + """SQL expression for the current timestamp.""" + + @abstractmethod + def interval(self, n: int, unit: str) -> str: + """SQL interval expression for *n* *unit*s.""" + + @property + @abstractmethod + def safe_prefixes(self) -> list[str]: + """Allowed read-only statement prefixes.""" + + @property + @abstractmethod + def dangerous_patterns(self) -> list[str]: + """Regex patterns for dialect-specific dangerous constructs.""" + + @abstractmethod + def introspect_schemas_sql(self) -> str: + """SQL that lists schema names.""" + + @abstractmethod + def introspect_tables_sql(self, schema: str) -> str: + """SQL that lists table names in *schema*.""" + + 
@abstractmethod + def introspect_columns_sql(self, schema: str, table: str) -> str: + """SQL that describes columns in *schema*.*table*.""" + + @abstractmethod + def normalize_column_row(self, row: dict[str, Any]) -> NormalizedColumn: + """Map a dialect-specific introspection row to NormalizedColumn.""" + + @property + def supports_server_timeout(self) -> bool: + """True if the dialect supports a DB-level statement timeout (no thread needed).""" + return False + + def set_statement_timeout(self, conn: Any, timeout_seconds: int) -> None: # noqa: B027 + """Set a DB-level statement timeout on *conn*. No-op for dialects that don't support it.""" + + +# ───────────────────────────────────────────────────────────────────────────── +# SQLiteAdapter +# ───────────────────────────────────────────────────────────────────────────── + + +class SQLiteAdapter(DialectAdapter): + """Adapter for SQLite (stdlib sqlite3).""" + + def quote_identifier(self, name: str) -> str: + escaped = name.replace('"', '""') + return f'"{escaped}"' + + def date_trunc(self, field: str, column: str) -> str: + """strftime-based DATE_TRUNC equivalent.""" + formats: dict[str, str] = { + "year": "%Y-01-01", + "month": "%Y-%m-01", + "week": "%Y-%W-1", # ISO week start (Monday) + "day": "%Y-%m-%d", + "hour": "%Y-%m-%d %H:00:00", + "minute": "%Y-%m-%d %H:%M:00", + } + fmt = formats.get(field.lower()) + if fmt is None: + raise DialectError(f"SQLiteAdapter.date_trunc: unsupported field '{field}'") + return f"strftime('{fmt}', {column})" + + def current_timestamp(self) -> str: + return "datetime('now')" + + def interval(self, n: int, unit: str) -> str: + """SQLite date modifier, e.g. 
interval(7, 'day') → "'+7 days'".""" + unit_map: dict[str, str] = { + "year": "years", + "month": "months", + "week": "weeks", + "day": "days", + "hour": "hours", + "minute": "minutes", + "second": "seconds", + } + sqlite_unit = unit_map.get(unit.lower()) + if sqlite_unit is None: + raise DialectError(f"SQLiteAdapter.interval: unsupported unit '{unit}'") + return f"'+{n} {sqlite_unit}'" + + @property + def safe_prefixes(self) -> list[str]: + # PRAGMA is the SQLite introspection command; SHOW/DESCRIBE are not supported. + return ["SELECT", "WITH", "EXPLAIN", "PRAGMA"] + + @property + def dangerous_patterns(self) -> list[str]: + return [ + r"ATTACH\s+DATABASE", + r"DETACH\s+(DATABASE\s+)?", + r";\s*DROP", + r";\s*DELETE", + r";\s*UPDATE", + r";\s*INSERT", + r";\s*CREATE", + r";\s*ALTER", + ] + + def introspect_schemas_sql(self) -> str: + # SQLite has no named schemas; expose 'main' as the single schema. + return "SELECT 'main' AS schema_name" + + def introspect_tables_sql(self, schema: str) -> str: + return ( + "SELECT name AS table_name " + "FROM sqlite_master " + "WHERE type='table' AND name NOT LIKE 'sqlite_%' " + "ORDER BY name" + ) + + def introspect_columns_sql(self, schema: str, table: str) -> str: + return f"PRAGMA table_info({self.quote_identifier(table)})" + + def normalize_column_row(self, row: dict[str, Any]) -> NormalizedColumn: + """Map a PRAGMA table_info row (cid, name, type, notnull, dflt_value, pk) to NormalizedColumn.""" + return NormalizedColumn( + name=str(row["name"]), + data_type=str(row["type"]) if row["type"] else "TEXT", + is_nullable=not bool(row["notnull"]), + default_value=str(row["dflt_value"]) if row["dflt_value"] is not None else None, + is_primary_key=bool(row["pk"]), + ordinal_position=int(row["cid"]) + 1, + comment=None, + ) + + def execute_introspect_columns( + self, + conn: sqlite3.Connection, + schema: str, + table: str, + ) -> list[NormalizedColumn]: + """Execute PRAGMA table_info and return normalized columns.""" + cursor = 
conn.execute(self.introspect_columns_sql(schema, table)) + columns_desc = [d[0] for d in cursor.description] + rows = [dict(zip(columns_desc, row, strict=False)) for row in cursor.fetchall()] + return [self.normalize_column_row(row) for row in rows] + + +# ───────────────────────────────────────────────────────────────────────────── +# PostgreSQLAdapter +# ───────────────────────────────────────────────────────────────────────────── + + +class PostgreSQLAdapter(DialectAdapter): + """Adapter for PostgreSQL (psycopg v3).""" + + def quote_identifier(self, name: str) -> str: + escaped = name.replace('"', '""') + return f'"{escaped}"' + + def date_trunc(self, field: str, column: str) -> str: + return f"DATE_TRUNC('{field.lower()}', {column})" + + def current_timestamp(self) -> str: + return "NOW()" + + def interval(self, n: int, unit: str) -> str: + return f"INTERVAL '{n} {unit.lower()}s'" + + @property + def safe_prefixes(self) -> list[str]: + # TABLE = shorthand for SELECT * FROM. SHOW = config params. DESCRIBE/PRAGMA not supported. 
+ return ["SELECT", "WITH", "EXPLAIN", "SHOW", "TABLE"] + + @property + def dangerous_patterns(self) -> list[str]: + return [ + r";\s*DROP", + r";\s*DELETE", + r";\s*UPDATE", + r";\s*INSERT", + r";\s*CREATE", + r";\s*ALTER", + r";\s*TRUNCATE", + r"COPY\s+\w", + r"CREATE\s+EXTENSION", + r"ALTER\s+SYSTEM", + ] + + @property + def supports_server_timeout(self) -> bool: + return True + + def set_statement_timeout(self, conn: Any, timeout_seconds: int) -> None: + """Set PostgreSQL statement_timeout (milliseconds).""" + conn.execute(f"SET statement_timeout = {timeout_seconds * 1000}") + + def introspect_schemas_sql(self) -> str: + return ( + "SELECT schema_name " + "FROM information_schema.schemata " + "WHERE schema_name NOT IN ('information_schema', 'pg_catalog', 'pg_toast') " + " AND schema_name NOT LIKE 'pg_temp_%' " + " AND schema_name NOT LIKE 'pg_toast_temp_%' " + "ORDER BY schema_name" + ) + + def introspect_tables_sql(self, schema: str) -> str: + # Callers pass (schema,) as bind params. + return ( + "SELECT table_name " + "FROM information_schema.tables " + "WHERE table_schema = %s " + " AND table_type = 'BASE TABLE' " + "ORDER BY table_name" + ) + + def introspect_columns_sql(self, schema: str, table: str) -> str: + # Callers pass (schema, table) as bind params. 
+ return ( + "SELECT " + " c.column_name AS name, " + " c.data_type AS data_type, " + " CASE WHEN c.is_nullable = 'YES' THEN true ELSE false END AS is_nullable, " + " c.column_default AS default_value, " + " CASE WHEN kcu.column_name IS NOT NULL THEN true ELSE false END AS is_primary_key, " + " c.ordinal_position, " + " pgd.description AS comment " + "FROM information_schema.columns c " + "LEFT JOIN information_schema.table_constraints tc " + " ON tc.table_schema = c.table_schema " + " AND tc.table_name = c.table_name " + " AND tc.constraint_type = 'PRIMARY KEY' " + "LEFT JOIN information_schema.key_column_usage kcu " + " ON kcu.constraint_name = tc.constraint_name " + " AND kcu.column_name = c.column_name " + "LEFT JOIN pg_catalog.pg_statio_all_tables st " + " ON st.schemaname = c.table_schema AND st.relname = c.table_name " + "LEFT JOIN pg_catalog.pg_description pgd " + " ON pgd.objoid = st.relid AND pgd.objsubid = c.ordinal_position " + "WHERE c.table_schema = %s AND c.table_name = %s " + "ORDER BY c.ordinal_position" + ) + + def normalize_column_row(self, row: dict[str, Any]) -> NormalizedColumn: + return NormalizedColumn( + name=str(row["name"]), + data_type=str(row["data_type"]), + is_nullable=bool(row["is_nullable"]), + default_value=str(row["default_value"]) if row["default_value"] is not None else None, + is_primary_key=bool(row["is_primary_key"]), + ordinal_position=int(row["ordinal_position"]), + comment=str(row["comment"]) if row.get("comment") is not None else None, + ) + + +# ───────────────────────────────────────────────────────────────────────────── +# MySQLAdapter +# ───────────────────────────────────────────────────────────────────────────── + + +class MySQLAdapter(DialectAdapter): + """Adapter for MySQL (pymysql).""" + + def quote_identifier(self, name: str) -> str: + escaped = name.replace("`", "``") + return f"`{escaped}`" + + def date_trunc(self, field: str, column: str) -> str: + """DATE_FORMAT-based DATE_TRUNC equivalent.""" + formats: 
dict[str, str] = { + "year": "%Y-01-01", + "month": "%Y-%m-01", + "day": "%Y-%m-%d", + "hour": "%Y-%m-%d %H:00:00", + "minute": "%Y-%m-%d %H:%i:00", + } + fmt = formats.get(field.lower()) + if fmt is None: + raise DialectError(f"MySQLAdapter.date_trunc: unsupported field '{field}'") + return f"DATE_FORMAT({column}, '{fmt}')" + + def current_timestamp(self) -> str: + return "NOW()" + + def interval(self, n: int, unit: str) -> str: + return f"INTERVAL {n} {unit.upper()}" + + @property + def safe_prefixes(self) -> list[str]: + # DESCRIBE = SHOW COLUMNS shorthand. PRAGMA/TABLE not supported. + return ["SELECT", "WITH", "EXPLAIN", "SHOW", "DESCRIBE"] + + @property + def dangerous_patterns(self) -> list[str]: + return [ + r";\s*DROP", + r";\s*DELETE", + r";\s*UPDATE", + r";\s*INSERT", + r";\s*CREATE", + r";\s*ALTER", + r";\s*TRUNCATE", + r"LOAD\s+DATA", + r"INTO\s+OUTFILE", + r"\bGRANT\b", + ] + + @property + def supports_server_timeout(self) -> bool: + return True + + def set_statement_timeout(self, conn: Any, timeout_seconds: int) -> None: + """Set MySQL MAX_EXECUTION_TIME (ms). NOTE: applies to SELECT only; 0 disables.""" + conn.execute(f"SET SESSION MAX_EXECUTION_TIME = {timeout_seconds * 1000}") + + def introspect_schemas_sql(self) -> str: + return ( + "SELECT schema_name " + "FROM information_schema.schemata " + "WHERE schema_name NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys') " + "ORDER BY schema_name" + ) + + def introspect_tables_sql(self, schema: str) -> str: + # Callers pass (schema,) as bind params. + return ( + "SELECT table_name " + "FROM information_schema.tables " + "WHERE table_schema = %s " + " AND table_type = 'BASE TABLE' " + "ORDER BY table_name" + ) + + def introspect_columns_sql(self, schema: str, table: str) -> str: + # Callers pass (schema, table) as bind params. 
+ return ( + "SELECT " + " column_name AS name, " + " data_type, " + " CASE WHEN is_nullable = 'YES' THEN 1 ELSE 0 END AS is_nullable, " + " column_default AS default_value, " + " CASE WHEN column_key = 'PRI' THEN 1 ELSE 0 END AS is_primary_key, " + " ordinal_position, " + " column_comment AS comment " + "FROM information_schema.columns " + "WHERE table_schema = %s AND table_name = %s " + "ORDER BY ordinal_position" + ) + + def normalize_column_row(self, row: dict[str, Any]) -> NormalizedColumn: + return NormalizedColumn( + name=str(row["name"]), + data_type=str(row["data_type"]), + is_nullable=bool(row["is_nullable"]), + default_value=str(row["default_value"]) if row["default_value"] is not None else None, + is_primary_key=bool(row["is_primary_key"]), + ordinal_position=int(row["ordinal_position"]), + # COLUMN_COMMENT is "" when unset; use `not in` to preserve comments like "0". + comment=str(row["comment"]) if row.get("comment") not in (None, "") else None, + ) diff --git a/src/open_data_agent/db/query.py b/src/open_data_agent/db/query.py new file mode 100644 index 0000000..bfd07a6 --- /dev/null +++ b/src/open_data_agent/db/query.py @@ -0,0 +1,229 @@ +"""QueryEngine — safety check, LIMIT injection, timeout-protected execution, history logging.""" + +from __future__ import annotations + +import contextlib +import logging +import re +import threading +import time +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any + +from open_data_agent.config import Config, get_config +from open_data_agent.db.safety import SafetyChecker + +if TYPE_CHECKING: + from open_data_agent.db.dialect import DialectAdapter + +__all__ = ["QueryResult", "QueryEngine"] + +logger = logging.getLogger("open_data_agent.db.query") + +# Applied to stripped SQL so LIMIT inside literals/comments is not falsely detected. +_LIMIT_PATTERN = re.compile(r"\bLIMIT\s+(\d+)\b", re.IGNORECASE) + +# Length-preserving replacements keep match positions identical to the original SQL. 
+_SINGLE_QUOTE_LITERAL = re.compile(r"'(?:[^']|'')*'", re.DOTALL) +_LINE_COMMENT = re.compile(r"--[^\n]*") +_BLOCK_COMMENT = re.compile(r"/\*.*?\*/", re.DOTALL) +# Handles both $$...$$ and $tag$...$tag$ via backreference \1. +_DOLLAR_QUOTE = re.compile(r"\$([^$]*)\$.*?\$\1\$", re.DOTALL) + + +def _strip_literals_and_comments(sql: str) -> str: + """Blank out literals and comments with same-length spaces (preserves match positions). + + Order: block comments → dollar-quoted strings → single-quoted strings → line comments. + """ + stripped = _BLOCK_COMMENT.sub(lambda m: " " * len(m.group(0)), sql) + stripped = _DOLLAR_QUOTE.sub(lambda m: " " * len(m.group(0)), stripped) + stripped = _SINGLE_QUOTE_LITERAL.sub(lambda m: " " * len(m.group(0)), stripped) + stripped = _LINE_COMMENT.sub(lambda m: " " * len(m.group(0)), stripped) + return stripped + + +@dataclass +class QueryResult: + """Result of a QueryEngine.execute() call.""" + + columns: list[str] + rows: list[tuple[object, ...]] + row_count: int + duration_ms: float + sql: str + truncated: bool + limit_applied: int + error: str | None = None + _extra: dict[str, object] = field(default_factory=dict, repr=False) + + +class QueryEngine: + """Executes SQL with safety check → LIMIT injection → timeout-protected execution.""" + + def __init__( + self, + adapter: DialectAdapter, + conn: Any, + config: Config | None = None, + history_tracker: Any | None = None, + connection_name: str = "", + db_type: str = "", + ) -> None: + self._adapter = adapter + self._conn = conn + self._config = config or get_config() + self._safety = SafetyChecker(adapter) + self._history = history_tracker + self._connection_name = connection_name + self._db_type = db_type + + def execute(self, sql: str, question: str | None = None) -> QueryResult: + """Execute SQL safely. Raises SafetyError if blocked. 
Timeout surfaces as result.error.""" + self._safety.validate(sql) + sql_with_limit, limit_applied, truncated = self._inject_limit(sql) + + timeout_seconds = self._config.query_timeout_seconds + t0 = time.monotonic() + error: str | None = None + columns: list[str] = [] + rows: list[tuple[object, ...]] = [] + + if self._adapter.supports_server_timeout: + columns, rows, error = self._execute_with_server_timeout( + sql_with_limit, timeout_seconds + ) + else: + columns, rows, error = self._execute_with_thread_timeout( + sql_with_limit, timeout_seconds + ) + + duration_ms = (time.monotonic() - t0) * 1000 + + result = QueryResult( + columns=columns, + rows=rows, + row_count=len(rows), + duration_ms=duration_ms, + sql=sql_with_limit, + truncated=truncated, + limit_applied=limit_applied, + error=error, + ) + + if self._history is not None: + try: + self._history.append_result( + result, + question=question, + connection=self._connection_name, + db_type=self._db_type, + ) + except Exception as exc: + logger.warning("Failed to log to history: %s", exc) + + return result + + def _execute_with_server_timeout( + self, sql: str, timeout_seconds: int + ) -> tuple[list[str], list[tuple[object, ...]], str | None]: + """Execute using a DB-level statement timeout (PostgreSQL, MySQL).""" + try: + self._adapter.set_statement_timeout(self._conn, timeout_seconds) + except Exception as exc: # noqa: BLE001 + logger.warning( + "Failed to set DB-level statement timeout (%ss): %s — " + "query will proceed without server-side timeout", + timeout_seconds, + exc, + ) + + try: + cursor = self._conn.execute(sql) + cols = [d[0] for d in cursor.description] if cursor.description else [] + fetched = [tuple(r) for r in cursor.fetchall()] + return cols, fetched, None + except Exception as exc: # noqa: BLE001 + if self._is_timeout_exception(exc): + logger.error("Query timed out: %s", sql[:200]) + return [], [], f"Query timed out after {timeout_seconds}s" + logger.error("Query execution failed: %s", exc) 
+ return [], [], str(exc) + finally: + with contextlib.suppress(Exception): + self._adapter.set_statement_timeout(self._conn, 0) + + @staticmethod + def _is_timeout_exception(exc: Exception) -> bool: + """True if *exc* is a DB-level statement timeout (pgcode 57014 or MySQL errno 3024/1969).""" + pgcode = getattr(exc, "pgcode", None) or getattr(exc, "sqlstate", None) + if pgcode == "57014": + return True + errno = getattr(exc, "args", (None,))[0] if exc.args else None + return errno in (3024, 1969) + + def _execute_with_thread_timeout( + self, sql: str, timeout_seconds: int + ) -> tuple[list[str], list[tuple[object, ...]], str | None]: + """Execute in a daemon thread with a join timeout (SQLite fallback).""" + result_holder: dict[str, Any] = {} + exc_holder: list[Exception] = [] + + def _run() -> None: + try: + cursor = self._conn.execute(sql) + result_holder["columns"] = ( + [d[0] for d in cursor.description] if cursor.description else [] + ) + result_holder["rows"] = [tuple(r) for r in cursor.fetchall()] + except Exception as exc: + exc_holder.append(exc) + + thread = threading.Thread(target=_run, daemon=True) + thread.start() + thread.join(timeout=timeout_seconds) + + if thread.is_alive(): + with contextlib.suppress(AttributeError): + self._conn.interrupt() + thread.join(timeout=1.0) + if thread.is_alive(): + logger.warning( + "Query thread did not stop after interrupt — thread may be leaked: %s", + sql[:200], + ) + logger.error("Query timed out: %s", sql[:200]) + return [], [], f"Query timed out after {timeout_seconds}s" + + if exc_holder: + logger.error("Query execution failed: %s", exc_holder[0]) + return [], [], str(exc_holder[0]) + + return ( + result_holder.get("columns", []), + result_holder.get("rows", []), + None, + ) + + def _inject_limit(self, sql: str) -> tuple[str, int, bool]: + """Inject or clamp LIMIT; returns (sql_with_limit, effective_limit, was_truncated).""" + config = self._config + default_limit = config.row_limit + max_limit = 
config.max_row_limit + + # Search stripped SQL so LIMIT inside literals/comments isn't falsely matched. + stripped = _strip_literals_and_comments(sql) + match = _LIMIT_PATTERN.search(stripped) + if match: + existing = int(match.group(1)) + if existing > max_limit: + # Length-preserving strip keeps match positions identical to original SQL. + start, end = match.start(), match.end() + sql = sql[:start] + f"LIMIT {max_limit}" + sql[end:] + return sql, max_limit, True + return sql, existing, False + else: + # Inject on a new line so it isn't swallowed by a trailing -- comment. + sql = sql.rstrip().rstrip(";") + sql = f"{sql}\nLIMIT {default_limit}" + return sql, default_limit, True diff --git a/src/open_data_agent/db/safety.py b/src/open_data_agent/db/safety.py new file mode 100644 index 0000000..56a9a61 --- /dev/null +++ b/src/open_data_agent/db/safety.py @@ -0,0 +1,76 @@ +"""SafetyChecker — read-only SQL enforcement with dialect-aware dangerous pattern detection.""" + +from __future__ import annotations + +import logging +import re +from typing import TYPE_CHECKING + +from open_data_agent.exceptions import SafetyError + +if TYPE_CHECKING: + from open_data_agent.db.dialect import DialectAdapter + +__all__ = ["SafetyChecker"] + +logger = logging.getLogger("open_data_agent.db.safety") + +_UNIVERSAL_BLOCKED_PREFIXES = frozenset( + { + "INSERT", + "UPDATE", + "DELETE", + "DROP", + "CREATE", + "ALTER", + "TRUNCATE", + "REPLACE", + "MERGE", + "GRANT", + "REVOKE", + "CALL", + "EXEC", + "EXECUTE", + } +) + + +class SafetyChecker: + """Validates SQL for read-only safety: whitelist prefix, blacklist prefix, dialect patterns.""" + + def __init__(self, adapter: DialectAdapter) -> None: + self._adapter = adapter + self._allowed_prefixes = frozenset(p.upper() for p in adapter.safe_prefixes) + self._dangerous_patterns = [ + re.compile(p, re.IGNORECASE | re.DOTALL) for p in adapter.dangerous_patterns + ] + + def validate(self, sql: str) -> None: + """Raise SafetyError if SQL contains 
any blocked or dangerous operation.""" + if not sql or not sql.strip(): + raise SafetyError("Empty SQL is not allowed") + + normalized = sql.strip() + first_token = normalized.split()[0].upper().rstrip(";") + + if first_token in _UNIVERSAL_BLOCKED_PREFIXES: + raise SafetyError( + f"SQL blocked: '{first_token}' statements are not allowed. " + "Only read-only queries are permitted." + ) + + if first_token not in self._allowed_prefixes: + raise SafetyError( + f"SQL blocked: '{first_token}' is not in the allowed statement list. " + f"Allowed: {', '.join(sorted(self._allowed_prefixes))}" + ) + + for pattern in self._dangerous_patterns: + match = pattern.search(normalized) + if match: + raise SafetyError( + f"SQL blocked: dangerous pattern detected: '{match.group(0).strip()}'. " + "Possible SQL injection attempt." + ) + + logger.debug("SQL passed safety check: %.80s...", normalized) diff --git a/src/open_data_agent/db/schema.py b/src/open_data_agent/db/schema.py new file mode 100644 index 0000000..5655846 --- /dev/null +++ b/src/open_data_agent/db/schema.py @@ -0,0 +1,168 @@ +"""SchemaInspector — uniform DB introspection across SQLite, PostgreSQL, and MySQL.""" + +from __future__ import annotations + +import contextlib +import logging +import threading +import time +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from open_data_agent.db.dialect import DialectAdapter + from open_data_agent.db.query import QueryResult + +__all__ = ["NormalizedColumn", "SchemaInspector"] + +logger = logging.getLogger("open_data_agent.db.schema") + + +@dataclass +class NormalizedColumn: + """Uniform column representation across all DB dialects.""" + + name: str + data_type: str + is_nullable: bool + default_value: str | None + is_primary_key: bool + ordinal_position: int + comment: str | None = None # PG: pg_description, MySQL: COLUMN_COMMENT, SQLite: None + + +class SchemaInspector: + """Uniform schema introspection over any DialectAdapter.""" 
+ + def __init__(self, adapter: DialectAdapter, conn: Any) -> None: + self._adapter = adapter + self._conn = conn + + def get_schemas(self) -> list[str]: + """Return a list of schema names in the connected database.""" + sql = self._adapter.introspect_schemas_sql() + cursor = self._conn.execute(sql) + rows = cursor.fetchall() + return [row[0] for row in rows] + + def get_tables(self, schema: str) -> list[str]: + """Return a list of table names in the given schema.""" + from open_data_agent.db.dialect import SQLiteAdapter + + sql = self._adapter.introspect_tables_sql(schema) + if isinstance(self._adapter, SQLiteAdapter): + # SQLite uses a static query with no schema filter needed + cursor = self._conn.execute(sql) + else: + # PostgreSQL / MySQL — pass schema as a bind parameter to prevent injection + cursor = self._conn.execute(sql, (schema,)) + rows = cursor.fetchall() + return [row[0] for row in rows] + + def get_columns(self, schema: str, table: str) -> list[NormalizedColumn]: + """Return normalized column definitions for the given table.""" + from open_data_agent.db.dialect import SQLiteAdapter + + if isinstance(self._adapter, SQLiteAdapter): + # SQLite uses PRAGMA — special helper on adapter + return self._adapter.execute_introspect_columns(self._conn, schema, table) + + # PostgreSQL / MySQL — pass schema + table as bind parameters to prevent injection + sql = self._adapter.introspect_columns_sql(schema, table) + cursor = self._conn.execute(sql, (schema, table)) + columns_desc = [d[0] for d in cursor.description] + rows = [dict(zip(columns_desc, row, strict=False)) for row in cursor.fetchall()] + return [self._adapter.normalize_column_row(row) for row in rows] + + def get_sample(self, schema: str, table: str, n: int = 5) -> QueryResult: + """Return up to *n* rows. 
Fetches n+1 to detect truncation without false positives.""" + from open_data_agent.db.query import QueryResult + + if n < 1: + n = 1 + + adapter = self._adapter + quoted = ( + f"{adapter.quote_identifier(schema)}.{adapter.quote_identifier(table)}" + if schema and schema != "main" + else adapter.quote_identifier(table) + ) + sql = f"SELECT * FROM {quoted} LIMIT {n + 1}" + t0 = time.monotonic() + cursor = self._conn.execute(sql) + duration_ms = (time.monotonic() - t0) * 1000 + columns = [d[0] for d in cursor.description] if cursor.description else [] + all_rows = cursor.fetchall() + truncated = len(all_rows) > n + rows = all_rows[:n] + return QueryResult( + columns=columns, + rows=[tuple(r) for r in rows], + row_count=len(rows), + duration_ms=duration_ms, + sql=f"SELECT * FROM {quoted} LIMIT {n}", + truncated=truncated, + limit_applied=n, + ) + + def get_profile( + self, schema: str, table: str, timeout_seconds: int = 30 + ) -> dict[str, dict[str, Any]]: + """Return per-column stats: null_count, distinct_count, min, max, sample_values.""" + columns = self.get_columns(schema, table) + adapter = self._adapter + quoted_table = ( + f"{adapter.quote_identifier(schema)}.{adapter.quote_identifier(table)}" + if schema and schema != "main" + else adapter.quote_identifier(table) + ) + + def _run_with_timeout(sql: str) -> Any: + """Run a stat query with a timeout; returns rows or None on timeout/error.""" + result_holder: dict[str, Any] = {} + exc_holder: list[Exception] = [] + + def _run() -> None: + try: + cur = self._conn.execute(sql) + result_holder["cursor_rows"] = cur.fetchall() + except Exception as e: + exc_holder.append(e) + + t = threading.Thread(target=_run, daemon=True) + t.start() + t.join(timeout=timeout_seconds) + if t.is_alive(): + with contextlib.suppress(AttributeError, Exception): + self._conn.interrupt() + t.join(timeout=1.0) + logger.warning("Profile query timed out after %ds: %s", timeout_seconds, sql[:120]) + return None + if exc_holder: + 
logger.debug("Profile query failed: %s", exc_holder[0]) + return None + return result_holder.get("cursor_rows") + + profile: dict[str, dict[str, Any]] = {} + for col in columns: + qcol = adapter.quote_identifier(col.name) + stats: dict[str, Any] = {} + + rows = _run_with_timeout(f"SELECT COUNT(*) FROM {quoted_table} WHERE {qcol} IS NULL") + stats["null_count"] = rows[0][0] if rows else None + + rows = _run_with_timeout(f"SELECT COUNT(DISTINCT {qcol}) FROM {quoted_table}") + stats["distinct_count"] = rows[0][0] if rows else None + + rows = _run_with_timeout(f"SELECT MIN({qcol}), MAX({qcol}) FROM {quoted_table}") + stats["min"] = rows[0][0] if rows else None + stats["max"] = rows[0][1] if rows else None + + rows = _run_with_timeout( + f"SELECT DISTINCT {qcol} FROM {quoted_table} WHERE {qcol} IS NOT NULL LIMIT 5" + ) + stats["sample_values"] = [r[0] for r in rows] if rows else [] + + profile[col.name] = stats + + return profile diff --git a/src/open_data_agent/docs_generator.py b/src/open_data_agent/docs_generator.py new file mode 100644 index 0000000..e6b6657 --- /dev/null +++ b/src/open_data_agent/docs_generator.py @@ -0,0 +1,262 @@ +"""DocGenerator — hierarchical markdown data catalog generator.""" + +from __future__ import annotations + +import logging +from datetime import UTC, datetime +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from open_data_agent.exceptions import SafetyError + +if TYPE_CHECKING: + from open_data_agent.db.schema import SchemaInspector + +__all__ = ["DocGenerator", "validate_path_component"] + +logger = logging.getLogger("open_data_agent.docs_generator") + +_STALENESS_MARKER = "generated_at:" + + +def validate_path_component(name: str) -> str: + """Sanitise a DB identifier for filesystem use; raises SafetyError on path-unsafe chars.""" + if not name: + raise SafetyError("Invalid identifier for path use: empty string") + if "/" in name or "\\" in name or "\0" in name or ".." 
in name: + raise SafetyError(f"Invalid identifier for path use: {name!r}") + return name + + +def _is_stale(doc_path: Path, staleness_days: int) -> bool: + """Return True if the doc is older than staleness_days (reads frontmatter only).""" + if not doc_path.exists(): + return True + + try: + content = doc_path.read_text() + lines = content.splitlines() + + if not lines or lines[0].strip() != "---": + return True # No frontmatter — treat as stale + for line in lines[1:]: + stripped = line.strip() + if stripped == "---": + break + if stripped.startswith(_STALENESS_MARKER): + ts_str = stripped[len(_STALENESS_MARKER) :].strip().strip('"') + ts = datetime.fromisoformat(ts_str) + if ts.tzinfo is None: + ts = ts.replace(tzinfo=UTC) + age_days = (datetime.now(UTC) - ts).days + return age_days > staleness_days + except Exception: + return True + + return True # no generated_at in frontmatter + + +class DocGenerator: + """Generates a hierarchical markdown data catalog from a SchemaInspector.""" + + def __init__( + self, + inspector: SchemaInspector, + db_type: str = "sqlite", + profile_timeout_seconds: int = 30, + ) -> None: + self._inspector = inspector + self._db_type = db_type + self._profile_timeout_seconds = profile_timeout_seconds + + def generate( + self, + output_dir: Path, + skip_existing: bool = False, + enrich: bool = False, + staleness_days: int = 7, + ) -> dict[str, Any]: + """Generate the full data catalog; returns {tables_generated, tables_skipped, tables_total}.""" + output_dir.mkdir(parents=True, exist_ok=True) + schemas = self._inspector.get_schemas() + + stats = {"tables_generated": 0, "tables_skipped": 0, "tables_total": 0} + catalog_entries: list[dict[str, Any]] = [] + + for schema in schemas: + tables = self._inspector.get_tables(schema) + stats["tables_total"] += len(tables) + + try: + schema_dir = output_dir / validate_path_component(schema) + except SafetyError: + logger.warning("Skipping schema '%s': invalid path component", schema) + continue + + 
schema_dir.mkdir(exist_ok=True) + schema_entries: list[str] = [] + + for table in tables: + try: + table_name = validate_path_component(table) + except SafetyError: + logger.warning("Skipping table '%s': invalid path component", table) + stats["tables_skipped"] += 1 + continue + + table_doc_path = schema_dir / f"{table_name}.md" + + if skip_existing and not _is_stale(table_doc_path, staleness_days): + logger.debug("Skipping '%s.%s' — up to date", schema, table) + stats["tables_skipped"] += 1 + schema_entries.append(table) + continue + + try: + cols = self._inspector.get_columns(schema, table) + sample = self._inspector.get_sample(schema, table, n=5) + profile = ( + self._inspector.get_profile( + schema, table, timeout_seconds=self._profile_timeout_seconds + ) + if enrich + else None + ) + generated_at = datetime.now(UTC).isoformat(timespec="seconds") + + content = self._render_table_doc( + schema, table, cols, sample, profile, generated_at + ) + table_doc_path.write_text(content) + stats["tables_generated"] += 1 + schema_entries.append(table) + logger.info("Generated doc for '%s.%s'", schema, table) + except OSError: + raise # systemic I/O failures (disk full, permission denied) + except Exception as exc: + logger.error("Failed to generate doc for '%s.%s': %s", schema, table, exc) + stats["tables_skipped"] += 1 + + self._write_schema_index(schema_dir, schema, schema_entries) + catalog_entries.append({"schema": schema, "tables": schema_entries}) + + self._write_catalog_index(output_dir, catalog_entries) + return stats + + def _render_table_doc( + self, + schema: str, + table: str, + cols: list[Any], + sample: Any, + profile: dict[str, Any] | None, + generated_at: str, + ) -> str: + """Render a table markdown document.""" + lines = [ + "---", + f"table: {table}", + f"schema: {schema}", + f'full_path: "{schema}.{table}"', + f"column_count: {len(cols)}", + f'generated_at: "{generated_at}"', + f"db_type: {self._db_type}", + "---", + "", + f"# {schema}.{table}", + "", + 
"## Columns", + "", + "| Name | Type | Nullable | PK | Default | Comment |", + "|------|------|----------|----|---------|---------|", + ] + for col in cols: + lines.append( + f"| {col.name} | {col.data_type} | " + f"{'YES' if col.is_nullable else 'NO'} | " + f"{'✓' if col.is_primary_key else ''} | " + f"{col.default_value or ''} | " + f"{col.comment or ''} |" + ) + + lines.append("") + lines.append("## Sample Rows") + lines.append("") + if sample.columns: + lines.append("| " + " | ".join(sample.columns) + " |") + lines.append("| " + " | ".join(["---"] * len(sample.columns)) + " |") + for row in sample.rows: + lines.append( + "| " + " | ".join(str(v) if v is not None else "NULL" for v in row) + " |" + ) + else: + lines.append("_No rows available_") + + if profile: + lines.append("") + lines.append("## Profile") + lines.append("") + lines.append("| Column | Null Count | Distinct | Min | Max | Sample Values |") + lines.append("|--------|-----------|---------|-----|-----|---------------|") + for col_name, stats in profile.items(): + samples = ", ".join(str(v) for v in (stats.get("sample_values") or [])[:3]) + lines.append( + f"| {col_name} | {stats.get('null_count', '')} | " + f"{stats.get('distinct_count', '')} | " + f"{stats.get('min', '')} | {stats.get('max', '')} | {samples} |" + ) + + return "\n".join(lines) + "\n" + + def _write_schema_index(self, schema_dir: Path, schema: str, tables: list[str]) -> None: + lines = [f"# Schema: {schema}", "", "## Tables", ""] + for t in sorted(tables): + lines.append(f"- [{t}](./{t}.md)") + (schema_dir / "_index.md").write_text("\n".join(lines) + "\n") + + def _write_catalog_index(self, output_dir: Path, catalog_entries: list[dict[str, Any]]) -> None: + generated_at = datetime.now(UTC).isoformat(timespec="seconds") + lines = [ + "---", + f'generated_at: "{generated_at}"', + "---", + "", + "# Data Catalog", + "", + "## Schemas", + "", + ] + for entry in catalog_entries: + schema = entry["schema"] + count = len(entry["tables"]) + 
lines.append(f"- [{schema}](./{schema}/_index.md) — {count} tables") + (output_dir / "_index.md").write_text("\n".join(lines) + "\n") + + def get_status(self, output_dir: Path, staleness_days: int = 7) -> dict[str, Any]: + """Return {total, up_to_date, stale, missing} counts for docs in output_dir.""" + if not output_dir.exists(): + return {"total": 0, "up_to_date": 0, "stale": 0, "missing": 0} + + schemas = self._inspector.get_schemas() + status = {"total": 0, "up_to_date": 0, "stale": 0, "missing": 0} + + for schema in schemas: + tables = self._inspector.get_tables(schema) + for table in tables: + status["total"] += 1 + try: + schema_name = validate_path_component(schema) + table_name = validate_path_component(table) + except SafetyError: + status["missing"] += 1 + continue + + doc_path = output_dir / schema_name / f"{table_name}.md" + if not doc_path.exists(): + status["missing"] += 1 + elif _is_stale(doc_path, staleness_days): + status["stale"] += 1 + else: + status["up_to_date"] += 1 + + return status diff --git a/src/open_data_agent/eval_runner.py b/src/open_data_agent/eval_runner.py new file mode 100644 index 0000000..9f5a5fb --- /dev/null +++ b/src/open_data_agent/eval_runner.py @@ -0,0 +1,169 @@ +"""EvalRunner — Golden SQL regression framework.""" + +from __future__ import annotations + +import logging +import re +from pathlib import Path +from typing import Any + +import yaml + +from open_data_agent.exceptions import ConfigError + +__all__ = ["EvalRunner", "EvalResult"] + +logger = logging.getLogger("open_data_agent.eval_runner") + +# Domain hyphens vs sequence separator resolved via isdigit() check in next_id_for_domain(). +_ID_PATTERN = re.compile(r"^[a-z][a-z0-9_-]*-\d{3}$") # e.g. 
sales-001, order-items-001 + +_GOLDEN_QUERIES_HEADER = ( + "# golden_queries.yaml — Golden SQL regression suite\n" + "# Each entry: id, question, sql, dialects, active\n" + "---\n" +) + +EvalResult = dict[str, Any] + + +class EvalRunner: + """Loads, filters, and runs golden SQL queries against a live database.""" + + def load(self, path: Path) -> list[dict[str, Any]]: + """Parse golden_queries.yaml; raises ConfigError if malformed or missing required fields.""" + if not path.exists(): + return [] + try: + raw = yaml.safe_load(path.read_text(encoding="utf-8")) + except yaml.YAMLError as exc: + raise ConfigError(f"Failed to parse {path}: {exc}") from exc + + if raw is None: + return [] + if not isinstance(raw, list): + raise ConfigError(f"{path} must contain a YAML list of entries") + + entries: list[dict[str, Any]] = [] + for i, item in enumerate(raw): + if not isinstance(item, dict): + raise ConfigError(f"Entry {i} in {path} is not a mapping") + for required in ("id", "question", "sql", "dialects"): + if required not in item: + raise ConfigError(f"Entry {i} in {path} missing required field '{required}'") + entry_id = str(item["id"]) + if not _ID_PATTERN.match(entry_id): + raise ConfigError( + f"Entry {i} in {path} has invalid id '{entry_id}'. " + "IDs must match pattern: - (e.g. 
sales-001, order-items-001)" + ) + item.setdefault("active", True) + entries.append(item) + + return entries + + def filter_for_dialect( + self, entries: list[dict[str, Any]], db_type: str + ) -> list[dict[str, Any]]: + """Return entries where db_type is in dialects list and active is True.""" + return [ + e for e in entries if e.get("active", True) and db_type in (e.get("dialects") or []) + ] + + def run( + self, + entries: list[dict[str, Any]], + adapter: Any, + query_engine: Any, + min_rows: int = 1, + ) -> list[EvalResult]: + """Execute each entry and return EvalResult dicts with pass/fail status.""" + results: list[EvalResult] = [] + for entry in entries: + qid = entry["id"] + question = entry["question"] + sql = entry["sql"] + try: + qr = query_engine.execute(sql, question=question) + passed = qr.error is None and qr.row_count >= min_rows + results.append( + { + "id": qid, + "question": question, + "sql": sql, + "passed": passed, + "row_count": qr.row_count, + "duration_ms": round(qr.duration_ms, 2), + "error": qr.error, + } + ) + except Exception as exc: + logger.error("Eval %s failed with exception: %s", qid, exc) + results.append( + { + "id": qid, + "question": question, + "sql": sql, + "passed": False, + "row_count": 0, + "duration_ms": 0.0, + "error": str(exc), + } + ) + return results + + def next_id_for_domain(self, entries: list[dict[str, Any]], domain: str) -> str: + """Return the next sequential ID for domain, e.g. sales-003 (domain must match exactly).""" + max_seq = 0 + prefix = f"{domain}-" + for e in entries: + eid = str(e.get("id", "")) + if eid.startswith(prefix): + tail = eid[len(prefix) :] + if tail.isdigit(): + max_seq = max(max_seq, int(tail)) + next_seq = max_seq + 1 + if next_seq > 999: + raise ConfigError( + f"Domain '{domain}' has reached the maximum of 999 entries. " + "Use a more specific domain name (e.g. 'sales-2026') to continue." 
+ ) + return f"{domain}-{next_seq:03d}" + + def append_entry( + self, + path: Path, + entry: dict[str, Any], + existing: list[dict[str, Any]], + ) -> None: + """Append a new entry to golden_queries.yaml; raises ConfigError on duplicate or invalid ID.""" + entry_id = str(entry["id"]) + if not _ID_PATTERN.match(entry_id): + raise ConfigError( + f"Invalid id '{entry_id}'. IDs must match pattern: - " + "(e.g. sales-001, order-items-001)" + ) + + existing_ids = {str(e["id"]) for e in existing} + if entry_id in existing_ids: + raise ConfigError(f"ID '{entry_id}' already exists. IDs are permanent.") + + path.parent.mkdir(parents=True, exist_ok=True) + + # Re-read from disk to reduce TOCTOU window before writing. + on_disk = self.load(path) + on_disk_ids = {str(e["id"]) for e in on_disk} + if entry_id in on_disk_ids: + raise ConfigError(f"ID '{entry_id}' already exists. IDs are permanent.") + + all_entries = on_disk + [entry] + try: + path.write_text( + _GOLDEN_QUERIES_HEADER + + yaml.dump( + all_entries, default_flow_style=False, allow_unicode=True, sort_keys=False + ), + encoding="utf-8", + ) + except OSError as exc: + raise ConfigError(f"Failed to write golden queries file '{path}': {exc}") from exc diff --git a/src/open_data_agent/exceptions.py b/src/open_data_agent/exceptions.py new file mode 100644 index 0000000..bafed92 --- /dev/null +++ b/src/open_data_agent/exceptions.py @@ -0,0 +1,41 @@ +"""All custom exceptions and warnings for open-data-agent.""" + +from __future__ import annotations + +__all__ = [ + "OdaError", + "ConnectionError", + "SafetyError", + "ConfigError", + "DialectError", + "EvalFailure", + "StaleDocsWarning", +] + + +class OdaError(Exception): + """Base class for all open-data-agent errors.""" + + +class ConnectionError(OdaError): + """Raised when a database connection cannot be established or is invalid.""" + + +class SafetyError(OdaError): + """Raised when a SQL query is blocked by the safety checker.""" + + +class ConfigError(OdaError): + 
"""Raised when configuration is missing, invalid, or cannot be resolved.""" + + +class DialectError(OdaError): + """Raised when an unsupported or invalid dialect operation is requested.""" + + +class EvalFailure(OdaError): + """Raised when a golden SQL eval query produces unexpected results.""" + + +class StaleDocsWarning(UserWarning): + """Warning emitted when schema documentation is older than the staleness threshold.""" diff --git a/src/open_data_agent/history.py b/src/open_data_agent/history.py new file mode 100644 index 0000000..52bf262 --- /dev/null +++ b/src/open_data_agent/history.py @@ -0,0 +1,101 @@ +"""HistoryTracker — JSONL query log with lazy iteration.""" + +from __future__ import annotations + +import json +import logging +import re +import uuid +from collections.abc import Generator +from datetime import UTC, datetime +from pathlib import Path +from typing import Any + +from open_data_agent.config import get_config_dir + +__all__ = ["HistoryTracker"] + +logger = logging.getLogger("open_data_agent.history") + +_DEFAULT_HISTORY_FILE = "history.jsonl" + +_SECRET_PATTERNS = [ + re.compile(r"password\s*=", re.IGNORECASE), + re.compile(r"secret\s*=", re.IGNORECASE), + re.compile(r"token\s*=", re.IGNORECASE), + re.compile(r"api_key\s*=", re.IGNORECASE), +] + + +def _extract_tables_from_sql(sql: str) -> list[str]: + """Heuristic extraction of table names referenced in SQL.""" + pattern = re.compile( + r"\bFROM\s+([\w.\"` ]+?)(?:\s+(?:WHERE|JOIN|LEFT|RIGHT|INNER|OUTER|GROUP|ORDER|LIMIT|ON|$))", + re.IGNORECASE, + ) + join_pattern = re.compile( + r"\bJOIN\s+([\w.\"` ]+?)(?:\s+(?:ON|WHERE|LEFT|RIGHT|INNER|OUTER|GROUP|ORDER|LIMIT|$))", + re.IGNORECASE, + ) + tables: list[str] = [] + for match in pattern.finditer(sql): + raw = match.group(1).strip().strip('"').strip("`").strip("'") + if raw: + tables.append(raw) + for match in join_pattern.finditer(sql): + raw = match.group(1).strip().strip('"').strip("`").strip("'") + if raw: + tables.append(raw) + return 
list(dict.fromkeys(tables)) + + +class HistoryTracker: + """Appends query execution records to a JSONL file and provides lazy iteration.""" + + def __init__(self, history_file: Path | None = None) -> None: + self._history_file = history_file or (get_config_dir() / _DEFAULT_HISTORY_FILE) + + def append(self, entry: dict[str, Any]) -> None: + """Append a single JSONL entry to the history file (created if absent).""" + sql = entry.get("sql", "") + for pattern in _SECRET_PATTERNS: + if pattern.search(sql): + logger.warning( + "SQL may contain hardcoded secrets (matched pattern: %s)", pattern.pattern + ) + break + + self._history_file.parent.mkdir(parents=True, exist_ok=True) + with open(self._history_file, "a", encoding="utf-8") as f: + f.write(json.dumps(entry) + "\n") + + def append_result( + self, result: Any, question: str | None = None, connection: str = "", db_type: str = "" + ) -> None: + """Convenience method: create an entry from a QueryResult and append it.""" + entry = { + "id": str(uuid.uuid4()), + "timestamp": datetime.now(UTC).isoformat(timespec="seconds"), + "connection": connection, + "db_type": db_type, + "sql": result.sql, + "tables": _extract_tables_from_sql(result.sql), + "row_count": result.row_count, + "duration_ms": round(result.duration_ms, 2), + "question": question, + "error": result.error, + } + self.append(entry) + + def iter_entries(self) -> Generator[dict[str, Any], None, None]: + """Lazily yield parsed history entries from the JSONL file.""" + if not self._history_file.exists(): + return + with open(self._history_file, encoding="utf-8") as f: + for line in f: + line = line.strip() + if line: + try: + yield json.loads(line) + except json.JSONDecodeError as exc: + logger.warning("Skipping malformed history line: %s", exc) diff --git a/src/open_data_agent/memory.py b/src/open_data_agent/memory.py new file mode 100644 index 0000000..0f0b4fe --- /dev/null +++ b/src/open_data_agent/memory.py @@ -0,0 +1,147 @@ +"""MemoryManager — curated 
knowledge files with YAML frontmatter.""" + +from __future__ import annotations + +import logging +import re +from datetime import UTC, datetime +from pathlib import Path +from typing import Any + +import yaml + +from open_data_agent.exceptions import ConfigError + +__all__ = ["MemoryManager"] + +logger = logging.getLogger("open_data_agent.memory") + +_VALID_CATEGORIES = frozenset({"query_pattern", "business_context", "correction", "data_quality"}) +_DEFAULT_MEMORY_DIR = Path("memory") + + +def _slugify(text: str) -> str: + """Convert text to a filesystem-safe slug.""" + slug = text.lower() + slug = re.sub(r"[^\w\s-]", "", slug) + slug = re.sub(r"[-\s]+", "-", slug) + return slug.strip("-")[:60] + + +class MemoryManager: + """Manages a flat directory of curated markdown knowledge files.""" + + def __init__(self, memory_dir: Path | None = None) -> None: + self._dir = memory_dir or _DEFAULT_MEMORY_DIR + + def _ensure_dir(self) -> None: + self._dir.mkdir(parents=True, exist_ok=True) + + def add( + self, + title: str, + content: str, + category: str, + tags: list[str] | None = None, + ) -> Path: + """Create a new markdown memory file; raises ConfigError for invalid category.""" + if category not in _VALID_CATEGORIES: + raise ConfigError( + f"Invalid category '{category}'. 
" + f"Must be one of: {', '.join(sorted(_VALID_CATEGORIES))}" + ) + + self._ensure_dir() + slug = _slugify(title) + target = self._dir / f"{slug}.md" + + if target.exists(): + base = slug + i = 1 + while target.exists(): + target = self._dir / f"{base}-{i}.md" + i += 1 + + frontmatter: dict[str, Any] = { + "title": title, + "category": category, + "created_at": datetime.now(UTC).isoformat(timespec="seconds"), + "tags": tags or [], + } + + fm_str = yaml.dump(frontmatter, default_flow_style=False, allow_unicode=True) + file_content = f"---\n{fm_str}---\n\n{content}\n" + target.write_text(file_content, encoding="utf-8") + logger.info("Created memory file: %s", target) + return target + + def list_entries(self) -> list[dict[str, Any]]: + """Return all memory entries as frontmatter dicts (no body content).""" + if not self._dir.exists(): + return [] + results: list[dict[str, Any]] = [] + for path in sorted(self._dir.glob("*.md")): + entry = self._parse_frontmatter(path) + if entry is not None: + entry["_path"] = str(path) + results.append(entry) + return results + + def search(self, term: str) -> list[dict[str, Any]]: + """Return entries whose title, tags, or body contain term (case-insensitive).""" + term_lower = term.lower() + results: list[dict[str, Any]] = [] + if not self._dir.exists(): + return results + + for path in sorted(self._dir.glob("*.md")): + try: + text = path.read_text(encoding="utf-8") + except OSError as exc: + logger.warning("Could not read memory file %s: %s", path, exc) + continue + fm = self._parse_frontmatter_from_text(path, text) + if fm is None: + continue + + if term_lower in str(fm.get("title", "")).lower(): + fm["_path"] = str(path) + results.append(fm) + continue + + tags = fm.get("tags") or [] + if any(term_lower in str(t).lower() for t in tags): + fm["_path"] = str(path) + results.append(fm) + continue + + if term_lower in text.lower(): + fm["_path"] = str(path) + results.append(fm) + + return results + + def 
_parse_frontmatter_from_text(self, path: Path, text: str) -> dict[str, Any] | None: + """Parse YAML frontmatter from an already-read markdown string.""" + try: + if not text.startswith("---"): + return None + parts = text.split("---", 2) + if len(parts) < 3: + return None + fm = yaml.safe_load(parts[1]) or {} + if not isinstance(fm, dict): + return None + return fm + except Exception as exc: + logger.warning("Failed to parse frontmatter in %s: %s", path, exc) + return None + + def _parse_frontmatter(self, path: Path) -> dict[str, Any] | None: + """Parse YAML frontmatter from a markdown file.""" + try: + text = path.read_text(encoding="utf-8") + except OSError as exc: + logger.warning("Failed to read memory file %s: %s", path, exc) + return None + return self._parse_frontmatter_from_text(path, text) diff --git a/src/open_data_agent/models.py b/src/open_data_agent/models.py new file mode 100644 index 0000000..9fa43a4 --- /dev/null +++ b/src/open_data_agent/models.py @@ -0,0 +1,9 @@ +"""Public re-export of shared dataclasses.""" + +from __future__ import annotations + +from open_data_agent.config import Config +from open_data_agent.db.query import QueryResult +from open_data_agent.db.schema import NormalizedColumn + +__all__ = ["NormalizedColumn", "QueryResult", "Config"] diff --git a/src/open_data_agent/py.typed b/src/open_data_agent/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/open_data_agent/templates/data-agent.md.tmpl b/src/open_data_agent/templates/data-agent.md.tmpl new file mode 100644 index 0000000..d94354d --- /dev/null +++ b/src/open_data_agent/templates/data-agent.md.tmpl @@ -0,0 +1,207 @@ +# Data Agent — {{connection_name}} ({{db_type}}) + +> Generated: {{generated_at}} +> Connection: **{{connection_name}}** | DB: **{{db_type}}** @ `{{db_host}}/{{db_name}}` + +--- + +## Block 1 — Connection Context + +You are a data agent connected to a **{{db_type}}** database. 
+
+| Field | Value |
+|---|---|
+| Connection name | `{{connection_name}}` |
+| Database type | `{{db_type}}` |
+| Host | `{{db_host}}` |
+| Database | `{{db_name}}` |
+| Rules generated at | `{{generated_at}}` |
+
+Your role is to translate natural language questions into accurate, read-only SQL queries
+using the `oda` CLI commands documented below.
+
+---
+
+## Block 2 — Schema Exploration Commands
+
+Use these commands to discover the database structure before writing SQL:
+
+```bash
+# List all schemas (namespaces/databases)
+oda schemas
+
+# List tables in a schema (omit schema for default)
+oda tables
+oda tables <schema>
+
+# Show column definitions for a table
+oda describe <schema.table>
+# Example:
+oda describe public.customers
+
+# Preview sample rows (default 5 rows)
+oda sample <schema.table>
+oda sample <schema.table> --n 10
+
+# Column statistics: null count, distinct count, min/max values
+oda profile <schema.table>
+# Example:
+oda profile public.orders
+```
+
+**Workflow:** Always run `oda describe
<schema.table>` before querying an unfamiliar table.
+Run `oda profile
<schema.table>` to understand value distributions and detect data quality issues.
+
+---
+
+## Block 3 — Query Execution Commands
+
+```bash
+# Execute a read-only SQL query (LIMIT auto-injected, default 1000 rows)
+oda query "<sql>"
+
+# Examples:
+oda query "SELECT * FROM customers WHERE status = 'active'"
+oda query "SELECT COUNT(*) FROM orders GROUP BY status"
+
+# Strict mode: blocks execution if schema docs are stale
+oda query "<sql>" --strict
+
+# Query with strict mode example:
+oda query "SELECT SUM(amount) FROM payments" --strict
+```
+
+**Behaviour:**
+- `SafetyChecker` validates the SQL before execution — INSERT/UPDATE/DELETE/DROP are blocked
+- A `LIMIT` clause is automatically appended if missing (default: 1,000 rows)
+- Existing `LIMIT` values exceeding 10,000 are clamped to 10,000
+- Execution times out after 30 seconds
+- `--strict` blocks the query if `oda docs status` reports stale docs (older than 7 days)
+- Without `--strict`, stale docs emit a warning but the query still executes
+- Every executed query is automatically logged to `~/.config/open-data-agent/history.jsonl`
+
+---
+
+## Block 4 — Docs Management Commands
+
+```bash
+# Regenerate the full schema documentation catalog
+oda docs generate
+
+# Skip tables that already have fresh docs
+oda docs generate --skip-existing
+
+# Include column profile statistics (nulls, distinct, min/max) in generated docs
+oda docs generate --enrich
+
+# Check whether docs are up to date (exit 0 = fresh, exit 1 = stale)
+oda docs status
+```
+
+**Schema docs** are written to `docs/data-catalog/`:
+- `docs/data-catalog/_index.md` — master catalog listing all schemas and tables
+- `docs/data-catalog/<schema>/_index.md` — schema-level index
+- `docs/data-catalog/<schema>/<table>
.md` — per-table doc with columns, sample rows, profile stats + +**Always read `_index.md` first**, then navigate to specific table docs for column details. +Warn the user if `generated_at` in any doc is older than 7 days. + +--- + +## Block 5 — Memory Commands + +```bash +# Add a curated knowledge entry (interactive) +oda memory add + +# Add non-interactively +oda memory add --title "Revenue column" --category data_quality --content "Use net_item_price not item_price" + +# With tags +oda memory add --title "Status values" --category business_context --content "Active=1, Inactive=0" --tags "status,lookup" + +# List all memory entries +oda memory list + +# Search memory by keyword (matches title, body content, tags — case-insensitive) +oda memory search revenue +oda memory search "net price" +``` + +**Categories:** `query_pattern` | `business_context` | `correction` | `data_quality` + +**Workflow:** +1. Before writing SQL for any column involved in known data quality issues, run `oda memory search ` +2. After discovering a data quirk, run `oda memory add` to record it for future reference +3. Run `oda memory list` to review the full knowledge base + +Memory files are stored in `./memory/` as markdown with YAML frontmatter. + +--- + +## Block 6 — Self-Healing Guidance + +When queries return 0 rows, errors, or unexpected results, follow this step-by-step checklist: + +### Zero-Row Diagnostics + +1. **Read the diagnostic output** — when a query returns 0 rows, `oda query` automatically + prints diagnostic information to stderr: + - Row counts for each table referenced in the SQL + - Up to 3 sample values for each filter column + - NULL counts for filter columns + - Suggested checks (e.g. "Column 'status' values seen: ['active', 'pending'] — your filter used 'closed'") + +2. **Check date filter ranges** against the `column range` shown in diagnostic context. + Use `oda profile
` to see actual min/max dates. + +3. **Run `oda memory search `** for known issues with filter columns. + Example: `oda memory search status` + +4. **Try a broader query first**, then narrow down: + ```bash + oda query "SELECT DISTINCT status FROM orders" # see all values + oda query "SELECT * FROM orders LIMIT 5" # confirm table has rows + ``` + +5. **Check column value casing** — `{{db_type}}` may be case-sensitive. + Example: `'Active'` ≠ `'active'` in some databases. + +6. **Verify JOIN conditions** are not silently eliminating rows: + ```bash + oda query "SELECT COUNT(*) FROM orders" # rows in orders + oda query "SELECT COUNT(*) FROM orders JOIN customers ON ..." # rows after join + ``` + +### Stale Docs Resolution + +If `oda query --strict` is blocked with "Docs are stale": + +```bash +oda docs generate # regenerate full catalog +oda docs generate --enrich # include profile stats +oda docs status # verify docs are now fresh +``` + +### Eval Run Workflow + +Run the golden SQL regression suite to validate agent quality after schema changes: + +```bash +oda eval run # run all golden queries for active dialect +oda eval results # review last run results +oda eval add # add a new known-good query as a regression test +``` + +The eval suite exits with code `1` if any query fails — suitable for CI integration. +Results are written to `evals/results/latest.json`. 
+ +--- + +## Safety Rules + +- **NEVER** execute INSERT, UPDATE, DELETE, DROP, TRUNCATE, ALTER, or any write operation +- **NEVER** expose or log connection credentials +- All queries are automatically limited to `{{db_type}}`-safe read-only operations +- `oda query` enforces a hard row limit (default 1,000 rows; max 10,000) +- `SafetyChecker` blocks dangerous SQL patterns before every execution — there is no bypass diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..09bf6d7 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,97 @@ +"""Shared pytest fixtures for all tests.""" + +from __future__ import annotations + +import sqlite3 +from collections.abc import Generator +from pathlib import Path +from unittest.mock import patch + +import pytest + +_FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# --------------------------------------------------------------------------- +# Keyring: replace OS keychain with an in-memory dict for all unit tests. +# Without this, tests would either require a real keychain or fail on CI. 
+# --------------------------------------------------------------------------- + + +@pytest.fixture(autouse=True) +def mock_keyring() -> Generator[dict[str, str], None, None]: + """Autouse fixture: redirect keyring to an in-memory store for all tests.""" + store: dict[str, str] = {} + + def _set(name: str, password: str) -> bool: + store[name] = password + return True + + def _get(name: str) -> str | None: + return store.get(name) + + def _delete(name: str) -> None: + store.pop(name, None) + + with ( + patch.object( + __import__( + "open_data_agent.db.connection", fromlist=["ConnectionManager"] + ).ConnectionManager, + "_keyring_set", + lambda self, n, p: _set(n, p), + ), + patch.object( + __import__( + "open_data_agent.db.connection", fromlist=["ConnectionManager"] + ).ConnectionManager, + "_keyring_get", + lambda self, n: _get(n), + ), + patch.object( + __import__( + "open_data_agent.db.connection", fromlist=["ConnectionManager"] + ).ConnectionManager, + "_keyring_delete", + lambda self, n: _delete(n), + ), + ): + yield store + + +@pytest.fixture(scope="session") +def sqlite_db() -> Generator[sqlite3.Connection, None, None]: + """Return a session-scoped in-memory SQLite connection with the standard fixture schema. + + Tables: customers, orders, products (see tests/fixtures/schema.sql) + check_same_thread=False is required because QueryEngine executes queries in a worker thread. + """ + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.row_factory = sqlite3.Row + + schema_sql = (_FIXTURES_DIR / "schema.sql").read_text() + conn.executescript(schema_sql) + conn.commit() + + yield conn + + conn.close() + + +@pytest.fixture(scope="session") +def sqlite_db_with_data() -> Generator[sqlite3.Connection, None, None]: + """Session-scoped in-memory SQLite connection with standard schema AND seed data. + + check_same_thread=False is required because QueryEngine executes queries in a worker thread. 
+ """ + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.row_factory = sqlite3.Row + + schema_sql = (_FIXTURES_DIR / "schema.sql").read_text() + seed_sql = (_FIXTURES_DIR / "seed_data.sql").read_text() + conn.executescript(schema_sql) + conn.executescript(seed_sql) + conn.commit() + + yield conn + + conn.close() diff --git a/tests/fixtures/schema.sql b/tests/fixtures/schema.sql new file mode 100644 index 0000000..1b3719e --- /dev/null +++ b/tests/fixtures/schema.sql @@ -0,0 +1,24 @@ +-- Standard test fixture schema +-- Used by all unit tests via the sqlite_db fixture in conftest.py + +CREATE TABLE customers ( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + email TEXT, + created_at TEXT +); + +CREATE TABLE orders ( + id INTEGER PRIMARY KEY, + customer_id INTEGER, + amount REAL, + status TEXT, + created_at TEXT +); + +CREATE TABLE products ( + id INTEGER PRIMARY KEY, + name TEXT, + price REAL, + category TEXT +); diff --git a/tests/fixtures/seed_data.sql b/tests/fixtures/seed_data.sql new file mode 100644 index 0000000..9ae365e --- /dev/null +++ b/tests/fixtures/seed_data.sql @@ -0,0 +1,17 @@ +-- Seed data for integration tests + +INSERT INTO customers (id, name, email, created_at) VALUES + (1, 'Alice Smith', 'alice@example.com', '2025-01-15'), + (2, 'Bob Jones', 'bob@example.com', '2025-02-20'), + (3, 'Carol White', NULL, '2025-03-05'); + +INSERT INTO orders (id, customer_id, amount, status, created_at) VALUES + (1, 1, 150.00, 'shipped', '2025-04-01'), + (2, 1, 75.50, 'pending', '2025-04-10'), + (3, 2, 200.00, 'cancelled', '2025-04-15'), + (4, 3, 50.00, 'shipped', '2025-04-20'); + +INSERT INTO products (id, name, price, category) VALUES + (1, 'Widget A', 29.99, 'widgets'), + (2, 'Widget B', 49.99, 'widgets'), + (3, 'Gadget X', 99.99, 'gadgets'); diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 
index 0000000..e69de29 diff --git a/tests/unit/cli/__init__.py b/tests/unit/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/cli/test_cli_connections.py b/tests/unit/cli/test_cli_connections.py new file mode 100644 index 0000000..db9a4e0 --- /dev/null +++ b/tests/unit/cli/test_cli_connections.py @@ -0,0 +1,192 @@ +"""Unit tests for oda connections and oda connect commands.""" + +from __future__ import annotations + +import stat +import sys +from pathlib import Path +from unittest.mock import patch + +import pytest +from click.testing import CliRunner + +from open_data_agent.cli import cli +from open_data_agent.db.connection import ConnectionManager, sanitise_dsn +from open_data_agent.exceptions import ConfigError + +# ── sanitise_dsn ───────────────────────────────────────────────────────────── + + +class TestSanitiseDsn: + def test_masks_url_style_password(self) -> None: + dsn = "postgresql://user:s3cr3t@localhost:5432/mydb" + result = sanitise_dsn(dsn) + assert "s3cr3t" not in result + assert "***" in result + + def test_masks_key_value_password(self) -> None: + dsn = "host=localhost user=alice password=secret" + result = sanitise_dsn(dsn) + assert "secret" not in result + assert "***" in result + + def test_preserves_non_sensitive_parts(self) -> None: + dsn = "postgresql://user:pass@localhost:5432/db" + result = sanitise_dsn(dsn) + assert "localhost" in result + assert "5432" in result + + +# ── ConnectionManager ───────────────────────────────────────────────────────── + + +class TestConnectionManager: + _BASE_PARAMS = { + "db_type": "postgresql", + "host": "localhost", + "port": 5432, + "database": "mydb", + "username": "alice", + "password": "s3cr3t", + } + + def test_save_and_reload_round_trip(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + mgr.save_connection("my-pg", self._BASE_PARAMS) + loaded = mgr.get_connection("my-pg") + assert loaded["host"] == "localhost" + assert loaded["password"] 
== "s3cr3t" + + def test_list_connections_no_passwords(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + mgr.save_connection("my-pg", self._BASE_PARAMS) + entries = mgr.list_connections() + assert len(entries) == 1 + entry = entries[0] + assert "password" not in entry + assert entry["name"] == "my-pg" + + def test_remove_connection(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + mgr.save_connection("my-pg", self._BASE_PARAMS) + mgr.remove_connection("my-pg") + assert not mgr.connection_exists("my-pg") + + def test_remove_nonexistent_raises_config_error(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + with pytest.raises(ConfigError): + mgr.remove_connection("nonexistent") + + def test_get_nonexistent_raises_config_error(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + with pytest.raises(ConfigError): + mgr.get_connection("nonexistent") + + def test_invalid_db_type_raises_config_error(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + bad_params = {**self._BASE_PARAMS, "db_type": "mssql"} + with pytest.raises(ConfigError, match="Invalid db_type"): + mgr.save_connection("bad", bad_params) + + def test_missing_fields_raises_config_error(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + with pytest.raises(ConfigError, match="Missing connection fields"): + mgr.save_connection("bad", {"db_type": "postgresql"}) + + @pytest.mark.skipif(sys.platform == "win32", reason="chmod not applicable on Windows") + def test_connections_yaml_has_0o600_permissions(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + mgr.save_connection("my-pg", self._BASE_PARAMS) + path = tmp_path / "connections.yaml" + assert stat.S_IMODE(path.stat().st_mode) == 0o600 + + +# ── Active connection state ─────────────────────────────────────────────────── + + +class TestActiveConnection: + 
_BASE_PARAMS = { + "db_type": "postgresql", + "host": "localhost", + "port": 5432, + "database": "mydb", + "username": "alice", + "password": "s3cr3t", + } + + def test_set_and_get_active_connection(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + mgr.save_connection("my-pg", self._BASE_PARAMS) + mgr.set_active_connection("my-pg") + assert mgr.get_active_connection() == "my-pg" + + def test_get_active_returns_none_when_not_set(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + assert mgr.get_active_connection() is None + + def test_set_active_nonexistent_raises_config_error(self, tmp_path: Path) -> None: + mgr = ConnectionManager(config_dir=tmp_path) + with pytest.raises(ConfigError): + mgr.set_active_connection("nonexistent") + + +# ── CLI commands ────────────────────────────────────────────────────────────── + + +class TestConnectionsListCLI: + def test_list_empty_shows_message(self, tmp_path: Path) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_connections.ConnectionManager") as MockMgr: + MockMgr.return_value.list_connections.return_value = [] + result = runner.invoke(cli, ["connections", "list"]) + assert result.exit_code == 0 + assert "No connections" in result.output + + def test_list_shows_connections(self, tmp_path: Path) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_connections.ConnectionManager") as MockMgr: + MockMgr.return_value.list_connections.return_value = [ + { + "name": "my-pg", + "db_type": "postgresql", + "host": "localhost", + "port": 5432, + "database": "mydb", + "username": "alice", + } + ] + result = runner.invoke(cli, ["connections", "list"]) + assert result.exit_code == 0 + assert "my-pg" in result.output + assert "password" not in result.output.lower() + + +class TestConnectionsRemoveCLI: + def test_remove_nonexistent_exits_nonzero(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_connections.ConnectionManager") as 
MockMgr: + MockMgr.return_value.remove_connection.side_effect = ConfigError("Not found") + result = runner.invoke(cli, ["connections", "remove", "nonexistent"]) + assert result.exit_code != 0 + + +class TestConnectCLI: + def test_connect_sets_active(self, tmp_path: Path) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_connections.ConnectionManager") as MockMgr: + MockMgr.return_value.get_connection.return_value = { + "db_type": "postgresql", + "host": "localhost", + "database": "mydb", + } + MockMgr.return_value.set_active_connection.return_value = None + result = runner.invoke(cli, ["connect", "my-pg"]) + assert result.exit_code == 0 + assert "my-pg" in result.output + + def test_connect_missing_name_exits_nonzero(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_connections.ConnectionManager") as MockMgr: + MockMgr.return_value.get_connection.side_effect = ConfigError("Not found") + result = runner.invoke(cli, ["connect", "nonexistent"]) + assert result.exit_code != 0 diff --git a/tests/unit/cli/test_cli_docs.py b/tests/unit/cli/test_cli_docs.py new file mode 100644 index 0000000..04d448c --- /dev/null +++ b/tests/unit/cli/test_cli_docs.py @@ -0,0 +1,120 @@ +"""Unit tests for oda docs CLI commands (generate, status).""" + +from __future__ import annotations + +import sqlite3 +from pathlib import Path +from unittest.mock import patch + +from click.testing import CliRunner + +from open_data_agent.cli import cli + + +def _make_params(db_path: str) -> dict: + return { + "db_type": "sqlite", + "database": db_path, + "host": "localhost", + "port": 0, + "username": "", + "password": "", + } + + +def _setup_db(tmp_path: Path) -> str: + db_path = str(tmp_path / "test.db") + conn = sqlite3.connect(db_path, check_same_thread=False) + conn.execute("CREATE TABLE customers (id INTEGER PRIMARY KEY, name TEXT)") + conn.execute("INSERT INTO customers VALUES (1, 'Alice')") + conn.commit() + conn.close() + return db_path + + +class 
TestOdaDocsGenerateNoActiveConnection: + def test_exits_1_when_no_active_connection(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_docs.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = None + result = runner.invoke(cli, ["docs", "generate"]) + assert result.exit_code == 1 + + +class TestOdaDocsGenerate: + def test_generates_catalog_files(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + out_dir = tmp_path / "catalog" + runner = CliRunner() + with patch("open_data_agent.cli_docs.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["docs", "generate", "--output-dir", str(out_dir)]) + assert result.exit_code == 0 + assert (out_dir / "_index.md").exists() + assert (out_dir / "main" / "customers.md").exists() + + def test_output_shows_tables_generated(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + out_dir = tmp_path / "catalog" + runner = CliRunner() + with patch("open_data_agent.cli_docs.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["docs", "generate", "--output-dir", str(out_dir)]) + assert result.exit_code == 0 + assert "tables written" in result.output or "generated" in result.output.lower() + + def test_skip_existing_flag_accepted(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + out_dir = tmp_path / "catalog" + runner = CliRunner() + with patch("open_data_agent.cli_docs.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + # First run + runner.invoke(cli, ["docs", "generate", "--output-dir", str(out_dir)]) + # 
Second run with --skip-existing + result = runner.invoke( + cli, ["docs", "generate", "--skip-existing", "--output-dir", str(out_dir)] + ) + assert result.exit_code == 0 + + +class TestOdaDocsStatus: + def test_exits_1_when_no_active_connection(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_docs.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = None + result = runner.invoke(cli, ["docs", "status"]) + assert result.exit_code == 1 + + def test_exits_1_when_a_table_doc_is_missing(self, tmp_path: Path) -> None: + """Generate docs then delete one table doc — status should exit 1 (missing).""" + db_path = _setup_db(tmp_path) + out_dir = tmp_path / "catalog" + runner = CliRunner() + with patch("open_data_agent.cli_docs.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + # Generate first so the catalog exists + runner.invoke(cli, ["docs", "generate", "--output-dir", str(out_dir)]) + # Delete one table doc to simulate missing + table_doc = out_dir / "main" / "customers.md" + if table_doc.exists(): + table_doc.unlink() + result = runner.invoke(cli, ["docs", "status", "--output-dir", str(out_dir)]) + assert result.exit_code == 1 + + def test_exits_0_when_docs_fresh(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + out_dir = tmp_path / "catalog" + runner = CliRunner() + with patch("open_data_agent.cli_docs.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + # Generate first + runner.invoke(cli, ["docs", "generate", "--output-dir", str(out_dir)]) + # Status should pass + result = runner.invoke(cli, ["docs", "status", "--output-dir", str(out_dir)]) + assert result.exit_code == 0 diff --git a/tests/unit/cli/test_cli_eval.py 
b/tests/unit/cli/test_cli_eval.py new file mode 100644 index 0000000..2bb5c99 --- /dev/null +++ b/tests/unit/cli/test_cli_eval.py @@ -0,0 +1,279 @@ +"""Unit tests for oda eval CLI commands (run, results, add).""" + +from __future__ import annotations + +import json +import sqlite3 +from pathlib import Path +from unittest.mock import patch + +import yaml +from click.testing import CliRunner + +from open_data_agent.cli import cli + + +def _make_params(db_path: str) -> dict: + return { + "db_type": "sqlite", + "database": db_path, + "host": "localhost", + "port": 0, + "username": "", + "password": "", + } + + +def _setup_db(tmp_path: Path) -> str: + db_path = str(tmp_path / "test.db") + conn = sqlite3.connect(db_path, check_same_thread=False) + conn.execute("CREATE TABLE customers (id INTEGER PRIMARY KEY, name TEXT)") + conn.execute("INSERT INTO customers VALUES (1, 'Alice')") + conn.commit() + conn.close() + return db_path + + +def _write_golden(path: Path, entries: list[dict]) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(yaml.dump(entries), encoding="utf-8") + + +class TestOdaEvalRunNoActiveConnection: + def test_exits_1_when_no_active_connection(self, tmp_path: Path) -> None: + gq = tmp_path / "evals" / "golden_queries.yaml" + _write_golden(gq, []) + runner = CliRunner() + with patch("open_data_agent.cli_eval._get_active_connection", return_value=None): + result = runner.invoke(cli, ["eval", "run", "--path", str(gq)]) + assert result.exit_code == 1 + + +class TestOdaEvalRunHappyPath: + def test_run_passes_with_valid_golden_query(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + gq = tmp_path / "evals" / "golden_queries.yaml" + _write_golden( + gq, + [ + { + "id": "customers-001", + "question": "How many customers?", + "sql": "SELECT COUNT(*) FROM customers", + "dialects": ["sqlite"], + "active": True, + } + ], + ) + runner = CliRunner() + with ( + patch( + "open_data_agent.cli_eval._get_active_connection", 
return_value=("test", "sqlite") + ), + patch("open_data_agent.db.connection.ConnectionManager") as mock_mgr, + ): + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["eval", "run", "--path", str(gq)]) + assert result.exit_code == 0 + assert "Passed:" in result.output + + def test_run_writes_results_file(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + gq = tmp_path / "evals" / "golden_queries.yaml" + results_path = tmp_path / "evals" / "results" / "latest.json" + _write_golden( + gq, + [ + { + "id": "customers-001", + "question": "q", + "sql": "SELECT COUNT(*) FROM customers", + "dialects": ["sqlite"], + "active": True, + } + ], + ) + runner = CliRunner() + with ( + patch( + "open_data_agent.cli_eval._get_active_connection", return_value=("test", "sqlite") + ), + patch("open_data_agent.db.connection.ConnectionManager") as mock_mgr, + patch("open_data_agent.cli_eval._RESULTS_PATH", results_path), + ): + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + runner.invoke(cli, ["eval", "run", "--path", str(gq)]) + assert results_path.exists() + data = json.loads(results_path.read_text()) + assert len(data) == 1 + assert data[0]["id"] == "customers-001" + + def test_run_exits_1_on_failing_query(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + gq = tmp_path / "evals" / "golden_queries.yaml" + _write_golden( + gq, + [ + { + "id": "fail-001", + "question": "Empty table", + "sql": "SELECT * FROM nonexistent_table_xyz", + "dialects": ["sqlite"], + "active": True, + } + ], + ) + runner = CliRunner() + with ( + patch( + "open_data_agent.cli_eval._get_active_connection", return_value=("test", "sqlite") + ), + patch("open_data_agent.db.connection.ConnectionManager") as mock_mgr, + ): + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["eval", "run", "--path", str(gq)]) + assert result.exit_code == 1 + + def 
test_run_no_golden_queries_for_dialect(self, tmp_path: Path) -> None: + gq = tmp_path / "evals" / "golden_queries.yaml" + _write_golden( + gq, + [ + { + "id": "pg-001", + "question": "PG only", + "sql": "SELECT 1", + "dialects": ["postgresql"], + "active": True, + } + ], + ) + runner = CliRunner() + with patch( + "open_data_agent.cli_eval._get_active_connection", return_value=("test", "sqlite") + ): + result = runner.invoke(cli, ["eval", "run", "--path", str(gq)]) + # No matching queries → exits 0 with info message + assert result.exit_code == 0 + + +class TestOdaEvalResults: + def test_no_results_file_exits_0(self, tmp_path: Path) -> None: + missing = tmp_path / "evals" / "results" / "latest.json" + runner = CliRunner() + result = runner.invoke(cli, ["eval", "results", "--path", str(missing)]) + assert result.exit_code == 0 + assert "No eval results" in result.output + + def test_shows_results_table(self, tmp_path: Path) -> None: + results_path = tmp_path / "latest.json" + results_path.write_text( + json.dumps( + [ + { + "id": "sales-001", + "question": "How many?", + "sql": "SELECT COUNT(*) FROM t", + "passed": True, + "row_count": 1, + "duration_ms": 5.0, + "error": None, + } + ] + ), + encoding="utf-8", + ) + runner = CliRunner() + result = runner.invoke(cli, ["eval", "results", "--path", str(results_path)]) + assert result.exit_code == 0 + assert "sales-001" in result.output + assert "Passed:" in result.output + + +class TestOdaEvalAdd: + def test_add_entry_non_interactive(self, tmp_path: Path) -> None: + gq = tmp_path / "evals" / "golden_queries.yaml" + runner = CliRunner() + result = runner.invoke( + cli, + [ + "eval", + "add", + "--id", + "sales-001", + "--question", + "How many customers?", + "--sql", + "SELECT COUNT(*) FROM customers", + "--dialects", + "sqlite", + "--path", + str(gq), + ], + ) + assert result.exit_code == 0 + assert gq.exists() + assert "sales-001" in result.output + + def test_add_duplicate_id_exits_1(self, tmp_path: Path) -> None: + gq 
= tmp_path / "evals" / "golden_queries.yaml" + runner = CliRunner() + # First add + runner.invoke( + cli, + [ + "eval", + "add", + "--id", + "sales-001", + "--question", + "q", + "--sql", + "SELECT 1", + "--dialects", + "sqlite", + "--path", + str(gq), + ], + ) + # Duplicate add + result = runner.invoke( + cli, + [ + "eval", + "add", + "--id", + "sales-001", + "--question", + "q2", + "--sql", + "SELECT 2", + "--dialects", + "sqlite", + "--path", + str(gq), + ], + ) + assert result.exit_code == 1 + + def test_add_invalid_dialect_exits_1(self, tmp_path: Path) -> None: + gq = tmp_path / "evals" / "golden_queries.yaml" + runner = CliRunner() + result = runner.invoke( + cli, + [ + "eval", + "add", + "--id", + "sales-001", + "--question", + "q", + "--sql", + "SELECT 1", + "--dialects", + "oracle", + "--path", + str(gq), + ], + ) + assert result.exit_code == 1 diff --git a/tests/unit/cli/test_cli_history.py b/tests/unit/cli/test_cli_history.py new file mode 100644 index 0000000..baefb74 --- /dev/null +++ b/tests/unit/cli/test_cli_history.py @@ -0,0 +1,264 @@ +"""Unit tests for oda history list, search, and stats commands.""" + +from __future__ import annotations + +import json +from pathlib import Path + +from click.testing import CliRunner + +from open_data_agent.cli_history import history + + +def _make_tracker_with_entries(tmp_path: Path, entries: list[dict]) -> Path: + """Write JSONL entries to a temp history file and return its path.""" + hist_file = tmp_path / "history.jsonl" + with hist_file.open("a") as handle: + handle.writelines(json.dumps(e) + "\n" for e in entries) + return hist_file + + +_ENTRY_A = { + "id": "aaaa-0001", + "timestamp": "2026-03-09T10:00:00+00:00", + "connection": "my-pg", + "db_type": "postgresql", + "sql": "SELECT * FROM customers LIMIT 10", + "tables": ["customers"], + "row_count": 3, + "duration_ms": 12.5, + "question": "Show me all customers", + "error": None, +} + +_ENTRY_B = { + "id": "bbbb-0002", + "timestamp": "2026-03-09T11:00:00+00:00", + "connection":
"my-sqlite", + "db_type": "sqlite", + "sql": "SELECT * FROM orders WHERE status='pending'", + "tables": ["orders"], + "row_count": 0, + "duration_ms": 5.2, + "question": "Pending orders", + "error": "table not found", +} + +_ENTRY_C = { + "id": "cccc-0003", + "timestamp": "2026-03-09T12:00:00+00:00", + "connection": "my-pg", + "db_type": "postgresql", + "sql": "SELECT COUNT(*) FROM products", + "tables": ["products"], + "row_count": 1, + "duration_ms": 8.0, + "question": "How many products?", + "error": None, +} + + +class TestHistoryList: + def test_list_shows_entries(self, tmp_path: Path) -> None: + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A, _ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["list"]) + assert result.exit_code == 0 + assert "customers" in result.output or "SELECT" in result.output + + def test_list_empty_history(self, tmp_path: Path) -> None: + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + empty_file = tmp_path / "empty.jsonl" + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", empty_file) or None, + ): + result = runner.invoke(history, ["list"]) + assert result.exit_code == 0 + assert "No history found" in result.output + + def test_list_respects_n_flag(self, tmp_path: Path) -> None: + """--n 1 should show at most 1 entry.""" + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A, _ENTRY_B, _ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = 
runner.invoke(history, ["list", "--n", "1"]) + assert result.exit_code == 0 + # Only 1 row should appear (newest = _ENTRY_C) + assert "cccc" in result.output or "products" in result.output + + def test_list_newest_first(self, tmp_path: Path) -> None: + """Entries should appear newest first (reversed).""" + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A, _ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["list"]) + # _ENTRY_C (cccc) should appear before _ENTRY_A (aaaa) in output + idx_c = result.output.find("cccc") + idx_a = result.output.find("aaaa") + assert idx_c < idx_a + + +class TestHistorySearch: + def test_search_finds_matching_sql(self, tmp_path: Path) -> None: + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A, _ENTRY_B, _ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["search", "customers"]) + assert result.exit_code == 0 + assert "customers" in result.output + + def test_search_case_insensitive(self, tmp_path: Path) -> None: + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["search", "CUSTOMERS"]) + assert result.exit_code == 0 + assert "customers" in result.output.lower() or "aaaa" in result.output + + def test_search_no_matches(self, tmp_path: Path) -> None: + 
hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["search", "zzznomatch"]) + assert result.exit_code == 0 + assert "No history found" in result.output + + def test_search_matches_question_field(self, tmp_path: Path) -> None: + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["search", "how many"]) + assert result.exit_code == 0 + assert "cccc" in result.output or "products" in result.output + + +class TestHistoryStats: + def test_stats_basic(self, tmp_path: Path) -> None: + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A, _ENTRY_B, _ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["stats"]) + assert result.exit_code == 0 + assert "3" in result.output # total queries + + def test_stats_error_rate(self, tmp_path: Path) -> None: + """With 1 error out of 3, error rate = 33.3%.""" + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A, _ENTRY_B, _ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, 
["stats"]) + assert result.exit_code == 0 + assert "33.3" in result.output + + def test_stats_empty_history(self, tmp_path: Path) -> None: + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + empty_file = tmp_path / "empty.jsonl" + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", empty_file) or None, + ): + result = runner.invoke(history, ["stats"]) + assert result.exit_code == 0 + assert "No history found" in result.output + + def test_stats_avg_duration(self, tmp_path: Path) -> None: + """Average of 12.5, 5.2, 8.0 = ~8.6.""" + hist_file = _make_tracker_with_entries(tmp_path, [_ENTRY_A, _ENTRY_B, _ENTRY_C]) + runner = CliRunner() + from unittest.mock import patch + + from open_data_agent.history import HistoryTracker + + with patch.object( + HistoryTracker, + "__init__", + lambda self, *a, **kw: setattr(self, "_history_file", hist_file) or None, + ): + result = runner.invoke(history, ["stats"]) + assert result.exit_code == 0 + # avg = (12.5 + 5.2 + 8.0) / 3 ≈ 8.6 + assert "8." 
in result.output diff --git a/tests/unit/cli/test_cli_init.py b/tests/unit/cli/test_cli_init.py new file mode 100644 index 0000000..ade7fa2 --- /dev/null +++ b/tests/unit/cli/test_cli_init.py @@ -0,0 +1,120 @@ +"""Unit tests for oda init command.""" + +from __future__ import annotations + +import stat +import sys +from pathlib import Path +from unittest.mock import patch + +import pytest +from click.testing import CliRunner + +from open_data_agent.cli import cli + + +class TestOdaInit: + def test_creates_config_dir(self, tmp_path: Path) -> None: + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + result = runner.invoke(cli, ["init"]) + + assert result.exit_code == 0 + assert config_dir.exists() + + def test_creates_connections_yaml(self, tmp_path: Path) -> None: + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + runner.invoke(cli, ["init"]) + + connections_file = config_dir / "connections.yaml" + assert connections_file.exists() + + def test_creates_config_yaml(self, tmp_path: Path) -> None: + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + runner.invoke(cli, ["init"]) + + config_file = config_dir / "config.yaml" + assert config_file.exists() + + @pytest.mark.skipif(sys.platform == "win32", reason="chmod not meaningful on Windows") + def test_connections_yaml_has_secure_permissions(self, tmp_path: Path) -> None: + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + runner.invoke(cli, ["init"]) + + connections_file = config_dir / "connections.yaml" + file_stat = connections_file.stat() + # Only owner should have read/write (0o600) + assert 
stat.S_IMODE(file_stat.st_mode) == 0o600 + + def test_idempotent_second_run(self, tmp_path: Path) -> None: + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + runner.invoke(cli, ["init"]) + # Write something to config.yaml to verify it's not overwritten + config_file = config_dir / "config.yaml" + original_content = config_file.read_text() + + result = runner.invoke(cli, ["init"]) + + assert result.exit_code == 0 + assert "Already initialised." in result.output + # Existing file unchanged + assert config_file.read_text() == original_content + + def test_prints_next_steps(self, tmp_path: Path) -> None: + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + result = runner.invoke(cli, ["init"]) + + assert "oda connections add" in result.output + assert "oda connect" in result.output + assert "oda docs generate" in result.output + + def test_help_shows_description(self) -> None: + runner = CliRunner() + result = runner.invoke(cli, ["init", "--help"]) + assert result.exit_code == 0 + assert "Initialise" in result.output or "config directory" in result.output + + def test_config_yaml_has_default_values(self, tmp_path: Path) -> None: + import yaml + + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + runner.invoke(cli, ["init"]) + + config_file = config_dir / "config.yaml" + data = yaml.safe_load(config_file.read_text()) + assert data["row_limit"] == 1000 + assert data["strict_mode"] is False + assert data["log_level"] == "INFO" + + def test_no_rich_markup_in_output(self, tmp_path: Path) -> None: + """Regression: cli_init must not print '[green]' or '[/green]' literally.""" + runner = CliRunner() + config_dir = tmp_path / "open-data-agent" + + with 
patch("open_data_agent.cli_init.get_config_dir", return_value=config_dir): + result = runner.invoke(cli, ["init"]) + + assert "[green]" not in result.output + assert "[/green]" not in result.output + # Checkmark should appear as the actual Unicode character, not markup + assert "✓" in result.output diff --git a/tests/unit/cli/test_cli_memory.py b/tests/unit/cli/test_cli_memory.py new file mode 100644 index 0000000..1ec25ed --- /dev/null +++ b/tests/unit/cli/test_cli_memory.py @@ -0,0 +1,145 @@ +"""Unit tests for oda memory add, list, and search commands.""" + +from __future__ import annotations + +from pathlib import Path +from unittest.mock import patch + +from click.testing import CliRunner + +from open_data_agent.cli_memory import memory +from open_data_agent.memory import MemoryManager + + +def _patched_runner(tmp_path: Path) -> tuple[CliRunner, MemoryManager]: + """Return a CliRunner + MemoryManager pre-pointed at tmp_path.""" + mgr = MemoryManager(memory_dir=tmp_path / "memory") + return CliRunner(), mgr + + +class TestMemoryAdd: + def test_add_noninteractive(self, tmp_path: Path) -> None: + runner, mgr = _patched_runner(tmp_path) + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke( + memory, + ["add", "--title", "My note", "--category", "correction", "--content", "Fix X"], + ) + assert result.exit_code == 0 + assert "✓" in result.output + entries = mgr.list_entries() + assert len(entries) == 1 + assert entries[0]["title"] == "My note" + + def test_add_with_tags(self, tmp_path: Path) -> None: + runner, mgr = _patched_runner(tmp_path) + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke( + memory, + [ + "add", + "--title", + "Tagged", + "--category", + "data_quality", + "--content", + "body", + "--tags", + "alpha,beta", + ], + ) + assert result.exit_code == 0 + entries = mgr.list_entries() + assert "alpha" in entries[0].get("tags", []) + + def 
test_add_invalid_category(self, tmp_path: Path) -> None: + runner = CliRunner() + result = runner.invoke( + memory, + ["add", "--title", "T", "--category", "INVALID", "--content", "C"], + ) + # Click Choice should reject it with a non-zero exit or error message + assert result.exit_code != 0 + + +class TestMemoryList: + def test_list_shows_entries(self, tmp_path: Path) -> None: + runner, mgr = _patched_runner(tmp_path) + mgr.add("Entry One", "content", "business_context") + mgr.add("Entry Two", "body", "correction") + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke(memory, ["list"]) + assert result.exit_code == 0 + assert "Entry One" in result.output + assert "Entry Two" in result.output + + def test_list_empty(self, tmp_path: Path) -> None: + runner, mgr = _patched_runner(tmp_path) + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke(memory, ["list"]) + assert result.exit_code == 0 + assert "No memory entries found" in result.output + + def test_list_shows_columns(self, tmp_path: Path) -> None: + runner, mgr = _patched_runner(tmp_path) + mgr.add("My Title", "content", "query_pattern", tags=["foo"]) + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke(memory, ["list"]) + assert "Title" in result.output + assert "Category" in result.output + assert "query_pattern" in result.output + + +class TestMemorySearch: + def test_search_finds_match(self, tmp_path: Path) -> None: + runner, mgr = _patched_runner(tmp_path) + mgr.add("Revenue insight", "use net price", "business_context") + mgr.add("Unrelated", "nothing", "correction") + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke(memory, ["search", "revenue"]) + assert result.exit_code == 0 + assert "Revenue insight" in result.output + assert "Unrelated" not in result.output + + def test_search_no_results(self, tmp_path: Path) -> 
None: + runner, mgr = _patched_runner(tmp_path) + mgr.add("Something", "content", "correction") + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke(memory, ["search", "zzznomatch"]) + assert result.exit_code == 0 + assert "No memory entries found" in result.output + + def test_search_empty_memory(self, tmp_path: Path) -> None: + runner, mgr = _patched_runner(tmp_path) + with patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr): + result = runner.invoke(memory, ["search", "anything"]) + assert result.exit_code == 0 + assert "No memory entries found" in result.output + + +class TestMemoryAddErrorRouting: + def test_config_error_exits_nonzero(self, tmp_path: Path) -> None: + """Regression: ConfigError in memory add must exit with code 1.""" + from open_data_agent.exceptions import ConfigError + + runner, mgr = _patched_runner(tmp_path) + # Patch mgr.add to raise ConfigError + with ( + patch("open_data_agent.cli_memory.MemoryManager", return_value=mgr), + patch.object(mgr, "add", side_effect=ConfigError("bad category")), + ): + result = runner.invoke( + memory, + ["add", "--title", "T", "--category", "correction", "--content", "C"], + ) + assert result.exit_code == 1 + + def test_err_console_is_stderr(self) -> None: + """Regression: cli_memory must declare err_console = Console(stderr=True).""" + import open_data_agent.cli_memory as mod + + # err_console must exist and be configured for stderr + assert hasattr(mod, "err_console"), "cli_memory must export err_console" + assert getattr(mod.err_console, "stderr", False), ( + "err_console must write to stderr" + ) diff --git a/tests/unit/cli/test_cli_query.py b/tests/unit/cli/test_cli_query.py new file mode 100644 index 0000000..42fbc74 --- /dev/null +++ b/tests/unit/cli/test_cli_query.py @@ -0,0 +1,151 @@ +"""Unit tests for oda query CLI command.""" + +from __future__ import annotations + +import sqlite3 +from pathlib import Path +from
unittest.mock import patch + +from click.testing import CliRunner + +from open_data_agent.cli import cli + + +def _active_connection_patches(tmp_path: Path): + """Return a stack of patches that make the active connection point at a temp SQLite DB.""" + db_path = str(tmp_path / "test.db") + conn = sqlite3.connect(db_path, check_same_thread=False) + conn.row_factory = sqlite3.Row + conn.execute("CREATE TABLE customers (id INTEGER PRIMARY KEY, name TEXT)") + conn.execute("INSERT INTO customers VALUES (1, 'Alice')") + conn.commit() + conn.close() + + params = { + "db_type": "sqlite", + "database": db_path, + "host": "localhost", + "port": 0, + "username": "", + "password": "", + } + return params + + +class TestOdaQueryNoActiveConnection: + def test_exits_1_when_no_active_connection(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = None + result = runner.invoke(cli, ["query", "SELECT 1"]) + assert result.exit_code == 1 + + def test_error_message_shown_when_no_active_connection(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = None + result = runner.invoke(cli, ["query", "SELECT 1"]) + assert "No active connection" in result.output + + +class TestOdaQueryHappyPath: + def test_executes_select_and_prints_table(self, tmp_path: Path) -> None: + params = _active_connection_patches(tmp_path) + runner = CliRunner() + with ( + patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr, + patch("open_data_agent.cli_query._is_docs_stale", return_value=False), + ): + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = params + result = runner.invoke(cli, ["query", "SELECT * FROM customers"]) + assert result.exit_code == 0 + assert "Alice" in result.output + + def 
test_json_format_output(self, tmp_path: Path) -> None: + import json + + params = _active_connection_patches(tmp_path) + runner = CliRunner() + with ( + patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr, + patch("open_data_agent.cli_query._is_docs_stale", return_value=False), + ): + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = params + result = runner.invoke(cli, ["query", "--format", "json", "SELECT * FROM customers"]) + assert result.exit_code == 0 + # Extract just the JSON array from the output (truncation warning may follow it) + json_end = result.output.rfind("]") + 1 + json_str = result.output[:json_end] + parsed = json.loads(json_str) + assert isinstance(parsed, list) + assert len(parsed) == 1 + assert parsed[0]["name"] == "Alice" + + def test_csv_format_output(self, tmp_path: Path) -> None: + import csv + + params = _active_connection_patches(tmp_path) + runner = CliRunner() + with ( + patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr, + patch("open_data_agent.cli_query._is_docs_stale", return_value=False), + ): + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = params + result = runner.invoke(cli, ["query", "--format", "csv", "SELECT * FROM customers"]) + assert result.exit_code == 0 + # _is_docs_stale is mocked to False so no stale-docs warning is emitted. + # The Click test runner merges stdout+stderr; strip any Rich warning lines + # (which start with the ⚠ character written by err_console) so the CSV + # parser only sees the header + data rows. 
+ csv_lines = [ + line for line in result.output.splitlines() if line and not line.startswith("\u26a0") + ] + reader = csv.DictReader(csv_lines) + rows = list(reader) + assert len(rows) == 1 + assert rows[0]["name"] == "Alice" + + +class TestOdaQueryStrictMode: + def test_strict_mode_blocks_stale_docs(self, tmp_path: Path) -> None: + params = _active_connection_patches(tmp_path) + runner = CliRunner() + with ( + patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr, + patch("open_data_agent.cli_query._is_docs_stale", return_value=True), + ): + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = params + result = runner.invoke(cli, ["query", "--strict", "SELECT 1"]) + assert result.exit_code == 1 + + def test_no_strict_warns_on_stale_docs_but_runs(self, tmp_path: Path) -> None: + params = _active_connection_patches(tmp_path) + runner = CliRunner() + with ( + patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr, + patch("open_data_agent.cli_query._is_docs_stale", return_value=True), + ): + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = params + result = runner.invoke(cli, ["query", "SELECT * FROM customers"]) + # Query should still execute (not blocked) and warning must be visible + assert result.exit_code == 0 + assert "stale" in result.output.lower() + + +class TestOdaQuerySafetyError: + def test_blocked_sql_exits_1(self, tmp_path: Path) -> None: + params = _active_connection_patches(tmp_path) + runner = CliRunner() + with ( + patch("open_data_agent.cli_query.ConnectionManager") as mock_mgr, + patch("open_data_agent.cli_query._is_docs_stale", return_value=False), + ): + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = params + result = runner.invoke(cli, ["query", "DROP TABLE customers"]) + assert result.exit_code == 1 diff --git 
a/tests/unit/cli/test_cli_schema.py b/tests/unit/cli/test_cli_schema.py new file mode 100644 index 0000000..368939d --- /dev/null +++ b/tests/unit/cli/test_cli_schema.py @@ -0,0 +1,130 @@ +"""Unit tests for oda schema CLI commands (schemas, tables, describe, sample, profile).""" + +from __future__ import annotations + +import sqlite3 +from pathlib import Path +from unittest.mock import patch + +from click.testing import CliRunner + +from open_data_agent.cli import cli + + +def _make_params(db_path: str) -> dict: + return { + "db_type": "sqlite", + "database": db_path, + "host": "localhost", + "port": 0, + "username": "", + "password": "", + } + + +def _setup_db(tmp_path: Path) -> str: + db_path = str(tmp_path / "test.db") + conn = sqlite3.connect(db_path, check_same_thread=False) + conn.execute("CREATE TABLE customers (id INTEGER PRIMARY KEY, name TEXT, email TEXT)") + conn.execute("INSERT INTO customers VALUES (1, 'Alice', 'alice@example.com')") + conn.commit() + conn.close() + return db_path + + +class TestOdaSchemasNoActiveConnection: + def test_exits_1_when_no_active_connection(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = None + result = runner.invoke(cli, ["schemas"]) + assert result.exit_code == 1 + + +class TestOdaSchemas: + def test_lists_schemas(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["schemas"]) + assert result.exit_code == 0 + assert "main" in result.output + + +class TestOdaTables: + def test_lists_tables(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + runner = CliRunner() + with 
patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["tables", "main"]) + assert result.exit_code == 0 + assert "customers" in result.output + + def test_exits_1_when_no_active_connection(self) -> None: + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = None + result = runner.invoke(cli, ["tables", "main"]) + assert result.exit_code == 1 + + +class TestOdaDescribe: + def test_shows_columns(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["describe", "customers"]) + assert result.exit_code == 0 + assert "name" in result.output + assert "id" in result.output + + def test_nonexistent_table_returns_empty_columns(self, tmp_path: Path) -> None: + """SQLite PRAGMA table_info returns [] for an unknown table — command exits 0.""" + db_path = _setup_db(tmp_path) + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["describe", "nonexistent_xyz"]) + # SQLite PRAGMA table_info returns empty list for unknown tables; no exception raised + assert result.exit_code == 0 + + +class TestOdaSample: + def test_shows_sample_rows(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + 
mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["sample", "customers"]) + assert result.exit_code == 0 + assert "Alice" in result.output + + def test_n_flag_respected(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["sample", "--n", "3", "customers"]) + assert result.exit_code == 0 + + +class TestOdaProfile: + def test_shows_profile_stats(self, tmp_path: Path) -> None: + db_path = _setup_db(tmp_path) + runner = CliRunner() + with patch("open_data_agent.cli_schema.ConnectionManager") as mock_mgr: + mock_mgr.return_value.get_active_connection.return_value = "test" + mock_mgr.return_value.get_connection.return_value = _make_params(db_path) + result = runner.invoke(cli, ["profile", "customers"]) + assert result.exit_code == 0 + # Profile output must include all column names (not just one) + assert "id" in result.output and "name" in result.output diff --git a/tests/unit/db/__init__.py b/tests/unit/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/db/test_dialect_mysql.py b/tests/unit/db/test_dialect_mysql.py new file mode 100644 index 0000000..bf9101c --- /dev/null +++ b/tests/unit/db/test_dialect_mysql.py @@ -0,0 +1,92 @@ +"""Unit tests for MySQLAdapter — dialect SQL generation (no live DB).""" + +from __future__ import annotations + +import pytest + +from open_data_agent.db.dialect import DialectAdapter, MySQLAdapter +from open_data_agent.db.schema import NormalizedColumn +from open_data_agent.exceptions import DialectError + + +@pytest.fixture +def adapter() -> MySQLAdapter: + return MySQLAdapter() + + +class 
TestMySQLAdapterInterface: + def test_is_dialect_adapter(self, adapter: MySQLAdapter) -> None: + assert isinstance(adapter, DialectAdapter) + + def test_quote_identifier_backticks(self, adapter: MySQLAdapter) -> None: + assert adapter.quote_identifier("my col") == "`my col`" + + def test_quote_identifier_escapes_backtick(self, adapter: MySQLAdapter) -> None: + assert adapter.quote_identifier("col`name") == "`col``name`" + + def test_date_trunc_month(self, adapter: MySQLAdapter) -> None: + result = adapter.date_trunc("month", "created_at") + assert "DATE_FORMAT" in result + assert "%Y-%m-01" in result + + def test_date_trunc_year(self, adapter: MySQLAdapter) -> None: + result = adapter.date_trunc("year", "created_at") + assert "%Y-01-01" in result + + def test_date_trunc_unsupported_raises_dialect_error(self, adapter: MySQLAdapter) -> None: + with pytest.raises(DialectError): + adapter.date_trunc("quarter", "created_at") + + def test_current_timestamp(self, adapter: MySQLAdapter) -> None: + assert adapter.current_timestamp() == "NOW()" + + def test_interval(self, adapter: MySQLAdapter) -> None: + result = adapter.interval(30, "day") + assert "30" in result + assert "DAY" in result.upper() + + def test_safe_prefixes_includes_select(self, adapter: MySQLAdapter) -> None: + assert "SELECT" in adapter.safe_prefixes + + def test_dangerous_patterns_includes_load_data(self, adapter: MySQLAdapter) -> None: + patterns = " ".join(adapter.dangerous_patterns) + assert "LOAD" in patterns + + def test_dangerous_patterns_includes_grant(self, adapter: MySQLAdapter) -> None: + patterns = " ".join(adapter.dangerous_patterns) + assert "GRANT" in patterns + + +class TestMySQLNormalizeColumnRow: + def test_normalize_basic_row(self, adapter: MySQLAdapter) -> None: + row = { + "name": "email", + "data_type": "varchar", + "is_nullable": 1, + "default_value": None, + "is_primary_key": 0, + "ordinal_position": 3, + "comment": "", + } + col = adapter.normalize_column_row(row) + assert 
isinstance(col, NormalizedColumn) + assert col.name == "email" + assert col.data_type == "varchar" + assert col.is_nullable is True + assert col.is_primary_key is False + assert col.ordinal_position == 3 + assert col.comment is None # empty string → None + + def test_normalize_with_comment(self, adapter: MySQLAdapter) -> None: + row = { + "name": "id", + "data_type": "int", + "is_nullable": 0, + "default_value": None, + "is_primary_key": 1, + "ordinal_position": 1, + "comment": "Auto-increment PK", + } + col = adapter.normalize_column_row(row) + assert col.is_primary_key is True + assert col.comment == "Auto-increment PK" diff --git a/tests/unit/db/test_dialect_pg.py b/tests/unit/db/test_dialect_pg.py new file mode 100644 index 0000000..c476919 --- /dev/null +++ b/tests/unit/db/test_dialect_pg.py @@ -0,0 +1,85 @@ +"""Unit tests for PostgreSQLAdapter — dialect SQL generation (no live DB).""" + +from __future__ import annotations + +import pytest + +from open_data_agent.db.dialect import DialectAdapter, PostgreSQLAdapter +from open_data_agent.db.schema import NormalizedColumn + + +@pytest.fixture +def adapter() -> PostgreSQLAdapter: + return PostgreSQLAdapter() + + +class TestPostgreSQLAdapterInterface: + def test_is_dialect_adapter(self, adapter: PostgreSQLAdapter) -> None: + assert isinstance(adapter, DialectAdapter) + + def test_quote_identifier_double_quotes(self, adapter: PostgreSQLAdapter) -> None: + assert adapter.quote_identifier("my col") == '"my col"' + + def test_quote_identifier_escapes_internal_quotes(self, adapter: PostgreSQLAdapter) -> None: + assert adapter.quote_identifier('say "hi"') == '"say ""hi"""' + + def test_date_trunc_month(self, adapter: PostgreSQLAdapter) -> None: + result = adapter.date_trunc("month", "created_at") + assert result == "DATE_TRUNC('month', created_at)" + + def test_date_trunc_year(self, adapter: PostgreSQLAdapter) -> None: + assert adapter.date_trunc("year", "ts") == "DATE_TRUNC('year', ts)" + + def 
test_current_timestamp(self, adapter: PostgreSQLAdapter) -> None: + assert adapter.current_timestamp() == "NOW()" + + def test_interval(self, adapter: PostgreSQLAdapter) -> None: + result = adapter.interval(7, "day") + assert "7" in result + assert "day" in result.lower() + + def test_safe_prefixes_includes_select(self, adapter: PostgreSQLAdapter) -> None: + assert "SELECT" in adapter.safe_prefixes + + def test_dangerous_patterns_includes_copy(self, adapter: PostgreSQLAdapter) -> None: + patterns = " ".join(adapter.dangerous_patterns) + assert "COPY" in patterns + + def test_dangerous_patterns_includes_alter_system(self, adapter: PostgreSQLAdapter) -> None: + patterns = " ".join(adapter.dangerous_patterns) + assert "ALTER" in patterns + + +class TestPostgreSQLNormalizeColumnRow: + def test_normalize_basic_row(self, adapter: PostgreSQLAdapter) -> None: + row = { + "name": "email", + "data_type": "character varying", + "is_nullable": True, + "default_value": None, + "is_primary_key": False, + "ordinal_position": 3, + "comment": None, + } + col = adapter.normalize_column_row(row) + assert isinstance(col, NormalizedColumn) + assert col.name == "email" + assert col.data_type == "character varying" + assert col.is_nullable is True + assert col.is_primary_key is False + assert col.ordinal_position == 3 + assert col.comment is None + + def test_normalize_with_comment(self, adapter: PostgreSQLAdapter) -> None: + row = { + "name": "id", + "data_type": "integer", + "is_nullable": False, + "default_value": "nextval('...')", + "is_primary_key": True, + "ordinal_position": 1, + "comment": "Primary key", + } + col = adapter.normalize_column_row(row) + assert col.is_primary_key is True + assert col.comment == "Primary key" diff --git a/tests/unit/db/test_dialect_sqlite.py b/tests/unit/db/test_dialect_sqlite.py new file mode 100644 index 0000000..567c2b5 --- /dev/null +++ b/tests/unit/db/test_dialect_sqlite.py @@ -0,0 +1,131 @@ +"""Unit tests for SQLiteAdapter — all interface 
methods + normalize_column_row.""" + +from __future__ import annotations + +import sqlite3 + +import pytest + +from open_data_agent.db.dialect import DialectAdapter, SQLiteAdapter +from open_data_agent.db.schema import NormalizedColumn +from open_data_agent.exceptions import DialectError + + +@pytest.fixture +def adapter() -> SQLiteAdapter: + return SQLiteAdapter() + + +class TestSQLiteAdapterInterface: + def test_is_dialect_adapter(self, adapter: SQLiteAdapter) -> None: + assert isinstance(adapter, DialectAdapter) + + def test_quote_identifier_simple(self, adapter: SQLiteAdapter) -> None: + assert adapter.quote_identifier("my_table") == '"my_table"' + + def test_quote_identifier_with_space(self, adapter: SQLiteAdapter) -> None: + assert adapter.quote_identifier("my table") == '"my table"' + + def test_quote_identifier_escapes_double_quotes(self, adapter: SQLiteAdapter) -> None: + assert adapter.quote_identifier('say "hello"') == '"say ""hello"""' + + def test_date_trunc_month(self, adapter: SQLiteAdapter) -> None: + result = adapter.date_trunc("month", "created_at") + assert "strftime" in result + assert "%Y-%m-01" in result + + def test_date_trunc_year(self, adapter: SQLiteAdapter) -> None: + result = adapter.date_trunc("year", "created_at") + assert "%Y-01-01" in result + + def test_date_trunc_day(self, adapter: SQLiteAdapter) -> None: + result = adapter.date_trunc("day", "created_at") + assert "%Y-%m-%d" in result + + def test_date_trunc_unsupported_raises_dialect_error(self, adapter: SQLiteAdapter) -> None: + with pytest.raises(DialectError): + adapter.date_trunc("quarter", "created_at") + + def test_current_timestamp(self, adapter: SQLiteAdapter) -> None: + ts = adapter.current_timestamp() + assert "now" in ts.lower() + + def test_interval_days(self, adapter: SQLiteAdapter) -> None: + result = adapter.interval(7, "day") + assert "7" in result + assert "day" in result + + def test_interval_unsupported_raises_dialect_error(self, adapter: SQLiteAdapter) -> None: 
+ with pytest.raises(DialectError): + adapter.interval(1, "fortnight") + + def test_safe_prefixes_includes_select(self, adapter: SQLiteAdapter) -> None: + assert "SELECT" in adapter.safe_prefixes + + def test_safe_prefixes_includes_pragma(self, adapter: SQLiteAdapter) -> None: + assert "PRAGMA" in adapter.safe_prefixes + + def test_dangerous_patterns_is_list(self, adapter: SQLiteAdapter) -> None: + assert isinstance(adapter.dangerous_patterns, list) + assert len(adapter.dangerous_patterns) > 0 + + +class TestSQLiteIntrospection: + def test_introspect_schemas_returns_main( + self, adapter: SQLiteAdapter, sqlite_db: sqlite3.Connection + ) -> None: + sql = adapter.introspect_schemas_sql() + cursor = sqlite_db.execute(sql) + rows = cursor.fetchall() + schema_names = [row[0] for row in rows] + assert "main" in schema_names + + def test_introspect_tables_returns_fixture_tables( + self, adapter: SQLiteAdapter, sqlite_db: sqlite3.Connection + ) -> None: + sql = adapter.introspect_tables_sql("main") + cursor = sqlite_db.execute(sql) + rows = cursor.fetchall() + table_names = [row[0] for row in rows] + assert "customers" in table_names + assert "orders" in table_names + assert "products" in table_names + + def test_introspect_columns_returns_normalized( + self, adapter: SQLiteAdapter, sqlite_db: sqlite3.Connection + ) -> None: + columns = adapter.execute_introspect_columns(sqlite_db, "main", "customers") + assert len(columns) > 0 + for col in columns: + assert isinstance(col, NormalizedColumn) + + def test_customers_has_id_column( + self, adapter: SQLiteAdapter, sqlite_db: sqlite3.Connection + ) -> None: + columns = adapter.execute_introspect_columns(sqlite_db, "main", "customers") + names = [c.name for c in columns] + assert "id" in names + + def test_id_is_primary_key(self, adapter: SQLiteAdapter, sqlite_db: sqlite3.Connection) -> None: + columns = adapter.execute_introspect_columns(sqlite_db, "main", "customers") + id_col = next(c for c in columns if c.name == "id") + 
assert id_col.is_primary_key is True + + def test_all_normalized_column_fields_populated( + self, adapter: SQLiteAdapter, sqlite_db: sqlite3.Connection + ) -> None: + columns = adapter.execute_introspect_columns(sqlite_db, "main", "orders") + for col in columns: + assert col.name is not None + assert col.data_type is not None + assert isinstance(col.is_nullable, bool) + assert isinstance(col.is_primary_key, bool) + assert isinstance(col.ordinal_position, int) + assert col.ordinal_position >= 1 + + def test_ordinal_positions_are_sequential( + self, adapter: SQLiteAdapter, sqlite_db: sqlite3.Connection + ) -> None: + columns = adapter.execute_introspect_columns(sqlite_db, "main", "customers") + positions = [c.ordinal_position for c in columns] + assert positions == list(range(1, len(positions) + 1)) diff --git a/tests/unit/db/test_query.py b/tests/unit/db/test_query.py new file mode 100644 index 0000000..0dfc0d0 --- /dev/null +++ b/tests/unit/db/test_query.py @@ -0,0 +1,566 @@ +"""Unit tests for QueryEngine and DiagnosticEngine.""" + +from __future__ import annotations + +import sqlite3 +from unittest.mock import MagicMock, patch + +import pytest + +from open_data_agent.config import Config +from open_data_agent.db.dialect import MySQLAdapter, PostgreSQLAdapter, SQLiteAdapter +from open_data_agent.db.query import QueryEngine, QueryResult +from open_data_agent.exceptions import SafetyError + + +@pytest.fixture +def engine(sqlite_db_with_data: sqlite3.Connection) -> QueryEngine: + config = Config(row_limit=10, max_row_limit=100) + return QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + + +class TestQueryEngineExecute: + def test_select_returns_query_result(self, engine: QueryEngine) -> None: + result = engine.execute("SELECT * FROM customers") + assert isinstance(result, QueryResult) + assert result.row_count > 0 + + def test_columns_populated(self, engine: QueryEngine) -> None: + result = engine.execute("SELECT id, name FROM customers") + assert "id" 
in result.columns + assert "name" in result.columns + + def test_safety_error_propagated(self, engine: QueryEngine) -> None: + with pytest.raises(SafetyError): + engine.execute("DROP TABLE customers") + + def test_duration_ms_populated(self, engine: QueryEngine) -> None: + result = engine.execute("SELECT 1") + assert result.duration_ms >= 0 + + +class TestLimitInjection: + def test_limit_injected_when_absent(self, sqlite_db_with_data: sqlite3.Connection) -> None: + config = Config(row_limit=2, max_row_limit=100) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + result = engine.execute("SELECT * FROM customers") + assert result.truncated is True + assert result.limit_applied == 2 + + def test_existing_limit_respected(self, sqlite_db_with_data: sqlite3.Connection) -> None: + config = Config(row_limit=100, max_row_limit=1000) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + result = engine.execute("SELECT * FROM customers LIMIT 1") + assert result.limit_applied == 1 + + def test_excessive_limit_clamped(self, sqlite_db_with_data: sqlite3.Connection) -> None: + config = Config(row_limit=1000, max_row_limit=5) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + result = engine.execute("SELECT * FROM customers LIMIT 99999") + assert result.limit_applied == 5 + assert result.truncated is True + + def test_trailing_line_comment_does_not_bypass_limit( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """LIMIT must not be swallowed inside a trailing -- comment.""" + config = Config(row_limit=2, max_row_limit=100) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + # Without the fix, LIMIT would land inside the comment and be silently ignored + result = engine.execute("SELECT * FROM customers -- auto-generated by tool") + assert result.limit_applied == 2 + # If LIMIT was inside the comment it would return all rows without truncation + assert 
result.truncated is True + + def test_string_literal_with_double_dash_not_corrupted( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """SQL with -- inside a string literal must not be mangled during LIMIT injection.""" + config = Config(row_limit=100, max_row_limit=1000) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + # Carol White has NULL email; this query uses a string literal containing '--' + # A regex-strip approach would corrupt this to broken SQL + result = engine.execute("SELECT * FROM customers WHERE name != 'alice -- bob'") + # Must succeed (no syntax error) and return rows + assert result.error is None + assert result.row_count >= 0 + + def test_limit_word_in_string_literal_still_injects_default_limit( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """LIMIT inside a string literal must not prevent injection of the default limit. + + A query like: SELECT * FROM t WHERE notes = 'LIMIT 5 is the max' + has no real LIMIT clause — the detector must ignore the word inside the string. + Without the fix, _LIMIT_PATTERN would match 'LIMIT 5' inside the literal, + skip injection, and return all rows without truncation. 
+ """ + config = Config(row_limit=2, max_row_limit=100) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + # The string literal 'LIMIT 9999' must not be mistaken for a real LIMIT clause + result = engine.execute("SELECT * FROM customers WHERE name != 'LIMIT 9999 rows'") + assert result.error is None + # Default limit (2) must have been injected — not the literal value 9999 + assert result.limit_applied == 2 + assert result.truncated is True + + def test_limit_in_line_comment_still_injects_default_limit( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """LIMIT inside a -- comment must not prevent injection of the default limit.""" + config = Config(row_limit=2, max_row_limit=100) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + # The comment says LIMIT was removed — there is no real LIMIT clause + result = engine.execute("SELECT * FROM customers -- LIMIT was removed for full scan") + assert result.error is None + assert result.limit_applied == 2 + assert result.truncated is True + + def test_limit_in_block_comment_still_injects_default_limit( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """LIMIT inside a /* */ block comment must not prevent injection of the default limit. + + Without the H1 fix (_BLOCK_COMMENT stripping), the detector would match + 'LIMIT 5' inside the block comment, skip injection, and return all rows. 
+ """ + config = Config(row_limit=2, max_row_limit=100) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + # Block comment contains LIMIT — there is no real LIMIT clause after it + result = engine.execute("SELECT * FROM customers /* LIMIT 5 was here */") + assert result.error is None + # Default limit (2) must have been injected, not the literal in the comment + assert result.limit_applied == 2 + assert result.truncated is True + + def test_limit_clamp_does_not_corrupt_string_literal( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """LIMIT clamp must only replace the real LIMIT clause, not LIMIT inside a string. + + Without the H2 fix (position-mapping instead of re.sub on original SQL), the + clamp would replace the first occurrence of LIMIT, corrupting the string literal. + """ + config = Config(row_limit=1000, max_row_limit=5) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + # String literal contains 'old LIMIT 99999'; the real LIMIT clause is LIMIT 500 + # The clamp must rewrite LIMIT 500 → LIMIT 5, leaving the string literal intact + result = engine.execute("SELECT * FROM customers WHERE name != 'old LIMIT 99999' LIMIT 500") + assert result.error is None + assert result.limit_applied == 5 + + def test_limit_in_dollar_quoted_string_still_injects_default_limit( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """LIMIT inside a PostgreSQL dollar-quoted string must not suppress default limit injection. + + Without the H1 fix (_DOLLAR_QUOTE stripping), the detector would match 'LIMIT 5' + inside the $$...$$ literal, skip injection, and return all rows without truncation. + + Note: SQLite does not natively support $$-quoting as a string literal, but since + the _strip_literals_and_comments logic is engine-agnostic we can verify the regex + behaviour here by using a value that SQLite treats as a column alias expression. 
+ We test the stripping function directly to avoid SQLite parse differences. + """ + from open_data_agent.db.query import _LIMIT_PATTERN, _strip_literals_and_comments + + # Dollar-quoted literal only — no real LIMIT clause → must NOT detect a limit + sql_no_limit = "SELECT * FROM customers WHERE name != $$LIMIT 5$$" + stripped = _strip_literals_and_comments(sql_no_limit) + assert _LIMIT_PATTERN.search(stripped) is None, ( + "LIMIT inside $$ should not be detected as a real LIMIT clause" + ) + + # Dollar-quoted literal PLUS a real LIMIT clause → must detect only the real one + sql_with_limit = "SELECT * FROM customers WHERE name != $$LIMIT 5$$ LIMIT 100" + stripped2 = _strip_literals_and_comments(sql_with_limit) + m = _LIMIT_PATTERN.search(stripped2) + assert m is not None, "Real LIMIT clause must be detected even when $$ literal present" + assert int(m.group(1)) == 100, "Detected LIMIT value must be 100 (the real clause), not 5" + + def test_limit_clamp_does_not_corrupt_dollar_quoted_string( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + """LIMIT clamp must not rewrite bytes inside a dollar-quoted literal. + + Without the H1 fix, the clamp would target the first LIMIT match — which could + be inside a $$...$$ literal — corrupting the string content. + We verify directly against the stripping + position logic. 
+ """ + from open_data_agent.db.query import _LIMIT_PATTERN, _strip_literals_and_comments + + # Simulate: $$LIMIT 5$$ present, real LIMIT 500 follows; max_limit=3 + sql = "SELECT * FROM t WHERE x = $$LIMIT 5$$ LIMIT 500" + stripped = _strip_literals_and_comments(sql) + m = _LIMIT_PATTERN.search(stripped) + assert m is not None + + # The match must point at the REAL 'LIMIT 500', not the one inside $$...$$ + assert int(m.group(1)) == 500, "Clamp must target LIMIT 500, not the $$ literal" + + # Applying the clamp at the match position must leave the $$ literal untouched + max_limit = 3 + clamped = sql[: m.start()] + f"LIMIT {max_limit}" + sql[m.end() :] + assert "$$LIMIT 5$$" in clamped, "Dollar-quoted literal must be preserved after clamp" + assert "LIMIT 500" not in clamped, "Original real LIMIT must have been replaced" + assert f"LIMIT {max_limit}" in clamped + + +class TestQueryEngineTimeout: + """Tests for the timeout enforcement code path in QueryEngine.execute().""" + + def test_timeout_error_message_format(self) -> None: + """When timeout fires, result.error must contain a human-readable timeout message. + + Uses a FakeThread class (not a class-level mock) to avoid interfering with + pytest's own internal threads. 
+ """ + import unittest.mock as mock + + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.execute("CREATE TABLE t (x INTEGER)") + conn.commit() + + config = Config(row_limit=10000, max_row_limit=10000, query_timeout_seconds=5) + engine = QueryEngine(SQLiteAdapter(), conn, config=config) + + class _AlwaysTimedOutThread: + def __init__(self, target=None, daemon=False, **kwargs): + pass + + def start(self): + pass + + def join(self, timeout=None): + pass + + def is_alive(self): + return True + + with mock.patch("open_data_agent.db.query.threading.Thread", _AlwaysTimedOutThread): + result = engine.execute("SELECT * FROM t") + + assert result.error is not None + assert "5" in result.error # timeout value must appear in message + assert "timed out" in result.error.lower() or "timeout" in result.error.lower() + conn.close() + + def test_timeout_result_has_empty_rows(self) -> None: + """On timeout, result.rows must be empty (partial results are not returned). + + Uses a mock on threading.Thread that intercepts only the thread created by + QueryEngine.execute() — patching Thread class-level methods globally is unsafe + because it also affects pytest's internal threads. 
+ """ + import threading + import unittest.mock as mock + + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.execute("CREATE TABLE t (x INTEGER)") + conn.executemany("INSERT INTO t VALUES (?)", [(i,) for i in range(50)]) + conn.commit() + + config = Config(row_limit=10000, max_row_limit=10000, query_timeout_seconds=30) + engine = QueryEngine(SQLiteAdapter(), conn, config=config) + + original_thread_cls = threading.Thread + + class _FakeThread: + """Intercepts the specific query thread and simulates a timeout.""" + + def __init__(self, target=None, daemon=False, **kwargs): + self._inner = original_thread_cls(target=target, daemon=daemon, **kwargs) + + def start(self): + # Don't actually start the thread — simulates a thread that never finishes + pass + + def join(self, timeout=None): + # Never actually join — so is_alive() stays True + pass + + def is_alive(self): + return True # Simulate timeout: thread appears still running + + with mock.patch("open_data_agent.db.query.threading.Thread", _FakeThread): + result = engine.execute("SELECT * FROM t") + + assert result.error is not None + assert "timed out" in result.error.lower() or "timeout" in result.error.lower() + assert result.rows == [] + conn.close() + + +class TestDiagnosticEngine: + def test_zero_rows_triggers_diagnostic_to_stderr( + self, sqlite_db_with_data: sqlite3.Connection, capsys: pytest.CaptureFixture[str] + ) -> None: + from open_data_agent.db.diagnostics import DiagnosticEngine + + result = QueryResult( + columns=["id"], + rows=[], + row_count=0, + duration_ms=1.0, + sql="SELECT * FROM customers WHERE status='nonexistent'", + truncated=False, + limit_applied=100, + ) + engine = DiagnosticEngine(SQLiteAdapter(), sqlite_db_with_data) + engine.diagnose("SELECT * FROM customers WHERE status='nonexistent'", result) + + captured = capsys.readouterr() + assert "[diagnostic]" in captured.err + + def test_error_diagnostic_written_to_stderr( + self, sqlite_db_with_data: sqlite3.Connection, 
capsys: pytest.CaptureFixture[str] + ) -> None: + from open_data_agent.db.diagnostics import DiagnosticEngine + + result = QueryResult( + columns=[], + rows=[], + row_count=0, + duration_ms=1.0, + sql="SELECT * FROM nonexistent_table", + truncated=False, + limit_applied=100, + error="no such table: nonexistent_table", + ) + engine = DiagnosticEngine(SQLiteAdapter(), sqlite_db_with_data) + output = engine.diagnose("SELECT * FROM nonexistent_table", result) + assert "[diagnostic]" in output + + def test_normal_result_no_stderr( + self, sqlite_db_with_data: sqlite3.Connection, capsys: pytest.CaptureFixture[str] + ) -> None: + from open_data_agent.db.diagnostics import DiagnosticEngine + + result = QueryResult( + columns=["id"], + rows=[(1,)], + row_count=1, + duration_ms=1.0, + sql="SELECT id FROM customers LIMIT 1", + truncated=False, + limit_applied=1, + ) + engine = DiagnosticEngine(SQLiteAdapter(), sqlite_db_with_data) + engine.diagnose("SELECT id FROM customers LIMIT 1", result) + captured = capsys.readouterr() + # Normal result → no stderr output + assert captured.err == "" + + +class TestServerTimeoutPath: + """Tests for the DB-level timeout path (PostgreSQL / MySQL adapters).""" + + def _make_engine(self, adapter: object, conn: object) -> QueryEngine: + config = Config(row_limit=100, max_row_limit=1000, query_timeout_seconds=30) + return QueryEngine(adapter, conn, config=config) # type: ignore[arg-type] + + def test_postgres_adapter_supports_server_timeout(self) -> None: + assert PostgreSQLAdapter().supports_server_timeout is True + + def test_mysql_adapter_supports_server_timeout(self) -> None: + assert MySQLAdapter().supports_server_timeout is True + + def test_sqlite_adapter_does_not_support_server_timeout(self) -> None: + assert SQLiteAdapter().supports_server_timeout is False + + def test_server_timeout_path_calls_set_statement_timeout(self) -> None: + adapter = MagicMock(spec=PostgreSQLAdapter) + adapter.supports_server_timeout = True + 
adapter.safe_prefixes = ["SELECT", "WITH", "EXPLAIN", "SHOW", "TABLE"] + adapter.dangerous_patterns = [] + + mock_cursor = MagicMock() + mock_cursor.description = [("id",), ("name",)] + mock_cursor.fetchall.return_value = [(1, "Alice")] + + mock_conn = MagicMock() + mock_conn.execute.return_value = mock_cursor + + engine = self._make_engine(adapter, mock_conn) + result = engine.execute("SELECT id, name FROM customers") + + # Called twice: once to set the timeout, once to reset it to 0 after execution. + assert adapter.set_statement_timeout.call_count == 2 + calls = adapter.set_statement_timeout.call_args_list + assert calls[0].args == (mock_conn, 30) # set + assert calls[1].args == (mock_conn, 0) # reset + assert result.error is None + assert result.columns == ["id", "name"] + assert result.row_count == 1 + + def test_server_timeout_path_surfaces_timeout_as_error(self) -> None: + adapter = MagicMock(spec=PostgreSQLAdapter) + adapter.supports_server_timeout = True + adapter.safe_prefixes = ["SELECT", "WITH", "EXPLAIN", "SHOW", "TABLE"] + adapter.dangerous_patterns = [] + + # Simulate a psycopg3 QueryCanceled (pgcode 57014) from statement_timeout + timeout_exc = Exception("canceling statement due to statement timeout") + timeout_exc.pgcode = "57014" # type: ignore[attr-defined] + mock_conn = MagicMock() + mock_conn.execute.side_effect = timeout_exc + + engine = self._make_engine(adapter, mock_conn) + result = engine.execute("SELECT 1") + + assert result.error is not None + assert "timed out" in result.error + + def test_server_timeout_path_surfaces_non_timeout_exception_as_error(self) -> None: + adapter = MagicMock(spec=PostgreSQLAdapter) + adapter.supports_server_timeout = True + adapter.safe_prefixes = ["SELECT", "WITH", "EXPLAIN", "SHOW", "TABLE"] + adapter.dangerous_patterns = [] + + mock_conn = MagicMock() + mock_conn.execute.side_effect = Exception("relation does not exist") + + engine = self._make_engine(adapter, mock_conn) + result = engine.execute("SELECT 
1") + + assert result.error == "relation does not exist" + + def test_thread_timeout_path_used_for_sqlite( + self, sqlite_db_with_data: sqlite3.Connection + ) -> None: + config = Config(row_limit=100, max_row_limit=1000, query_timeout_seconds=30) + engine = QueryEngine(SQLiteAdapter(), sqlite_db_with_data, config=config) + with patch.object( + engine, "_execute_with_thread_timeout", wraps=engine._execute_with_thread_timeout + ) as mock_thread: + engine.execute("SELECT 1") + mock_thread.assert_called_once() + + def test_server_path_used_for_postgres_adapter(self) -> None: + adapter = MagicMock(spec=PostgreSQLAdapter) + adapter.supports_server_timeout = True + adapter.safe_prefixes = ["SELECT"] + adapter.dangerous_patterns = [] + + mock_cursor = MagicMock() + mock_cursor.description = [("one",)] + mock_cursor.fetchall.return_value = [(1,)] + mock_conn = MagicMock() + mock_conn.execute.return_value = mock_cursor + + engine = self._make_engine(adapter, mock_conn) + with patch.object( + engine, "_execute_with_server_timeout", wraps=engine._execute_with_server_timeout + ) as mock_server: + engine.execute("SELECT 1") + mock_server.assert_called_once() + + def test_mysql_set_statement_timeout_sql(self) -> None: + """MySQLAdapter.set_statement_timeout must emit the correct MySQL SQL.""" + adapter = MySQLAdapter() + mock_conn = MagicMock() + adapter.set_statement_timeout(mock_conn, 30) + mock_conn.execute.assert_called_once_with("SET SESSION MAX_EXECUTION_TIME = 30000") + + def test_mysql_set_statement_timeout_reset(self) -> None: + """MySQLAdapter.set_statement_timeout(conn, 0) must emit MAX_EXECUTION_TIME = 0.""" + adapter = MySQLAdapter() + mock_conn = MagicMock() + adapter.set_statement_timeout(mock_conn, 0) + mock_conn.execute.assert_called_once_with("SET SESSION MAX_EXECUTION_TIME = 0") + + def test_postgres_set_statement_timeout_sql(self) -> None: + """PostgreSQLAdapter.set_statement_timeout must emit the correct PostgreSQL SQL.""" + adapter = PostgreSQLAdapter() + 
mock_conn = MagicMock() + adapter.set_statement_timeout(mock_conn, 30) + mock_conn.execute.assert_called_once_with("SET statement_timeout = 30000") + + def test_postgres_set_statement_timeout_reset(self) -> None: + """PostgreSQLAdapter.set_statement_timeout(conn, 0) must disable the timeout.""" + adapter = PostgreSQLAdapter() + mock_conn = MagicMock() + adapter.set_statement_timeout(mock_conn, 0) + mock_conn.execute.assert_called_once_with("SET statement_timeout = 0") + + def test_server_timeout_path_resets_timeout_after_success(self) -> None: + """Timeout must be reset to 0 after a successful query (pooled connection safety).""" + adapter = MagicMock(spec=PostgreSQLAdapter) + adapter.supports_server_timeout = True + adapter.safe_prefixes = ["SELECT"] + adapter.dangerous_patterns = [] + + mock_cursor = MagicMock() + mock_cursor.description = [("one",)] + mock_cursor.fetchall.return_value = [(1,)] + mock_conn = MagicMock() + mock_conn.execute.return_value = mock_cursor + + engine = self._make_engine(adapter, mock_conn) + engine.execute("SELECT 1") + + # set_statement_timeout called twice: once to set, once to reset to 0 + assert adapter.set_statement_timeout.call_count == 2 + calls = adapter.set_statement_timeout.call_args_list + assert calls[0].args == (mock_conn, 30) # set + assert calls[1].args == (mock_conn, 0) # reset + + def test_server_timeout_path_resets_timeout_after_error(self) -> None: + """Timeout must be reset to 0 even when the query raises an exception.""" + adapter = MagicMock(spec=PostgreSQLAdapter) + adapter.supports_server_timeout = True + adapter.safe_prefixes = ["SELECT"] + adapter.dangerous_patterns = [] + + mock_conn = MagicMock() + mock_conn.execute.side_effect = Exception("relation does not exist") + + engine = self._make_engine(adapter, mock_conn) + result = engine.execute("SELECT 1") + + assert result.error == "relation does not exist" + # set_statement_timeout called twice: once to set, once to reset to 0 + assert 
adapter.set_statement_timeout.call_count == 2 + calls = adapter.set_statement_timeout.call_args_list + assert calls[1].args == (mock_conn, 0) # reset always happens + + def test_is_timeout_exception_pgcode_57014(self) -> None: + """_is_timeout_exception must detect psycopg3 QueryCanceled via pgcode.""" + exc = Exception("canceling statement due to statement timeout") + exc.pgcode = "57014" # type: ignore[attr-defined] + assert QueryEngine._is_timeout_exception(exc) is True + + def test_is_timeout_exception_mysql_errno_3024(self) -> None: + """_is_timeout_exception must detect MySQL ER_QUERY_TIMEOUT errno 3024.""" + exc = Exception( + 3024, + "Query execution was interrupted, maximum statement execution time exceeded", + ) + assert QueryEngine._is_timeout_exception(exc) is True + + def test_is_timeout_exception_false_for_unrelated_error(self) -> None: + """_is_timeout_exception must NOT match errors whose message contains 'timeout'.""" + # A real false-positive risk: SSL timeout or lock timeout in message + exc = Exception("SSL connection timeout while connecting to host") + assert QueryEngine._is_timeout_exception(exc) is False + + def test_set_statement_timeout_failure_does_not_mask_query_result(self) -> None: + """If set_statement_timeout raises, the query must still execute and return results.""" + adapter = MagicMock(spec=PostgreSQLAdapter) + adapter.supports_server_timeout = True + adapter.safe_prefixes = ["SELECT"] + adapter.dangerous_patterns = [] + adapter.set_statement_timeout.side_effect = [Exception("permission denied"), None] + + mock_cursor = MagicMock() + mock_cursor.description = [("one",)] + mock_cursor.fetchall.return_value = [(42,)] + mock_conn = MagicMock() + mock_conn.execute.return_value = mock_cursor + + engine = self._make_engine(adapter, mock_conn) + result = engine.execute("SELECT 42") + + # Query still ran and returned data despite timeout config failure + assert result.error is None + assert result.rows == [(42,)] diff --git 
a/tests/unit/db/test_safety.py b/tests/unit/db/test_safety.py new file mode 100644 index 0000000..a0b78d0 --- /dev/null +++ b/tests/unit/db/test_safety.py @@ -0,0 +1,146 @@ +"""Unit tests for SafetyChecker.""" + +from __future__ import annotations + +import pytest + +from open_data_agent.db.dialect import MySQLAdapter, PostgreSQLAdapter, SQLiteAdapter +from open_data_agent.db.safety import SafetyChecker +from open_data_agent.exceptions import SafetyError + + +@pytest.fixture +def sqlite_checker() -> SafetyChecker: + return SafetyChecker(SQLiteAdapter()) + + +@pytest.fixture +def pg_checker() -> SafetyChecker: + return SafetyChecker(PostgreSQLAdapter()) + + +@pytest.fixture +def mysql_checker() -> SafetyChecker: + return SafetyChecker(MySQLAdapter()) + + +class TestAllowedPrefixes: + def test_select_passes_sqlite(self, sqlite_checker: SafetyChecker) -> None: + sqlite_checker.validate("SELECT * FROM customers") + + def test_with_passes_sqlite(self, sqlite_checker: SafetyChecker) -> None: + sqlite_checker.validate("WITH cte AS (SELECT 1) SELECT * FROM cte") + + def test_pragma_passes_sqlite(self, sqlite_checker: SafetyChecker) -> None: + sqlite_checker.validate("PRAGMA table_info(customers)") + + def test_explain_passes_sqlite(self, sqlite_checker: SafetyChecker) -> None: + sqlite_checker.validate("EXPLAIN SELECT * FROM orders") + + def test_select_passes_pg(self, pg_checker: SafetyChecker) -> None: + pg_checker.validate("SELECT 1") + + def test_select_passes_mysql(self, mysql_checker: SafetyChecker) -> None: + mysql_checker.validate("SELECT name FROM users") + + +class TestBlockedPrefixes: + def test_insert_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError, match="INSERT"): + sqlite_checker.validate("INSERT INTO customers VALUES (1, 'Alice')") + + def test_update_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("UPDATE customers SET name='Bob' WHERE id=1") + + def 
test_delete_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("DELETE FROM customers WHERE id=1") + + def test_drop_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("DROP TABLE customers") + + def test_create_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("CREATE TABLE foo (id INT)") + + def test_alter_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("ALTER TABLE customers ADD COLUMN age INT") + + def test_truncate_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("TRUNCATE TABLE customers") + + def test_case_insensitive_insert_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("insert into customers values (1,'x')") + + +class TestDangerousPatterns: + def test_attach_database_blocked_sqlite(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError, match="dangerous pattern"): + sqlite_checker.validate("SELECT 1; ATTACH DATABASE 'evil.db' AS e") + + def test_semicolon_drop_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("SELECT 1; DROP TABLE customers") + + def test_copy_blocked_pg(self, pg_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + pg_checker.validate("SELECT 1; COPY customers TO '/tmp/out.csv'") + + def test_load_data_blocked_mysql(self, mysql_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + mysql_checker.validate("SELECT 1; LOAD DATA INFILE '/etc/passwd' INTO TABLE t") + + +class TestPerAdapterPrefixes: + """Verify dialect-specific safe/blocked prefix behaviour per FR9 spec.""" + + def test_show_allowed_pg(self, pg_checker: SafetyChecker) -> 
None: + pg_checker.validate("SHOW search_path") + + def test_show_allowed_mysql(self, mysql_checker: SafetyChecker) -> None: + mysql_checker.validate("SHOW TABLES") + + def test_show_blocked_sqlite(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("SHOW TABLES") + + def test_describe_allowed_mysql(self, mysql_checker: SafetyChecker) -> None: + mysql_checker.validate("DESCRIBE customers") + + def test_describe_blocked_sqlite(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("DESCRIBE customers") + + def test_describe_blocked_pg(self, pg_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + pg_checker.validate("DESCRIBE customers") + + def test_pragma_allowed_sqlite(self, sqlite_checker: SafetyChecker) -> None: + sqlite_checker.validate("PRAGMA table_info(customers)") + + def test_pragma_blocked_pg(self, pg_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + pg_checker.validate("PRAGMA table_info(customers)") + + def test_table_shorthand_allowed_pg(self, pg_checker: SafetyChecker) -> None: + """PostgreSQL TABLE is a valid shorthand for SELECT * FROM .""" + pg_checker.validate("TABLE customers") + + def test_table_shorthand_blocked_sqlite(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate("TABLE customers") + + +class TestEdgeCases: + def test_empty_sql_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError, match="Empty"): + sqlite_checker.validate("") + + def test_whitespace_only_blocked(self, sqlite_checker: SafetyChecker) -> None: + with pytest.raises(SafetyError): + sqlite_checker.validate(" ") diff --git a/tests/unit/db/test_schema.py b/tests/unit/db/test_schema.py new file mode 100644 index 0000000..b7933af --- /dev/null +++ b/tests/unit/db/test_schema.py @@ -0,0 +1,133 @@ +"""Unit tests for SchemaInspector using the standard 
SQLite fixture.""" + +from __future__ import annotations + +import sqlite3 + +import pytest + +from open_data_agent.db.dialect import SQLiteAdapter +from open_data_agent.db.query import QueryResult +from open_data_agent.db.schema import NormalizedColumn, SchemaInspector + + +@pytest.fixture +def inspector(sqlite_db: sqlite3.Connection) -> SchemaInspector: + return SchemaInspector(SQLiteAdapter(), sqlite_db) + + +class TestSchemaInspectorGetSchemas: + def test_returns_list_of_strings(self, inspector: SchemaInspector) -> None: + schemas = inspector.get_schemas() + assert isinstance(schemas, list) + assert all(isinstance(s, str) for s in schemas) + + def test_sqlite_returns_main(self, inspector: SchemaInspector) -> None: + schemas = inspector.get_schemas() + assert "main" in schemas + + +class TestSchemaInspectorGetTables: + def test_returns_fixture_tables(self, inspector: SchemaInspector) -> None: + tables = inspector.get_tables("main") + assert "customers" in tables + assert "orders" in tables + assert "products" in tables + + def test_returns_list_of_strings(self, inspector: SchemaInspector) -> None: + tables = inspector.get_tables("main") + assert all(isinstance(t, str) for t in tables) + + +class TestSchemaInspectorGetColumns: + def test_returns_list_of_normalized_columns(self, inspector: SchemaInspector) -> None: + cols = inspector.get_columns("main", "customers") + assert len(cols) > 0 + for col in cols: + assert isinstance(col, NormalizedColumn) + + def test_customers_columns_include_id_name_email(self, inspector: SchemaInspector) -> None: + cols = inspector.get_columns("main", "customers") + names = [c.name for c in cols] + assert "id" in names + assert "name" in names + assert "email" in names + + def test_id_is_primary_key(self, inspector: SchemaInspector) -> None: + cols = inspector.get_columns("main", "customers") + id_col = next(c for c in cols if c.name == "id") + assert id_col.is_primary_key is True + + +class TestSchemaInspectorGetSample: + def 
test_returns_query_result(self, inspector: SchemaInspector) -> None: + result = inspector.get_sample("main", "customers") + assert isinstance(result, QueryResult) + + def test_sample_has_columns(self, inspector: SchemaInspector) -> None: + result = inspector.get_sample("main", "customers") + assert len(result.columns) > 0 + + def test_sample_default_limit_5(self, inspector: SchemaInspector) -> None: + result = inspector.get_sample("main", "customers") + assert result.limit_applied == 5 + + def test_truncated_false_when_table_has_fewer_rows_than_n(self) -> None: + """Table with exactly 3 rows sampled with n=5 must NOT be truncated.""" + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.row_factory = sqlite3.Row + conn.execute("CREATE TABLE t (id INTEGER PRIMARY KEY)") + for i in range(3): + conn.execute(f"INSERT INTO t VALUES ({i})") + insp = SchemaInspector(SQLiteAdapter(), conn) + result = insp.get_sample("main", "t", n=5) + assert result.truncated is False + assert result.row_count == 3 + + def test_truncated_true_when_table_has_more_rows_than_n(self) -> None: + """Table with 6 rows sampled with n=5 MUST be truncated.""" + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.row_factory = sqlite3.Row + conn.execute("CREATE TABLE t (id INTEGER PRIMARY KEY)") + for i in range(6): + conn.execute(f"INSERT INTO t VALUES ({i})") + insp = SchemaInspector(SQLiteAdapter(), conn) + result = insp.get_sample("main", "t", n=5) + assert result.truncated is True + assert result.row_count == 5 + + def test_truncated_false_when_table_has_exactly_n_rows(self) -> None: + """Table with exactly n rows must NOT be marked truncated (no data cut off).""" + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.row_factory = sqlite3.Row + conn.execute("CREATE TABLE t (id INTEGER PRIMARY KEY)") + for i in range(5): + conn.execute(f"INSERT INTO t VALUES ({i})") + insp = SchemaInspector(SQLiteAdapter(), conn) + result = insp.get_sample("main", "t", 
n=5) + assert result.truncated is False + assert result.row_count == 5 + + +class TestSchemaInspectorGetProfile: + def test_returns_dict_per_column( + self, inspector: SchemaInspector, sqlite_db_with_data: sqlite3.Connection + ) -> None: + insp = SchemaInspector(SQLiteAdapter(), sqlite_db_with_data) + profile = insp.get_profile("main", "customers") + assert isinstance(profile, dict) + # Should have an entry per column + assert "id" in profile + assert "name" in profile + + def test_profile_has_required_keys( + self, inspector: SchemaInspector, sqlite_db_with_data: sqlite3.Connection + ) -> None: + insp = SchemaInspector(SQLiteAdapter(), sqlite_db_with_data) + profile = insp.get_profile("main", "orders") + for col_stats in profile.values(): + assert "null_count" in col_stats + assert "distinct_count" in col_stats + assert "min" in col_stats + assert "max" in col_stats + assert "sample_values" in col_stats diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 0000000..ff131e4 --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,143 @@ +"""Unit tests for config.py — Config loading, merge order, path resolution.""" + +from __future__ import annotations + +from pathlib import Path +from unittest.mock import patch + +import pytest +import yaml + +from open_data_agent.config import Config, get_config, get_config_dir +from open_data_agent.exceptions import ConfigError + + +class TestGetConfigDir: + def test_returns_path_object(self) -> None: + result = get_config_dir() + assert isinstance(result, Path) + + def test_ends_with_open_data_agent(self) -> None: + result = get_config_dir() + assert result.name == "open-data-agent" + + def test_parent_is_dot_config(self) -> None: + result = get_config_dir() + assert result.parent.name == ".config" + + def test_resolves_under_home(self) -> None: + result = get_config_dir() + assert str(result).startswith(str(Path.home())) + + +class TestConfigDefaults: + def 
test_defaults_when_no_config_file(self, tmp_path: Path) -> None: + """When no config.yaml exists, defaults are returned.""" + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config() + assert config.row_limit == 1000 + assert config.max_row_limit == 10000 + assert config.query_timeout_seconds == 30 + assert config.docs_staleness_days == 7 + assert config.log_level == "INFO" + assert config.strict_mode is False + + def test_config_is_dataclass(self) -> None: + config = Config() + assert hasattr(config, "row_limit") + assert hasattr(config, "max_row_limit") + assert hasattr(config, "query_timeout_seconds") + assert hasattr(config, "docs_staleness_days") + assert hasattr(config, "log_level") + assert hasattr(config, "strict_mode") + + +class TestYamlOverride: + def test_yaml_overrides_defaults(self, tmp_path: Path) -> None: + """Values in config.yaml override defaults.""" + config_file = tmp_path / "config.yaml" + config_file.write_text(yaml.dump({"row_limit": 500})) + + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config() + + assert config.row_limit == 500 + # Non-overridden values stay at defaults + assert config.max_row_limit == 10000 + + def test_yaml_strict_mode(self, tmp_path: Path) -> None: + config_file = tmp_path / "config.yaml" + config_file.write_text(yaml.dump({"strict_mode": True})) + + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config() + + assert config.strict_mode is True + + def test_missing_config_file_graceful_fallback(self, tmp_path: Path) -> None: + """No config.yaml → no error, defaults returned.""" + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config() + assert config.row_limit == 1000 + + +class TestCliOverride: + def test_cli_flag_overrides_yaml(self, tmp_path: Path) -> None: + """CLI flag wins over config.yaml.""" + config_file = tmp_path / "config.yaml" + 
config_file.write_text(yaml.dump({"row_limit": 500})) + + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config(row_limit=200) + + assert config.row_limit == 200 + + def test_none_cli_override_does_not_apply(self, tmp_path: Path) -> None: + """None CLI override should not overwrite config.yaml value.""" + config_file = tmp_path / "config.yaml" + config_file.write_text(yaml.dump({"row_limit": 500})) + + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config(row_limit=None) + + assert config.row_limit == 500 + + +class TestConnectionOptionsOverride: + def test_connection_options_override_yaml(self, tmp_path: Path) -> None: + """Per-connection options override global config.yaml.""" + config_file = tmp_path / "config.yaml" + config_file.write_text(yaml.dump({"row_limit": 500})) + + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config(connection_options={"row_limit": 750}) + + assert config.row_limit == 750 + + def test_cli_overrides_connection_options(self, tmp_path: Path) -> None: + """CLI flag wins over connection options.""" + with patch("open_data_agent.config.get_config_dir", return_value=tmp_path): + config = get_config(connection_options={"row_limit": 750}, row_limit=100) + assert config.row_limit == 100 + + +class TestConfigError: + def test_invalid_yaml_raises_config_error(self, tmp_path: Path) -> None: + config_file = tmp_path / "config.yaml" + config_file.write_text(": invalid: yaml: [unclosed") + + with ( + patch("open_data_agent.config.get_config_dir", return_value=tmp_path), + pytest.raises(ConfigError), + ): + get_config() + + def test_non_mapping_yaml_raises_config_error(self, tmp_path: Path) -> None: + config_file = tmp_path / "config.yaml" + config_file.write_text("- item1\n- item2\n") + + with ( + patch("open_data_agent.config.get_config_dir", return_value=tmp_path), + pytest.raises(ConfigError, match="Expected a YAML 
mapping"), + ): + get_config() diff --git a/tests/unit/test_docs_generator.py b/tests/unit/test_docs_generator.py new file mode 100644 index 0000000..4fe2a18 --- /dev/null +++ b/tests/unit/test_docs_generator.py @@ -0,0 +1,263 @@ +"""Unit tests for DocGenerator — directory structure, frontmatter, staleness, path safety.""" + +from __future__ import annotations + +import sqlite3 +from pathlib import Path + +import pytest + +from open_data_agent.db.dialect import SQLiteAdapter +from open_data_agent.db.schema import SchemaInspector +from open_data_agent.docs_generator import DocGenerator, validate_path_component +from open_data_agent.exceptions import SafetyError + + +@pytest.fixture +def generator(sqlite_db_with_data: sqlite3.Connection) -> DocGenerator: + inspector = SchemaInspector(SQLiteAdapter(), sqlite_db_with_data) + return DocGenerator(inspector, db_type="sqlite") + + +class TestValidatePathComponent: + def test_valid_name_passes(self) -> None: + assert validate_path_component("my_table") == "my_table" + + def test_forward_slash_raises(self) -> None: + with pytest.raises(SafetyError): + validate_path_component("schema/table") + + def test_backslash_raises(self) -> None: + with pytest.raises(SafetyError): + validate_path_component("schema\\table") + + def test_dotdot_raises(self) -> None: + with pytest.raises(SafetyError): + validate_path_component("../evil") + + def test_null_byte_raises(self) -> None: + with pytest.raises(SafetyError): + validate_path_component("table\0name") + + def test_empty_string_raises(self) -> None: + with pytest.raises(SafetyError): + validate_path_component("") + + +class TestDocGeneratorStructure: + def test_creates_root_index(self, generator: DocGenerator, tmp_path: Path) -> None: + generator.generate(tmp_path) + assert (tmp_path / "_index.md").exists() + + def test_creates_schema_dir(self, generator: DocGenerator, tmp_path: Path) -> None: + generator.generate(tmp_path) + assert (tmp_path / "main").is_dir() + + def 
test_creates_schema_index(self, generator: DocGenerator, tmp_path: Path) -> None: + generator.generate(tmp_path) + assert (tmp_path / "main" / "_index.md").exists() + + def test_creates_table_docs(self, generator: DocGenerator, tmp_path: Path) -> None: + generator.generate(tmp_path) + assert (tmp_path / "main" / "customers.md").exists() + assert (tmp_path / "main" / "orders.md").exists() + assert (tmp_path / "main" / "products.md").exists() + + def test_table_doc_has_frontmatter(self, generator: DocGenerator, tmp_path: Path) -> None: + generator.generate(tmp_path) + content = (tmp_path / "main" / "customers.md").read_text() + assert "generated_at:" in content + assert "table: customers" in content + assert "db_type: sqlite" in content + + def test_table_doc_has_column_list(self, generator: DocGenerator, tmp_path: Path) -> None: + generator.generate(tmp_path) + content = (tmp_path / "main" / "customers.md").read_text() + assert "id" in content + assert "name" in content + + def test_returns_stats(self, generator: DocGenerator, tmp_path: Path) -> None: + stats = generator.generate(tmp_path) + assert "tables_generated" in stats + assert "tables_skipped" in stats + assert stats["tables_generated"] > 0 + + +class TestDocGeneratorEnrich: + def test_enrich_adds_profile_section(self, generator: DocGenerator, tmp_path: Path) -> None: + """generate(enrich=True) must append a ## Profile section to table docs.""" + generator.generate(tmp_path, enrich=True) + content = (tmp_path / "main" / "customers.md").read_text() + assert "## Profile" in content + + def test_enrich_profile_has_column_stats(self, generator: DocGenerator, tmp_path: Path) -> None: + """Profile section must include column names and at least some stat values.""" + generator.generate(tmp_path, enrich=True) + content = (tmp_path / "main" / "customers.md").read_text() + # The 'id' column must appear in the profile table + profile_start = content.find("## Profile") + assert profile_start != -1 + profile_section = 
content[profile_start:] + assert "id" in profile_section + + def test_enrich_false_has_no_profile_section( + self, generator: DocGenerator, tmp_path: Path + ) -> None: + """generate(enrich=False, the default) must NOT include a ## Profile section.""" + generator.generate(tmp_path, enrich=False) + content = (tmp_path / "main" / "customers.md").read_text() + assert "## Profile" not in content + + +class TestDocGeneratorSkipExisting: + def test_skip_existing_skips_fresh_docs(self, generator: DocGenerator, tmp_path: Path) -> None: + # First run + stats1 = generator.generate(tmp_path) + generated1 = stats1["tables_generated"] + + # Second run with skip_existing + stats2 = generator.generate(tmp_path, skip_existing=True) + # All tables should be skipped (freshly generated) + assert stats2["tables_skipped"] >= generated1 + + +class TestDocGeneratorStatus: + def test_status_shows_all_missing_when_no_catalog( + self, generator: DocGenerator, tmp_path: Path + ) -> None: + status = generator.get_status(tmp_path / "nonexistent") + assert status["total"] == 0 + + def test_status_shows_up_to_date_after_generate( + self, generator: DocGenerator, tmp_path: Path + ) -> None: + generator.generate(tmp_path) + status = generator.get_status(tmp_path) + assert status["up_to_date"] > 0 + assert status["missing"] == 0 + assert status["stale"] == 0 + + +class TestCatalogIndexGeneratedAt: + def test_catalog_index_contains_generated_at( + self, generator: DocGenerator, tmp_path: Path + ) -> None: + """_write_catalog_index must write generated_at: so _is_docs_stale() can read it.""" + generator.generate(tmp_path) + content = (tmp_path / "_index.md").read_text() + assert "generated_at:" in content + + def test_catalog_index_has_yaml_frontmatter( + self, generator: DocGenerator, tmp_path: Path + ) -> None: + """Catalog _index.md must use --- delimited YAML frontmatter (matches architecture spec).""" + generator.generate(tmp_path) + lines = (tmp_path / "_index.md").read_text().splitlines() + 
assert lines[0] == "---", "First line must be --- (YAML frontmatter open)" + # Find the closing --- + close_idx = next((i for i, line in enumerate(lines[1:], 1) if line.strip() == "---"), None) + assert close_idx is not None, "No closing --- found for YAML frontmatter" + # generated_at must appear inside the frontmatter block + frontmatter_lines = lines[1:close_idx] + assert any(line.startswith("generated_at:") for line in frontmatter_lines), ( + "generated_at: not found inside frontmatter block" + ) + + def test_catalog_index_generated_at_is_iso_timestamp( + self, generator: DocGenerator, tmp_path: Path + ) -> None: + """The generated_at value inside the YAML frontmatter must be a valid ISO 8601 datetime. + + Searches only within the --- delimited frontmatter block, consistent with + how _is_docs_stale() parses the file. + """ + from datetime import datetime + + generator.generate(tmp_path) + lines = (tmp_path / "_index.md").read_text().splitlines() + assert lines and lines[0].strip() == "---", "No YAML frontmatter found" + + for line in lines[1:]: + if line.strip() == "---": + break # End of frontmatter — generated_at not found + if line.startswith("generated_at:"): + ts_str = line[len("generated_at:") :].strip().strip('"') + # Should not raise + dt = datetime.fromisoformat(ts_str) + assert dt is not None + return + pytest.fail("No generated_at: line found inside YAML frontmatter of catalog _index.md") + + +class TestIsDocsStale: + """Tests for cli_query._is_docs_stale() — frontmatter-aware staleness detection.""" + + def _make_catalog_index(self, path: Path, generated_at: str) -> None: + """Write a catalog _index.md with proper YAML frontmatter.""" + content = f'---\ngenerated_at: "{generated_at}"\n---\n\n# Data Catalog\n' + path.write_text(content) + + def test_returns_true_when_file_missing(self, tmp_path: Path) -> None: + from unittest.mock import patch + + from open_data_agent.cli_query import _is_docs_stale + + fake_dir = tmp_path / "docs" / "data-catalog" 
+ with patch("open_data_agent.cli_query._DEFAULT_DOCS_DIR", fake_dir): + assert _is_docs_stale(7) is True + + def test_returns_false_for_fresh_docs(self, tmp_path: Path) -> None: + from datetime import UTC, datetime + from unittest.mock import patch + + from open_data_agent.cli_query import _is_docs_stale + + fake_dir = tmp_path / "docs" / "data-catalog" + fake_dir.mkdir(parents=True) + now = datetime.now(UTC).isoformat(timespec="seconds") + self._make_catalog_index(fake_dir / "_index.md", now) + + with patch("open_data_agent.cli_query._DEFAULT_DOCS_DIR", fake_dir): + assert _is_docs_stale(7) is False + + def test_returns_true_for_old_docs(self, tmp_path: Path) -> None: + from datetime import UTC, datetime, timedelta + from unittest.mock import patch + + from open_data_agent.cli_query import _is_docs_stale + + fake_dir = tmp_path / "docs" / "data-catalog" + fake_dir.mkdir(parents=True) + old_ts = (datetime.now(UTC) - timedelta(days=10)).isoformat(timespec="seconds") + self._make_catalog_index(fake_dir / "_index.md", old_ts) + + with patch("open_data_agent.cli_query._DEFAULT_DOCS_DIR", fake_dir): + assert _is_docs_stale(7) is True + + def test_returns_true_when_no_frontmatter(self, tmp_path: Path) -> None: + """A catalog _index.md without --- frontmatter must be treated as stale.""" + from unittest.mock import patch + + from open_data_agent.cli_query import _is_docs_stale + + fake_dir = tmp_path / "docs" / "data-catalog" + fake_dir.mkdir(parents=True) + # Old-style body-level generated_at (no frontmatter delimiters) + (fake_dir / "_index.md").write_text('# Data Catalog\n\ngenerated_at: "2026-01-01"\n') + + with patch("open_data_agent.cli_query._DEFAULT_DOCS_DIR", fake_dir): + assert _is_docs_stale(7) is True + + def test_generate_then_stale_check_roundtrip( + self, generator: DocGenerator, tmp_path: Path + ) -> None: + """DocGenerator.generate() → _is_docs_stale() returns False (full roundtrip).""" + from unittest.mock import patch + + from open_data_agent.cli_query 
import _is_docs_stale + + catalog_dir = tmp_path / "catalog" + generator.generate(catalog_dir) + + with patch("open_data_agent.cli_query._DEFAULT_DOCS_DIR", catalog_dir): + assert _is_docs_stale(7) is False diff --git a/tests/unit/test_eval_runner.py b/tests/unit/test_eval_runner.py new file mode 100644 index 0000000..39840b6 --- /dev/null +++ b/tests/unit/test_eval_runner.py @@ -0,0 +1,360 @@ +"""Unit tests for EvalRunner — YAML parsing, dialect filtering, pass/fail, add.""" + +from __future__ import annotations + +import sqlite3 +from pathlib import Path +from typing import Any + +import pytest +import yaml + +from open_data_agent.eval_runner import EvalRunner +from open_data_agent.exceptions import ConfigError + + +def _write_yaml(path: Path, data: list[dict]) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(yaml.dump(data, allow_unicode=True), encoding="utf-8") + + +_ENTRY_SQLITE = { + "id": "sales-001", + "question": "How many customers?", + "sql": "SELECT COUNT(*) FROM customers", + "dialects": ["sqlite", "postgresql"], + "active": True, +} + +_ENTRY_PG_ONLY = { + "id": "sales-002", + "question": "PG-only query", + "sql": "SELECT 1", + "dialects": ["postgresql"], + "active": True, +} + +_ENTRY_INACTIVE = { + "id": "sales-003", + "question": "Inactive entry", + "sql": "SELECT 1", + "dialects": ["sqlite"], + "active": False, +} + + +class TestEvalRunnerLoad: + def test_loads_entries(self, tmp_path: Path) -> None: + p = tmp_path / "evals" / "golden_queries.yaml" + _write_yaml(p, [_ENTRY_SQLITE, _ENTRY_PG_ONLY]) + runner = EvalRunner() + entries = runner.load(p) + assert len(entries) == 2 + + def test_returns_empty_for_nonexistent_file(self, tmp_path: Path) -> None: + runner = EvalRunner() + entries = runner.load(tmp_path / "nonexistent.yaml") + assert entries == [] + + def test_defaults_active_to_true(self, tmp_path: Path) -> None: + p = tmp_path / "gq.yaml" + entry = {k: v for k, v in _ENTRY_SQLITE.items() if k != "active"} + 
_write_yaml(p, [entry]) + runner = EvalRunner() + entries = runner.load(p) + assert entries[0]["active"] is True + + def test_raises_on_missing_required_field(self, tmp_path: Path) -> None: + p = tmp_path / "gq.yaml" + _write_yaml(p, [{"id": "x-001", "question": "q"}]) # missing sql + dialects + runner = EvalRunner() + with pytest.raises(ConfigError, match="missing required field"): + runner.load(p) + + def test_raises_on_non_list_yaml(self, tmp_path: Path) -> None: + p = tmp_path / "gq.yaml" + p.write_text("key: value\n", encoding="utf-8") + runner = EvalRunner() + with pytest.raises(ConfigError, match="must contain a YAML list"): + runner.load(p) + + def test_empty_yaml_returns_empty(self, tmp_path: Path) -> None: + p = tmp_path / "gq.yaml" + p.write_text("", encoding="utf-8") + runner = EvalRunner() + entries = runner.load(p) + assert entries == [] + + +class TestEvalRunnerFilterForDialect: + def test_filters_by_dialect(self) -> None: + runner = EvalRunner() + entries = [_ENTRY_SQLITE, _ENTRY_PG_ONLY, _ENTRY_INACTIVE] + result = runner.filter_for_dialect(entries, "sqlite") + assert len(result) == 1 + assert result[0]["id"] == "sales-001" + + def test_excludes_inactive(self) -> None: + runner = EvalRunner() + entries = [_ENTRY_INACTIVE] + result = runner.filter_for_dialect(entries, "sqlite") + assert result == [] + + def test_multi_dialect_entry_included_for_each(self) -> None: + runner = EvalRunner() + entries = [_ENTRY_SQLITE] + pg_result = runner.filter_for_dialect(entries, "postgresql") + sqlite_result = runner.filter_for_dialect(entries, "sqlite") + assert len(pg_result) == 1 + assert len(sqlite_result) == 1 + + def test_empty_entries(self) -> None: + runner = EvalRunner() + assert runner.filter_for_dialect([], "sqlite") == [] + + +class TestEvalRunnerRun: + def _make_engine(self, tmp_path: Path) -> Any: + """Build a real QueryEngine backed by an in-memory SQLite DB. + + check_same_thread=False is required because QueryEngine executes in a worker thread. 
+ """ + from open_data_agent.db.dialect import SQLiteAdapter + from open_data_agent.db.query import QueryEngine + + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.execute("CREATE TABLE customers (id INTEGER PRIMARY KEY, name TEXT)") + conn.execute("INSERT INTO customers VALUES (1, 'Alice')") + conn.commit() + + adapter = SQLiteAdapter() + return QueryEngine(adapter, conn), adapter + + def test_passing_query(self, tmp_path: Path) -> None: + engine, adapter = self._make_engine(tmp_path) + runner = EvalRunner() + entries = [_ENTRY_SQLITE] + results = runner.run(entries, adapter, engine) + assert len(results) == 1 + assert results[0]["passed"] is True + assert results[0]["row_count"] >= 1 + + def test_failing_query_zero_rows(self, tmp_path: Path) -> None: + from open_data_agent.db.dialect import SQLiteAdapter + from open_data_agent.db.query import QueryEngine + + conn = sqlite3.connect(":memory:", check_same_thread=False) + conn.execute("CREATE TABLE empty_table (id INTEGER)") + conn.commit() + engine = QueryEngine(SQLiteAdapter(), conn) + adapter = SQLiteAdapter() + + runner = EvalRunner() + entries = [ + { + "id": "test-001", + "question": "Zero rows", + "sql": "SELECT * FROM empty_table", + "dialects": ["sqlite"], + "active": True, + } + ] + results = runner.run(entries, adapter, engine, min_rows=1) + assert results[0]["passed"] is False + assert results[0]["row_count"] == 0 + + def test_failing_query_with_error(self, tmp_path: Path) -> None: + from open_data_agent.db.dialect import SQLiteAdapter + from open_data_agent.db.query import QueryEngine + + conn = sqlite3.connect(":memory:", check_same_thread=False) + engine = QueryEngine(SQLiteAdapter(), conn) + adapter = SQLiteAdapter() + + runner = EvalRunner() + entries = [ + { + "id": "err-001", + "question": "Bad table", + "sql": "SELECT * FROM nonexistent_table_xyz", + "dialects": ["sqlite"], + "active": True, + } + ] + results = runner.run(entries, adapter, engine) + assert results[0]["passed"] 
is False + assert results[0]["error"] is not None + + def test_result_fields_present(self, tmp_path: Path) -> None: + engine, adapter = self._make_engine(tmp_path) + runner = EvalRunner() + results = runner.run([_ENTRY_SQLITE], adapter, engine) + required = {"id", "question", "sql", "passed", "row_count", "duration_ms", "error"} + assert required.issubset(set(results[0].keys())) + + +class TestEvalRunnerNextId: + def test_generates_first_id(self) -> None: + runner = EvalRunner() + assert runner.next_id_for_domain([], "sales") == "sales-001" + + def test_increments_existing(self) -> None: + runner = EvalRunner() + entries = [{"id": "sales-001"}, {"id": "sales-002"}] + assert runner.next_id_for_domain(entries, "sales") == "sales-003" + + def test_different_domain_does_not_interfere(self) -> None: + runner = EvalRunner() + entries = [{"id": "orders-005"}] + assert runner.next_id_for_domain(entries, "sales") == "sales-001" + + +class TestEvalRunnerAppendEntry: + def test_creates_file_and_appends(self, tmp_path: Path) -> None: + p = tmp_path / "evals" / "gq.yaml" + runner = EvalRunner() + entry = { + "id": "sales-001", + "question": "q", + "sql": "SELECT 1", + "dialects": ["sqlite"], + "active": True, + } + runner.append_entry(p, entry, []) + assert p.exists() + loaded = runner.load(p) + assert len(loaded) == 1 + assert loaded[0]["id"] == "sales-001" + + def test_rejects_duplicate_id(self, tmp_path: Path) -> None: + p = tmp_path / "gq.yaml" + runner = EvalRunner() + entry = { + "id": "sales-001", + "question": "q", + "sql": "SELECT 1", + "dialects": ["sqlite"], + "active": True, + } + runner.append_entry(p, entry, []) + with pytest.raises(ConfigError, match="already exists"): + runner.append_entry(p, entry, runner.load(p)) + + def test_appends_to_existing(self, tmp_path: Path) -> None: + p = tmp_path / "gq.yaml" + runner = EvalRunner() + e1 = { + "id": "s-001", + "question": "q1", + "sql": "SELECT 1", + "dialects": ["sqlite"], + "active": True, + } + e2 = { + "id": 
"s-002", + "question": "q2", + "sql": "SELECT 2", + "dialects": ["sqlite"], + "active": True, + } + runner.append_entry(p, e1, []) + runner.append_entry(p, e2, runner.load(p)) + loaded = runner.load(p) + assert len(loaded) == 2 + + +class TestEvalRunnerHyphenatedDomains: + """IDs with hyphens in the domain segment (e.g. order-items-001) must be accepted.""" + + def test_hyphenated_domain_id_accepted_by_load(self, tmp_path: Path) -> None: + runner = EvalRunner() + p = tmp_path / "gq.yaml" + entry = { + "id": "order-items-001", + "question": "q", + "sql": "SELECT 1", + "dialects": ["sqlite"], + "active": True, + } + runner.append_entry(p, entry, []) + loaded = runner.load(p) + assert loaded[0]["id"] == "order-items-001" + + def test_hyphenated_domain_append_entry_accepted(self, tmp_path: Path) -> None: + runner = EvalRunner() + p = tmp_path / "gq.yaml" + entry = { + "id": "multi-word-domain-001", + "question": "q", + "sql": "SELECT 1", + "dialects": ["sqlite"], + "active": True, + } + # Should not raise ConfigError + runner.append_entry(p, entry, []) + + def test_next_id_for_hyphenated_domain(self) -> None: + runner = EvalRunner() + entries = [{"id": "order-items-001"}, {"id": "order-items-002"}] + assert runner.next_id_for_domain(entries, "order-items") == "order-items-003" + + def test_next_id_first_for_hyphenated_domain(self) -> None: + runner = EvalRunner() + assert runner.next_id_for_domain([], "order-items") == "order-items-001" + + +class TestGoldenQueriesHeaderRoundtrip: + """Verify _GOLDEN_QUERIES_HEADER + yaml.dump round-trips correctly through load().""" + + def _make_entry(self, eid: str) -> dict: + return { + "id": eid, + "question": "q", + "sql": "SELECT 1", + "dialects": ["sqlite"], + "active": True, + } + + def test_single_entry_roundtrip(self, tmp_path: Path) -> None: + runner = EvalRunner() + p = tmp_path / "gq.yaml" + runner.append_entry(p, self._make_entry("sales-001"), []) + loaded = runner.load(p) + assert len(loaded) == 1 + assert 
loaded[0]["id"] == "sales-001" + + def test_file_starts_with_header_comment(self, tmp_path: Path) -> None: + runner = EvalRunner() + p = tmp_path / "gq.yaml" + runner.append_entry(p, self._make_entry("sales-001"), []) + content = p.read_text() + assert content.startswith("# golden_queries.yaml") + + def test_empty_list_after_all_removed(self, tmp_path: Path) -> None: + """Verify load() handles a file written with an empty entries list gracefully.""" + import yaml + + from open_data_agent.eval_runner import _GOLDEN_QUERIES_HEADER + + p = tmp_path / "gq.yaml" + # Simulate what append_entry would write for an empty list (edge case) + p.write_text( + _GOLDEN_QUERIES_HEADER + yaml.dump([], default_flow_style=False), + encoding="utf-8", + ) + runner = EvalRunner() + loaded = runner.load(p) + assert loaded == [] + + def test_concurrent_append_picks_up_disk_changes(self, tmp_path: Path) -> None: + """append_entry re-reads from disk, so a second append sees the first entry.""" + runner = EvalRunner() + p = tmp_path / "gq.yaml" + # Simulate two sequential appends where the second caller has a stale `existing=[]` + runner.append_entry(p, self._make_entry("sales-001"), []) + # Pass stale existing=[] — the re-read-from-disk logic should still pick up sales-001 + runner.append_entry(p, self._make_entry("sales-002"), []) + loaded = runner.load(p) + assert len(loaded) == 2 + ids = {e["id"] for e in loaded} + assert ids == {"sales-001", "sales-002"} diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py new file mode 100644 index 0000000..467755c --- /dev/null +++ b/tests/unit/test_exceptions.py @@ -0,0 +1,63 @@ +"""Unit tests for the exception hierarchy.""" + +from __future__ import annotations + +import pytest + +from open_data_agent.exceptions import ( + ConfigError, + ConnectionError, + DialectError, + EvalFailure, + OdaError, + SafetyError, + StaleDocsWarning, +) + + +class TestExceptionHierarchy: + def test_all_errors_subclass_oda_error(self) -> None: + 
for exc_class in (ConnectionError, SafetyError, ConfigError, DialectError, EvalFailure): + assert issubclass(exc_class, OdaError), f"{exc_class} must subclass OdaError" + + def test_oda_error_subclasses_exception(self) -> None: + assert issubclass(OdaError, Exception) + + def test_stale_docs_warning_subclasses_user_warning(self) -> None: + assert issubclass(StaleDocsWarning, UserWarning) + + def test_stale_docs_warning_not_an_oda_error(self) -> None: + assert not issubclass(StaleDocsWarning, OdaError) + + def test_connection_error_can_be_raised_and_caught(self) -> None: + with pytest.raises(ConnectionError): + raise ConnectionError("Cannot connect to 'my-db': timeout") + + def test_safety_error_can_be_raised_and_caught(self) -> None: + with pytest.raises(SafetyError): + raise SafetyError("Blocked pattern 'DROP TABLE' in SQL") + + def test_config_error_can_be_raised_and_caught(self) -> None: + with pytest.raises(ConfigError): + raise ConfigError("Config not found at /some/path") + + def test_dialect_error_can_be_raised_and_caught(self) -> None: + with pytest.raises(DialectError): + raise DialectError("Unsupported dialect: mssql") + + def test_eval_failure_can_be_raised_and_caught(self) -> None: + with pytest.raises(EvalFailure): + raise EvalFailure("sales-001 returned 0 rows") + + def test_catching_oda_error_catches_subclass(self) -> None: + with pytest.raises(OdaError): + raise SafetyError("caught as OdaError") + + def test_stale_docs_warning_can_be_issued(self) -> None: + import warnings + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + warnings.warn("Docs are 10 days old", StaleDocsWarning, stacklevel=2) + assert len(w) == 1 + assert issubclass(w[0].category, StaleDocsWarning) diff --git a/tests/unit/test_history.py b/tests/unit/test_history.py new file mode 100644 index 0000000..87fa811 --- /dev/null +++ b/tests/unit/test_history.py @@ -0,0 +1,97 @@ +"""Unit tests for HistoryTracker — JSONL write, schema validation, 
stats.""" + +from __future__ import annotations + +import json +from pathlib import Path + +import pytest + +from open_data_agent.history import HistoryTracker + +_SAMPLE_ENTRY = { + "id": "abc-123", + "timestamp": "2026-03-09T10:00:00+00:00", + "connection": "my-pg", + "db_type": "postgresql", + "sql": "SELECT * FROM customers LIMIT 10", + "tables": ["customers"], + "row_count": 3, + "duration_ms": 12.5, + "question": "Show me all customers", + "error": None, +} + + +class TestHistoryTrackerAppend: + def test_creates_file_on_first_append(self, tmp_path: Path) -> None: + tracker = HistoryTracker(history_file=tmp_path / "history.jsonl") + tracker.append(_SAMPLE_ENTRY) + assert (tmp_path / "history.jsonl").exists() + + def test_entry_is_valid_json(self, tmp_path: Path) -> None: + tracker = HistoryTracker(history_file=tmp_path / "history.jsonl") + tracker.append(_SAMPLE_ENTRY) + content = (tmp_path / "history.jsonl").read_text().strip() + parsed = json.loads(content) + assert parsed["id"] == "abc-123" + + def test_multiple_appends_are_separate_lines(self, tmp_path: Path) -> None: + tracker = HistoryTracker(history_file=tmp_path / "history.jsonl") + tracker.append(_SAMPLE_ENTRY) + tracker.append({**_SAMPLE_ENTRY, "id": "def-456"}) + lines = (tmp_path / "history.jsonl").read_text().strip().splitlines() + assert len(lines) == 2 + + def test_all_10_fields_present(self, tmp_path: Path) -> None: + tracker = HistoryTracker(history_file=tmp_path / "history.jsonl") + tracker.append(_SAMPLE_ENTRY) + content = (tmp_path / "history.jsonl").read_text().strip() + parsed = json.loads(content) + required = { + "id", + "timestamp", + "connection", + "db_type", + "sql", + "tables", + "row_count", + "duration_ms", + "question", + "error", + } + assert required.issubset(set(parsed.keys())) + + +class TestHistoryTrackerIterEntries: + def test_lazy_iteration(self, tmp_path: Path) -> None: + tracker = HistoryTracker(history_file=tmp_path / "history.jsonl") + tracker.append(_SAMPLE_ENTRY) + 
tracker.append({**_SAMPLE_ENTRY, "id": "def-456"}) + entries = list(tracker.iter_entries()) + assert len(entries) == 2 + + def test_empty_file_yields_nothing(self, tmp_path: Path) -> None: + tracker = HistoryTracker(history_file=tmp_path / "nonexistent.jsonl") + entries = list(tracker.iter_entries()) + assert entries == [] + + def test_iteration_returns_dicts(self, tmp_path: Path) -> None: + tracker = HistoryTracker(history_file=tmp_path / "history.jsonl") + tracker.append(_SAMPLE_ENTRY) + entry = next(tracker.iter_entries()) + assert isinstance(entry, dict) + + +class TestHistoryTrackerSecretWarning: + def test_warns_on_password_in_sql( + self, tmp_path: Path, caplog: pytest.LogCaptureFixture + ) -> None: + import logging + + tracker = HistoryTracker(history_file=tmp_path / "history.jsonl") + with caplog.at_level(logging.WARNING, logger="open_data_agent.history"): + tracker.append({**_SAMPLE_ENTRY, "sql": "SELECT * FROM t WHERE password='secret'"}) + assert any( + "secret" in r.message.lower() or "password" in r.message.lower() for r in caplog.records + ) diff --git a/tests/unit/test_memory.py b/tests/unit/test_memory.py new file mode 100644 index 0000000..d20dbc8 --- /dev/null +++ b/tests/unit/test_memory.py @@ -0,0 +1,156 @@ +"""Unit tests for MemoryManager — add, list, search, slug collision, invalid category.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + +from open_data_agent.exceptions import ConfigError +from open_data_agent.memory import MemoryManager + + +class TestMemoryManagerAdd: + def test_creates_file(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + path = mgr.add("Revenue column", "Use net_item_price not item_price", "data_quality") + assert path.exists() + + def test_file_has_yaml_frontmatter(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + path = mgr.add("Revenue column", "Use net_item_price", "data_quality") + content = 
path.read_text() + assert content.startswith("---") + assert "title:" in content + assert "category:" in content + assert "created_at:" in content + assert "tags:" in content + + def test_file_body_contains_content(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + path = mgr.add("My title", "My body content here", "business_context") + content = path.read_text() + assert "My body content here" in content + + def test_creates_directory_automatically(self, tmp_path: Path) -> None: + mem_dir = tmp_path / "deep" / "memory" + mgr = MemoryManager(memory_dir=mem_dir) + mgr.add("Test", "content", "correction") + assert mem_dir.exists() + + def test_invalid_category_raises_config_error(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + with pytest.raises(ConfigError, match="Invalid category"): + mgr.add("Test", "content", "invalid_category") + + def test_slug_collision_handled(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + path1 = mgr.add("Same title", "first", "correction") + path2 = mgr.add("Same title", "second", "correction") + assert path1 != path2 + assert path1.exists() + assert path2.exists() + + def test_all_valid_categories_accepted(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + for cat in ["query_pattern", "business_context", "correction", "data_quality"]: + mgr.add(f"Title {cat}", "body", cat) + entries = mgr.list_entries() + assert len(entries) == 4 + + def test_tags_stored_in_frontmatter(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + path = mgr.add("Tagged entry", "content", "query_pattern", tags=["sql", "revenue"]) + content = path.read_text() + assert "sql" in content + assert "revenue" in content + + +class TestMemoryManagerListEntries: + def test_list_round_trip(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("Entry A", 
"content A", "correction") + mgr.add("Entry B", "content B", "data_quality") + entries = mgr.list_entries() + titles = [e["title"] for e in entries] + assert "Entry A" in titles + assert "Entry B" in titles + + def test_list_empty_directory(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "empty_memory") + entries = mgr.list_entries() + assert entries == [] + + def test_list_nonexistent_directory(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "nonexistent") + entries = mgr.list_entries() + assert entries == [] + + def test_list_includes_path(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("My entry", "content", "business_context") + entries = mgr.list_entries() + assert "_path" in entries[0] + + +class TestMemoryManagerSearch: + def test_search_by_title(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("Revenue insight", "content", "business_context") + mgr.add("Unrelated entry", "content", "correction") + results = mgr.search("revenue") + assert len(results) == 1 + assert results[0]["title"] == "Revenue insight" + + def test_search_case_insensitive(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("Revenue insight", "content", "business_context") + results = mgr.search("REVENUE") + assert len(results) == 1 + + def test_search_by_body_content(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("Some title", "Use net_item_price for revenue", "data_quality") + results = mgr.search("net_item_price") + assert len(results) == 1 + + def test_search_by_tag(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("Tagged", "content", "query_pattern", tags=["important", "revenue"]) + results = mgr.search("important") + assert len(results) == 1 + + def test_search_no_matches(self, tmp_path: Path) -> None: + 
mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("Entry", "content", "correction") + results = mgr.search("zzznomatch") + assert results == [] + + def test_search_empty_directory(self, tmp_path: Path) -> None: + mgr = MemoryManager(memory_dir=tmp_path / "nonexistent") + results = mgr.search("anything") + assert results == [] + + def test_search_reads_each_file_once(self, tmp_path: Path) -> None: + """Regression: search() must read each file only once (no double read_text).""" + import pathlib + + mgr = MemoryManager(memory_dir=tmp_path / "memory") + mgr.add("Revenue insight", "use net price", "business_context") + + reads: list[str] = [] + orig = pathlib.Path.read_text + + def counting_read(self: pathlib.Path, *args: object, **kwargs: object) -> str: + reads.append(str(self)) + return orig(self, *args, **kwargs) # type: ignore[arg-type] + + pathlib.Path.read_text = counting_read # type: ignore[method-assign] + try: + mgr.search("revenue") + finally: + pathlib.Path.read_text = orig # type: ignore[method-assign] + + # Each .md file should be read exactly once during search + md_reads = [r for r in reads if r.endswith(".md")] + assert md_reads.count(md_reads[0]) == 1, "search() read the same file more than once" diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..ebccf33 --- /dev/null +++ b/uv.lock @@ -0,0 +1,716 @@ +version = 1 +revision = 2 +requires-python = ">=3.12" + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { 
url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" 
}, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + 
{ url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = 
"2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, 
+ { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = 
"platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { 
url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = 
"jaraco-classes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, +] + +[[package]] +name = "jaraco-context" +version = "6.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/27/7b/c3081ff1af947915503121c649f26a778e1a2101fd525f74aef997d75b7e/jaraco_context-6.1.1.tar.gz", hash = "sha256:bc046b2dc94f1e5532bd02402684414575cc11f565d929b6563125deb0a6e581", size = 15832, upload-time = "2026-03-07T15:46:04.63Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/49/c152890d49102b280ecf86ba5f80a8c111c3a155dafa3bd24aeb64fde9e1/jaraco_context-6.1.1-py3-none-any.whl", hash = "sha256:0df6a0287258f3e364072c3e40d5411b20cafa30cb28c4839d24319cecf9f808", size = 7005, upload-time = "2026-03-07T15:46:03.515Z" }, +] + +[[package]] +name = "jaraco-functools" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/27/056e0638a86749374d6f57d0b0db39f29509cce9313cf91bdc0ac4d91084/jaraco_functools-4.4.0.tar.gz", hash = "sha256:da21933b0417b89515562656547a77b4931f98176eb173644c0d35032a33d6bb", size = 19943, upload-time = "2025-12-21T09:29:43.6Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fd/c4/813bb09f0985cb21e959f21f2464169eca882656849adf727ac7bb7e1767/jaraco_functools-4.4.0-py3-none-any.whl", hash = "sha256:9eec1e36f45c818d9bf307c8948eb03b2b56cd44087b3cdc989abca1f20b9176", size = 10481, upload-time = "2025-12-21T09:29:42.27Z" }, +] + +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, +] + +[[package]] +name = "keyring" +version = "25.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jaraco-classes" }, + { name = "jaraco-context" }, + { name = "jaraco-functools" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", marker = "sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/674af6ef2f97d56f0ab5153bf0bfa28ccb6c3ed4d1babf4305449668807b/keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b", size = 63516, upload-time = "2025-11-16T16:26:09.482Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, +] + 
+[[package]] +name = "librt" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/21/d39b0a87ac52fc98f621fb6f8060efb017a767ebbbac2f99fbcbc9ddc0d7/librt-0.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a28f2612ab566b17f3698b0da021ff9960610301607c9a5e8eaca62f5e1c350a", size = 66516, upload-time = "2026-02-17T16:11:41.604Z" }, + { url = "https://files.pythonhosted.org/packages/69/f1/46375e71441c43e8ae335905e069f1c54febee63a146278bcee8782c84fd/librt-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:60a78b694c9aee2a0f1aaeaa7d101cf713e92e8423a941d2897f4fa37908dab9", size = 68634, upload-time = "2026-02-17T16:11:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/c510de7f93bf1fa19e13423a606d8189a02624a800710f6e6a0a0f0784b3/librt-0.8.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:758509ea3f1eba2a57558e7e98f4659d0ea7670bff49673b0dde18a3c7e6c0eb", size = 198941, upload-time = "2026-02-17T16:11:44.28Z" }, + { url = "https://files.pythonhosted.org/packages/dd/36/e725903416409a533d92398e88ce665476f275081d0d7d42f9c4951999e5/librt-0.8.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:039b9f2c506bd0ab0f8725aa5ba339c6f0cd19d3b514b50d134789809c24285d", size = 209991, upload-time = "2026-02-17T16:11:45.462Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/8d908a152e1875c9f8eac96c97a480df425e657cdb47854b9efaa4998889/librt-0.8.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:5bb54f1205a3a6ab41a6fd71dfcdcbd278670d3a90ca502a30d9da583105b6f7", size = 224476, upload-time = "2026-02-17T16:11:46.542Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b8/a22c34f2c485b8903a06f3fe3315341fe6876ef3599792344669db98fcff/librt-0.8.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:05bd41cdee35b0c59c259f870f6da532a2c5ca57db95b5f23689fcb5c9e42440", size = 217518, upload-time = "2026-02-17T16:11:47.746Z" }, + { url = "https://files.pythonhosted.org/packages/79/6f/5c6fea00357e4f82ba44f81dbfb027921f1ab10e320d4a64e1c408d035d9/librt-0.8.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adfab487facf03f0d0857b8710cf82d0704a309d8ffc33b03d9302b4c64e91a9", size = 225116, upload-time = "2026-02-17T16:11:49.298Z" }, + { url = "https://files.pythonhosted.org/packages/f2/a0/95ced4e7b1267fe1e2720a111685bcddf0e781f7e9e0ce59d751c44dcfe5/librt-0.8.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:153188fe98a72f206042be10a2c6026139852805215ed9539186312d50a8e972", size = 217751, upload-time = "2026-02-17T16:11:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/93/c2/0517281cb4d4101c27ab59472924e67f55e375bc46bedae94ac6dc6e1902/librt-0.8.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dd3c41254ee98604b08bd5b3af5bf0a89740d4ee0711de95b65166bf44091921", size = 218378, upload-time = "2026-02-17T16:11:51.783Z" }, + { url = "https://files.pythonhosted.org/packages/43/e8/37b3ac108e8976888e559a7b227d0ceac03c384cfd3e7a1c2ee248dbae79/librt-0.8.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e0d138c7ae532908cbb342162b2611dbd4d90c941cd25ab82084aaf71d2c0bd0", size = 241199, upload-time = "2026-02-17T16:11:53.561Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/35812d041c53967fedf551a39399271bbe4257e681236a2cf1a69c8e7fa1/librt-0.8.1-cp312-cp312-win32.whl", hash = "sha256:43353b943613c5d9c49a25aaffdba46f888ec354e71e3529a00cca3f04d66a7a", size = 54917, upload-time = "2026-02-17T16:11:54.758Z" 
}, + { url = "https://files.pythonhosted.org/packages/de/d1/fa5d5331b862b9775aaf2a100f5ef86854e5d4407f71bddf102f4421e034/librt-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff8baf1f8d3f4b6b7257fcb75a501f2a5499d0dda57645baa09d4d0d34b19444", size = 62017, upload-time = "2026-02-17T16:11:55.748Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7c/c614252f9acda59b01a66e2ddfd243ed1c7e1deab0293332dfbccf862808/librt-0.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f2ae3725904f7377e11cc37722d5d401e8b3d5851fb9273d7f4fe04f6b3d37d", size = 52441, upload-time = "2026-02-17T16:11:56.801Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3c/f614c8e4eaac7cbf2bbdf9528790b21d89e277ee20d57dc6e559c626105f/librt-0.8.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e6bad1cd94f6764e1e21950542f818a09316645337fd5ab9a7acc45d99a8f35", size = 66529, upload-time = "2026-02-17T16:11:57.809Z" }, + { url = "https://files.pythonhosted.org/packages/ab/96/5836544a45100ae411eda07d29e3d99448e5258b6e9c8059deb92945f5c2/librt-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cf450f498c30af55551ba4f66b9123b7185362ec8b625a773b3d39aa1a717583", size = 68669, upload-time = "2026-02-17T16:11:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/06/53/f0b992b57af6d5531bf4677d75c44f095f2366a1741fb695ee462ae04b05/librt-0.8.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eca45e982fa074090057132e30585a7e8674e9e885d402eae85633e9f449ce6c", size = 199279, upload-time = "2026-02-17T16:11:59.862Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/4848cc16e268d14280d8168aee4f31cea92bbd2b79ce33d3e166f2b4e4fc/librt-0.8.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c3811485fccfda840861905b8c70bba5ec094e02825598bb9d4ca3936857a04", size = 210288, upload-time = "2026-02-17T16:12:00.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/05/27fdc2e95de26273d83b96742d8d3b7345f2ea2bdbd2405cc504644f2096/librt-0.8.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e4af413908f77294605e28cfd98063f54b2c790561383971d2f52d113d9c363", size = 224809, upload-time = "2026-02-17T16:12:02.108Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d0/78200a45ba3240cb042bc597d6f2accba9193a2c57d0356268cbbe2d0925/librt-0.8.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5212a5bd7fae98dae95710032902edcd2ec4dc994e883294f75c857b83f9aba0", size = 218075, upload-time = "2026-02-17T16:12:03.631Z" }, + { url = "https://files.pythonhosted.org/packages/af/72/a210839fa74c90474897124c064ffca07f8d4b347b6574d309686aae7ca6/librt-0.8.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e692aa2d1d604e6ca12d35e51fdc36f4cda6345e28e36374579f7ef3611b3012", size = 225486, upload-time = "2026-02-17T16:12:04.725Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c1/a03cc63722339ddbf087485f253493e2b013039f5b707e8e6016141130fa/librt-0.8.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4be2a5c926b9770c9e08e717f05737a269b9d0ebc5d2f0060f0fe3fe9ce47acb", size = 218219, upload-time = "2026-02-17T16:12:05.828Z" }, + { url = "https://files.pythonhosted.org/packages/58/f5/fff6108af0acf941c6f274a946aea0e484bd10cd2dc37610287ce49388c5/librt-0.8.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fd1a720332ea335ceb544cf0a03f81df92abd4bb887679fd1e460976b0e6214b", size = 218750, upload-time = "2026-02-17T16:12:07.09Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/5a387bfef30ec1e4b4f30562c8586566faf87e47d696768c19feb49e3646/librt-0.8.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2af9e01e0ef80d95ae3c720be101227edae5f2fe7e3dc63d8857fadfc5a1d", size = 241624, upload-time = "2026-02-17T16:12:08.43Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/be/24f8502db11d405232ac1162eb98069ca49c3306c1d75c6ccc61d9af8789/librt-0.8.1-cp313-cp313-win32.whl", hash = "sha256:086a32dbb71336627e78cc1d6ee305a68d038ef7d4c39aaff41ae8c9aa46e91a", size = 54969, upload-time = "2026-02-17T16:12:09.633Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/c9fdf6cb2a529c1a092ce769a12d88c8cca991194dfe641b6af12fa964d2/librt-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:e11769a1dbda4da7b00a76cfffa67aa47cfa66921d2724539eee4b9ede780b79", size = 62000, upload-time = "2026-02-17T16:12:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/68f80ca3ac4924f250cdfa6e20142a803e5e50fca96ef5148c52ee8c10ea/librt-0.8.1-cp313-cp313-win_arm64.whl", hash = "sha256:924817ab3141aca17893386ee13261f1d100d1ef410d70afe4389f2359fea4f0", size = 52495, upload-time = "2026-02-17T16:12:11.633Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f", size = 66081, upload-time = "2026-02-17T16:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c", size = 68309, upload-time = "2026-02-17T16:12:13.756Z" }, + { url = "https://files.pythonhosted.org/packages/a4/36/46820d03f058cfb5a9de5940640ba03165ed8aded69e0733c417bb04df34/librt-0.8.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7aae78ab5e3206181780e56912d1b9bb9f90a7249ce12f0e8bf531d0462dd0fc", size = 196804, upload-time = "2026-02-17T16:12:14.818Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/18/5dd0d3b87b8ff9c061849fbdb347758d1f724b9a82241aa908e0ec54ccd0/librt-0.8.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172d57ec04346b047ca6af181e1ea4858086c80bdf455f61994c4aa6fc3f866c", size = 206907, upload-time = "2026-02-17T16:12:16.513Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3", size = 221217, upload-time = "2026-02-17T16:12:17.906Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ff/7e01f2dda84a8f5d280637a2e5827210a8acca9a567a54507ef1c75b342d/librt-0.8.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:10c42e1f6fd06733ef65ae7bebce2872bcafd8d6e6b0a08fe0a05a23b044fb14", size = 214622, upload-time = "2026-02-17T16:12:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/1e/8c/5b093d08a13946034fed57619742f790faf77058558b14ca36a6e331161e/librt-0.8.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c8dfa264b9193c4ee19113c985c95f876fae5e51f731494fc4e0cf594990ba7", size = 221987, upload-time = "2026-02-17T16:12:20.331Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cc/86b0b3b151d40920ad45a94ce0171dec1aebba8a9d72bb3fa00c73ab25dd/librt-0.8.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:01170b6729a438f0dedc4a26ed342e3dc4f02d1000b4b19f980e1877f0c297e6", size = 215132, upload-time = "2026-02-17T16:12:21.54Z" }, + { url = "https://files.pythonhosted.org/packages/fc/be/8588164a46edf1e69858d952654e216a9a91174688eeefb9efbb38a9c799/librt-0.8.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:7b02679a0d783bdae30d443025b94465d8c3dc512f32f5b5031f93f57ac32071", size = 215195, upload-time = "2026-02-17T16:12:23.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/f2/0b9279bea735c734d69344ecfe056c1ba211694a72df10f568745c899c76/librt-0.8.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:190b109bb69592a3401fe1ffdea41a2e73370ace2ffdc4a0e8e2b39cdea81b78", size = 237946, upload-time = "2026-02-17T16:12:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/e9/cc/5f2a34fbc8aeb35314a3641f9956fa9051a947424652fad9882be7a97949/librt-0.8.1-cp314-cp314-win32.whl", hash = "sha256:e70a57ecf89a0f64c24e37f38d3fe217a58169d2fe6ed6d70554964042474023", size = 50689, upload-time = "2026-02-17T16:12:25.766Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/cd4d010ab2147339ca2b93e959c3686e964edc6de66ddacc935c325883d7/librt-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:7e2f3edca35664499fbb36e4770650c4bd4a08abc1f4458eab9df4ec56389730", size = 57875, upload-time = "2026-02-17T16:12:27.465Z" }, + { url = "https://files.pythonhosted.org/packages/84/0f/2143cb3c3ca48bd3379dcd11817163ca50781927c4537345d608b5045998/librt-0.8.1-cp314-cp314-win_arm64.whl", hash = "sha256:0d2f82168e55ddefd27c01c654ce52379c0750ddc31ee86b4b266bcf4d65f2a3", size = 48058, upload-time = "2026-02-17T16:12:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/9b23a87e37baf00311c3efe6b48d6b6c168c29902dfc3f04c338372fd7db/librt-0.8.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c74a2da57a094bd48d03fa5d196da83d2815678385d2978657499063709abe1", size = 68313, upload-time = "2026-02-17T16:12:29.659Z" }, + { url = "https://files.pythonhosted.org/packages/db/9a/859c41e5a4f1c84200a7d2b92f586aa27133c8243b6cac9926f6e54d01b9/librt-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a355d99c4c0d8e5b770313b8b247411ed40949ca44e33e46a4789b9293a907ee", size = 70994, upload-time = "2026-02-17T16:12:31.516Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/28/10605366ee599ed34223ac2bf66404c6fb59399f47108215d16d5ad751a8/librt-0.8.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2eb345e8b33fb748227409c9f1233d4df354d6e54091f0e8fc53acdb2ffedeb7", size = 220770, upload-time = "2026-02-17T16:12:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/af/8d/16ed8fd452dafae9c48d17a6bc1ee3e818fd40ef718d149a8eff2c9f4ea2/librt-0.8.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9be2f15e53ce4e83cc08adc29b26fb5978db62ef2a366fbdf716c8a6c8901040", size = 235409, upload-time = "2026-02-17T16:12:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/7bdf3e49349c134b25db816e4a3db6b94a47ac69d7d46b1e682c2c4949be/librt-0.8.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:785ae29c1f5c6e7c2cde2c7c0e148147f4503da3abc5d44d482068da5322fd9e", size = 246473, upload-time = "2026-02-17T16:12:36.656Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8a/91fab8e4fd2a24930a17188c7af5380eb27b203d72101c9cc000dbdfd95a/librt-0.8.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d3a7da44baf692f0c6aeb5b2a09c5e6fc7a703bca9ffa337ddd2e2da53f7732", size = 238866, upload-time = "2026-02-17T16:12:37.849Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e0/c45a098843fc7c07e18a7f8a24ca8496aecbf7bdcd54980c6ca1aaa79a8e/librt-0.8.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fc48998000cbc39ec0d5311312dda93ecf92b39aaf184c5e817d5d440b29624", size = 250248, upload-time = "2026-02-17T16:12:39.445Z" }, + { url = "https://files.pythonhosted.org/packages/82/30/07627de23036640c952cce0c1fe78972e77d7d2f8fd54fa5ef4554ff4a56/librt-0.8.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e96baa6820280077a78244b2e06e416480ed859bbd8e5d641cf5742919d8beb4", size = 240629, upload-time = 
"2026-02-17T16:12:40.889Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/55bfe1ee3542eba055616f9098eaf6eddb966efb0ca0f44eaa4aba327307/librt-0.8.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:31362dbfe297b23590530007062c32c6f6176f6099646bb2c95ab1b00a57c382", size = 239615, upload-time = "2026-02-17T16:12:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/191d3d28abc26c9099b19852e6c99f7f6d400b82fa5a4e80291bd3803e19/librt-0.8.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc3656283d11540ab0ea01978378e73e10002145117055e03722417aeab30994", size = 263001, upload-time = "2026-02-17T16:12:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/b9/eb/7697f60fbe7042ab4e88f4ee6af496b7f222fffb0a4e3593ef1f29f81652/librt-0.8.1-cp314-cp314t-win32.whl", hash = "sha256:738f08021b3142c2918c03692608baed43bc51144c29e35807682f8070ee2a3a", size = 51328, upload-time = "2026-02-17T16:12:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/7c/72/34bf2eb7a15414a23e5e70ecb9440c1d3179f393d9349338a91e2781c0fb/librt-0.8.1-cp314-cp314t-win_amd64.whl", hash = "sha256:89815a22daf9c51884fb5dbe4f1ef65ee6a146e0b6a8df05f753e2e4a9359bf4", size = 58722, upload-time = "2026-02-17T16:12:46.85Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = "sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "more-itertools" +version = "10.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "open-data-agent" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "click" }, + { name = "keyring" }, + { name = "psycopg", extra = ["binary"] }, + { name = "pymysql" }, + { name = "pyyaml" }, + { name = "rich" }, +] + +[package.dev-dependencies] +dev = [ + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "ruff" }, + { name = "types-pymysql" }, + { name = "types-pyyaml" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = "==8.3.1" }, + { name = "keyring", specifier = ">=25.0" }, + { name = "psycopg", extras = ["binary"], specifier = ">=3.1" }, + { name = "pymysql", specifier = ">=1.1" }, + { name = "pyyaml", specifier = ">=6.0" }, + { name = "rich", specifier = "==14.3.3" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "mypy", specifier = ">=1.10" }, + { name = "pytest", specifier = "==9.0.2" }, + { name = "pytest-cov", specifier = ">=7.0.0" }, + { name = "ruff", specifier = ">=0.15.5" }, + { name = "types-pymysql", specifier = ">=1.1.0.20251220" }, + { name = "types-pyyaml", specifier = ">=6.0.12.20250915" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "psycopg" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/b6/379d0a960f8f435ec78720462fd94c4863e7a31237cf81bf76d0af5883bf/psycopg-3.3.3.tar.gz", hash = "sha256:5e9a47458b3c1583326513b2556a2a9473a1001a56c9efe9e587245b43148dd9", size = 165624, upload-time = "2026-02-18T16:52:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/5b/181e2e3becb7672b502f0ed7f16ed7352aca7c109cfb94cf3878a9186db9/psycopg-3.3.3-py3-none-any.whl", hash = 
"sha256:f96525a72bcfade6584ab17e89de415ff360748c766f0106959144dcbb38c698", size = 212768, upload-time = "2026-02-18T16:46:27.365Z" }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/15/021be5c0cbc5b7c1ab46e91cc3434eb42569f79a0592e67b8d25e66d844d/psycopg_binary-3.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6698dbab5bcef8fdb570fc9d35fd9ac52041771bfcfe6fd0fc5f5c4e36f1e99d", size = 4591170, upload-time = "2026-02-18T16:48:55.594Z" }, + { url = "https://files.pythonhosted.org/packages/f1/54/a60211c346c9a2f8c6b272b5f2bbe21f6e11800ce7f61e99ba75cf8b63e1/psycopg_binary-3.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:329ff393441e75f10b673ae99ab45276887993d49e65f141da20d915c05aafd8", size = 4670009, upload-time = "2026-02-18T16:49:03.608Z" }, + { url = "https://files.pythonhosted.org/packages/c1/53/ac7c18671347c553362aadbf65f92786eef9540676ca24114cc02f5be405/psycopg_binary-3.3.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:eb072949b8ebf4082ae24289a2b0fd724da9adc8f22743409d6fd718ddb379df", size = 5469735, upload-time = "2026-02-18T16:49:10.128Z" }, + { url = "https://files.pythonhosted.org/packages/7f/c3/4f4e040902b82a344eff1c736cde2f2720f127fe939c7e7565706f96dd44/psycopg_binary-3.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:263a24f39f26e19ed7fc982d7859a36f17841b05bebad3eb47bb9cd2dd785351", size = 5152919, upload-time = "2026-02-18T16:49:16.335Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e7/d929679c6a5c212bcf738806c7c89f5b3d0919f2e1685a0e08d6ff877945/psycopg_binary-3.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5152d50798c2fa5bd9b68ec68eb68a1b71b95126c1d70adaa1a08cd5eefdc23d", size = 
6738785, upload-time = "2026-02-18T16:49:22.687Z" }, + { url = "https://files.pythonhosted.org/packages/69/b0/09703aeb69a9443d232d7b5318d58742e8ca51ff79f90ffe6b88f1db45e7/psycopg_binary-3.3.3-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9d6a1e56dd267848edb824dbeb08cf5bac649e02ee0b03ba883ba3f4f0bd54f2", size = 4979008, upload-time = "2026-02-18T16:49:27.313Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a6/e662558b793c6e13a7473b970fee327d635270e41eded3090ef14045a6a5/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73eaaf4bb04709f545606c1db2f65f4000e8a04cdbf3e00d165a23004692093e", size = 4508255, upload-time = "2026-02-18T16:49:31.575Z" }, + { url = "https://files.pythonhosted.org/packages/5f/7f/0f8b2e1d5e0093921b6f324a948a5c740c1447fbb45e97acaf50241d0f39/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:162e5675efb4704192411eaf8e00d07f7960b679cd3306e7efb120bb8d9456cc", size = 4189166, upload-time = "2026-02-18T16:49:35.801Z" }, + { url = "https://files.pythonhosted.org/packages/92/ec/ce2e91c33bc8d10b00c87e2f6b0fb570641a6a60042d6a9ae35658a3a797/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:fab6b5e37715885c69f5d091f6ff229be71e235f272ebaa35158d5a46fd548a0", size = 3924544, upload-time = "2026-02-18T16:49:41.129Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2f/7718141485f73a924205af60041c392938852aa447a94c8cbd222ff389a1/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a4aab31bd6d1057f287c96c0effca3a25584eb9cc702f282ecb96ded7814e830", size = 4235297, upload-time = "2026-02-18T16:49:46.726Z" }, + { url = "https://files.pythonhosted.org/packages/57/f9/1add717e2643a003bbde31b1b220172e64fbc0cb09f06429820c9173f7fc/psycopg_binary-3.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:59aa31fe11a0e1d1bcc2ce37ed35fe2ac84cd65bb9036d049b1a1c39064d0f14", size = 3547659, upload-time = "2026-02-18T16:49:52.999Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/0a/cac9fdf1df16a269ba0e5f0f06cac61f826c94cadb39df028cdfe19d3a33/psycopg_binary-3.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05f32239aec25c5fb15f7948cffdc2dc0dac098e48b80a140e4ba32b572a2e7d", size = 4590414, upload-time = "2026-02-18T16:50:01.441Z" }, + { url = "https://files.pythonhosted.org/packages/9c/c0/d8f8508fbf440edbc0099b1abff33003cd80c9e66eb3a1e78834e3fb4fb9/psycopg_binary-3.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c84f9d214f2d1de2fafebc17fa68ac3f6561a59e291553dfc45ad299f4898c1", size = 4669021, upload-time = "2026-02-18T16:50:08.803Z" }, + { url = "https://files.pythonhosted.org/packages/04/05/097016b77e343b4568feddf12c72171fc513acef9a4214d21b9478569068/psycopg_binary-3.3.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e77957d2ba17cada11be09a5066d93026cdb61ada7c8893101d7fe1c6e1f3925", size = 5467453, upload-time = "2026-02-18T16:50:14.985Z" }, + { url = "https://files.pythonhosted.org/packages/91/23/73244e5feb55b5ca109cede6e97f32ef45189f0fdac4c80d75c99862729d/psycopg_binary-3.3.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:42961609ac07c232a427da7c87a468d3c82fee6762c220f38e37cfdacb2b178d", size = 5151135, upload-time = "2026-02-18T16:50:24.82Z" }, + { url = "https://files.pythonhosted.org/packages/11/49/5309473b9803b207682095201d8708bbc7842ddf3f192488a69204e36455/psycopg_binary-3.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae07a3114313dd91fce686cab2f4c44af094398519af0e0f854bc707e1aeedf1", size = 6737315, upload-time = "2026-02-18T16:50:35.106Z" }, + { url = "https://files.pythonhosted.org/packages/d4/5d/03abe74ef34d460b33c4d9662bf6ec1dd38888324323c1a1752133c10377/psycopg_binary-3.3.3-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d257c58d7b36a621dcce1d01476ad8b60f12d80eb1406aee4cf796f88b2ae482", size = 4979783, upload-time = "2026-02-18T16:50:42.067Z" 
}, + { url = "https://files.pythonhosted.org/packages/f0/6c/3fbf8e604e15f2f3752900434046c00c90bb8764305a1b81112bff30ba24/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:07c7211f9327d522c9c47560cae00a4ecf6687f4e02d779d035dd3177b41cb12", size = 4509023, upload-time = "2026-02-18T16:50:50.116Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6b/1a06b43b7c7af756c80b67eac8bfaa51d77e68635a8a8d246e4f0bb7604a/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8e7e9eca9b363dbedeceeadd8be97149d2499081f3c52d141d7cd1f395a91f83", size = 4185874, upload-time = "2026-02-18T16:50:55.97Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d3/bf49e3dcaadba510170c8d111e5e69e5ae3f981c1554c5bb71c75ce354bb/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:cb85b1d5702877c16f28d7b92ba030c1f49ebcc9b87d03d8c10bf45a2f1c7508", size = 3925668, upload-time = "2026-02-18T16:51:03.299Z" }, + { url = "https://files.pythonhosted.org/packages/f8/92/0aac830ed6a944fe334404e1687a074e4215630725753f0e3e9a9a595b62/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4d4606c84d04b80f9138d72f1e28c6c02dc5ae0c7b8f3f8aaf89c681ce1cd1b1", size = 4234973, upload-time = "2026-02-18T16:51:09.097Z" }, + { url = "https://files.pythonhosted.org/packages/2e/96/102244653ee5a143ece5afe33f00f52fe64e389dfce8dbc87580c6d70d3d/psycopg_binary-3.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:74eae563166ebf74e8d950ff359be037b85723d99ca83f57d9b244a871d6c13b", size = 3551342, upload-time = "2026-02-18T16:51:13.892Z" }, + { url = "https://files.pythonhosted.org/packages/a2/71/7a57e5b12275fe7e7d84d54113f0226080423a869118419c9106c083a21c/psycopg_binary-3.3.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:497852c5eaf1f0c2d88ab74a64a8097c099deac0c71de1cbcf18659a8a04a4b2", size = 4607368, upload-time = "2026-02-18T16:51:19.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/04/cb834f120f2b2c10d4003515ef9ca9d688115b9431735e3936ae48549af8/psycopg_binary-3.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:258d1ea53464d29768bf25930f43291949f4c7becc706f6e220c515a63a24edd", size = 4687047, upload-time = "2026-02-18T16:51:23.84Z" }, + { url = "https://files.pythonhosted.org/packages/40/e9/47a69692d3da9704468041aa5ed3ad6fc7f6bb1a5ae788d261a26bbca6c7/psycopg_binary-3.3.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:111c59897a452196116db12e7f608da472fbff000693a21040e35fc978b23430", size = 5487096, upload-time = "2026-02-18T16:51:29.645Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b6/0e0dd6a2f802864a4ae3dbadf4ec620f05e3904c7842b326aafc43e5f464/psycopg_binary-3.3.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:17bb6600e2455993946385249a3c3d0af52cd70c1c1cdbf712e9d696d0b0bf1b", size = 5168720, upload-time = "2026-02-18T16:51:36.499Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0d/977af38ac19a6b55d22dff508bd743fd7c1901e1b73657e7937c7cccb0a3/psycopg_binary-3.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:642050398583d61c9856210568eb09a8e4f2fe8224bf3be21b67a370e677eead", size = 6762076, upload-time = "2026-02-18T16:51:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/34/40/912a39d48322cf86895c0eaf2d5b95cb899402443faefd4b09abbba6b6e1/psycopg_binary-3.3.3-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:533efe6dc3a7cba5e2a84e38970786bb966306863e45f3db152007e9f48638a6", size = 4997623, upload-time = "2026-02-18T16:51:47.707Z" }, + { url = "https://files.pythonhosted.org/packages/98/0c/c14d0e259c65dc7be854d926993f151077887391d5a081118907a9d89603/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5958dbf28b77ce2033482f6cb9ef04d43f5d8f4b7636e6963d5626f000efb23e", size = 4532096, upload-time = "2026-02-18T16:51:51.421Z" 
}, + { url = "https://files.pythonhosted.org/packages/39/21/8b7c50a194cfca6ea0fd4d1f276158307785775426e90700ab2eba5cd623/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:a6af77b6626ce92b5817bf294b4d45ec1a6161dba80fc2d82cdffdd6814fd023", size = 4208884, upload-time = "2026-02-18T16:51:57.336Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2c/a4981bf42cf30ebba0424971d7ce70a222ae9b82594c42fc3f2105d7b525/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:47f06fcbe8542b4d96d7392c476a74ada521c5aebdb41c3c0155f6595fc14c8d", size = 3944542, upload-time = "2026-02-18T16:52:04.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/e9/b7c29b56aa0b85a4e0c4d89db691c1ceef08f46a356369144430c155a2f5/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7800e6c6b5dc4b0ca7cc7370f770f53ac83886b76afda0848065a674231e856", size = 4254339, upload-time = "2026-02-18T16:52:10.444Z" }, + { url = "https://files.pythonhosted.org/packages/98/5a/291d89f44d3820fffb7a04ebc8f3ef5dda4f542f44a5daea0c55a84abf45/psycopg_binary-3.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:165f22ab5a9513a3d7425ffb7fcc7955ed8ccaeef6d37e369d6cc1dff1582383", size = 3652796, upload-time = "2026-02-18T16:52:14.02Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = 
"pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pymysql" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/ae/1fe3fcd9f959efa0ebe200b8de88b5a5ce3e767e38c7ac32fb179f16a388/pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03", size = 48258, upload-time = "2025-08-24T12:55:55.146Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9", size = 45300, upload-time = "2025-08-24T12:55:53.394Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = 
"sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = 
"2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time 
= "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/77/9b/840e0039e65fcf12758adf684d2289024d6140cde9268cc59887dc55189c/ruff-0.15.5.tar.gz", hash = "sha256:7c3601d3b6d76dce18c5c824fc8d06f4eef33d6df0c21ec7799510cde0f159a2", size = 4574214, upload-time = "2026-03-05T20:06:34.946Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/20/5369c3ce21588c708bcbe517a8fbe1a8dfdb5dfd5137e14790b1da71612c/ruff-0.15.5-py3-none-linux_armv6l.whl", hash = 
"sha256:4ae44c42281f42e3b06b988e442d344a5b9b72450ff3c892e30d11b29a96a57c", size = 10478185, upload-time = "2026-03-05T20:06:29.093Z" }, + { url = "https://files.pythonhosted.org/packages/44/ed/e81dd668547da281e5dce710cf0bc60193f8d3d43833e8241d006720e42b/ruff-0.15.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6edd3792d408ebcf61adabc01822da687579a1a023f297618ac27a5b51ef0080", size = 10859201, upload-time = "2026-03-05T20:06:32.632Z" }, + { url = "https://files.pythonhosted.org/packages/c4/8f/533075f00aaf19b07c5cd6aa6e5d89424b06b3b3f4583bfa9c640a079059/ruff-0.15.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:89f463f7c8205a9f8dea9d658d59eff49db05f88f89cc3047fb1a02d9f344010", size = 10184752, upload-time = "2026-03-05T20:06:40.312Z" }, + { url = "https://files.pythonhosted.org/packages/66/0e/ba49e2c3fa0395b3152bad634c7432f7edfc509c133b8f4529053ff024fb/ruff-0.15.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba786a8295c6574c1116704cf0b9e6563de3432ac888d8f83685654fe528fd65", size = 10534857, upload-time = "2026-03-05T20:06:19.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/71/39234440f27a226475a0659561adb0d784b4d247dfe7f43ffc12dd02e288/ruff-0.15.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd4b801e57955fe9f02b31d20375ab3a5c4415f2e5105b79fb94cf2642c91440", size = 10309120, upload-time = "2026-03-05T20:06:00.435Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/4140aa86a93df032156982b726f4952aaec4a883bb98cb6ef73c347da253/ruff-0.15.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391f7c73388f3d8c11b794dbbc2959a5b5afe66642c142a6effa90b45f6f5204", size = 11047428, upload-time = "2026-03-05T20:05:51.867Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f7/4953e7e3287676f78fbe85e3a0ca414c5ca81237b7575bdadc00229ac240/ruff-0.15.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8dc18f30302e379fe1e998548b0f5e9f4dff907f52f73ad6da419ea9c19d66c8", size = 11914251, upload-time = "2026-03-05T20:06:22.887Z" }, + { url = "https://files.pythonhosted.org/packages/77/46/0f7c865c10cf896ccf5a939c3e84e1cfaeed608ff5249584799a74d33835/ruff-0.15.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc6e7f90087e2d27f98dc34ed1b3ab7c8f0d273cc5431415454e22c0bd2a681", size = 11333801, upload-time = "2026-03-05T20:05:57.168Z" }, + { url = "https://files.pythonhosted.org/packages/d3/01/a10fe54b653061585e655f5286c2662ebddb68831ed3eaebfb0eb08c0a16/ruff-0.15.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cb7169f53c1ddb06e71a9aebd7e98fc0fea936b39afb36d8e86d36ecc2636a", size = 11206821, upload-time = "2026-03-05T20:06:03.441Z" }, + { url = "https://files.pythonhosted.org/packages/7a/0d/2132ceaf20c5e8699aa83da2706ecb5c5dcdf78b453f77edca7fb70f8a93/ruff-0.15.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9b037924500a31ee17389b5c8c4d88874cc6ea8e42f12e9c61a3d754ff72f1ca", size = 11133326, upload-time = "2026-03-05T20:06:25.655Z" }, + { url = "https://files.pythonhosted.org/packages/72/cb/2e5259a7eb2a0f87c08c0fe5bf5825a1e4b90883a52685524596bfc93072/ruff-0.15.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65bb414e5b4eadd95a8c1e4804f6772bbe8995889f203a01f77ddf2d790929dd", size = 10510820, upload-time = "2026-03-05T20:06:37.79Z" }, + { url = "https://files.pythonhosted.org/packages/ff/20/b67ce78f9e6c59ffbdb5b4503d0090e749b5f2d31b599b554698a80d861c/ruff-0.15.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d20aa469ae3b57033519c559e9bc9cd9e782842e39be05b50e852c7c981fa01d", size = 10302395, upload-time = "2026-03-05T20:05:54.504Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e5/719f1acccd31b720d477751558ed74e9c88134adcc377e5e886af89d3072/ruff-0.15.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:15388dd28c9161cdb8eda68993533acc870aa4e646a0a277aa166de9ad5a8752", size = 10754069, upload-time 
= "2026-03-05T20:06:06.422Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/d1db14469e32d98f3ca27079dbd30b7b44dbb5317d06ab36718dee3baf03/ruff-0.15.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b30da330cbd03bed0c21420b6b953158f60c74c54c5f4c1dabbdf3a57bf355d2", size = 11304315, upload-time = "2026-03-05T20:06:10.867Z" }, + { url = "https://files.pythonhosted.org/packages/28/3a/950367aee7c69027f4f422059227b290ed780366b6aecee5de5039d50fa8/ruff-0.15.5-py3-none-win32.whl", hash = "sha256:732e5ee1f98ba5b3679029989a06ca39a950cced52143a0ea82a2102cb592b74", size = 10551676, upload-time = "2026-03-05T20:06:13.705Z" }, + { url = "https://files.pythonhosted.org/packages/b8/00/bf077a505b4e649bdd3c47ff8ec967735ce2544c8e4a43aba42ee9bf935d/ruff-0.15.5-py3-none-win_amd64.whl", hash = "sha256:821d41c5fa9e19117616c35eaa3f4b75046ec76c65e7ae20a333e9a8696bc7fe", size = 11678972, upload-time = "2026-03-05T20:06:45.379Z" }, + { url = "https://files.pythonhosted.org/packages/fe/4e/cd76eca6db6115604b7626668e891c9dd03330384082e33662fb0f113614/ruff-0.15.5-py3-none-win_arm64.whl", hash = "sha256:b498d1c60d2fe5c10c45ec3f698901065772730b411f164ae270bb6bfcc4740b", size = 10965572, upload-time = "2026-03-05T20:06:16.984Z" }, +] + +[[package]] +name = "secretstorage" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554, upload-time = 
"2025-11-23T19:02:51.545Z" }, +] + +[[package]] +name = "types-pymysql" +version = "1.1.0.20251220" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/59/e959dd6d2f8e3b3c3f058d79ac9ece328922a5a8770c707fe9c3a757481c/types_pymysql-1.1.0.20251220.tar.gz", hash = "sha256:ae1c3df32a777489431e2e9963880a0df48f6591e0aa2fd3a6fabd9dee6eca54", size = 22184, upload-time = "2025-12-20T03:07:38.689Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/fa/4f4d3bfca9ef6dd17d69ed18b96564c53b32d3ce774132308d0bee849f10/types_pymysql-1.1.0.20251220-py3-none-any.whl", hash = "sha256:fa1082af7dea6c53b6caa5784241924b1296ea3a8d3bd060417352c5e10c0618", size = 23067, upload-time = "2025-12-20T03:07:37.766Z" }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250915" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +]